import mock
from paasta_tools import adhoc_tools
from paasta_tools.utils import DeploymentsJsonV2
from paasta_tools.utils import NoConfigurationForServiceError
def test_get_default_interactive_config():
with mock.patch(
"paasta_tools.adhoc_tools.load_adhoc_job_config", autospec=True
) as mock_load_adhoc_job_config:
mock_load_adhoc_job_config.return_value = adhoc_tools.AdhocJobConfig(
service="fake_service",
instance="interactive",
cluster="fake_cluster",
config_dict={},
branch_dict={"deploy_group": "fake_deploy_group"},
)
result = adhoc_tools.get_default_interactive_config(
"fake_service", "fake_cluster", "/fake/soa/dir", load_deployments=False
)
assert result.get_cpus() == 4
assert result.get_mem() == 10240
assert result.get_disk() == 1024
def test_get_default_interactive_config_reads_from_tty():
with mock.patch(
"paasta_tools.adhoc_tools.prompt_pick_one", autospec=True
) as mock_prompt_pick_one, mock.patch(
"paasta_tools.adhoc_tools.load_adhoc_job_config", autospec=True
) as mock_load_adhoc_job_config, mock.patch(
"paasta_tools.adhoc_tools.load_v2_deployments_json", autospec=True
) as mock_load_deployments_json:
mock_prompt_pick_one.return_value = "fake_deploygroup"
mock_load_adhoc_job_config.side_effect = NoConfigurationForServiceError
mock_load_deployments_json.return_value = DeploymentsJsonV2(
service="fake-service",
config_dict={
"deployments": {
"fake_deploygroup": {
"docker_image": mock.sentinel.docker_image,
"git_sha": mock.sentinel.git_sha,
}
},
"controls": {},
},
)
result = adhoc_tools.get_default_interactive_config(
"fake_service", "fake_cluster", "/fake/soa/dir", load_deployments=True
)
assert result.get_deploy_group() == "fake_deploygroup"
assert result.get_docker_image() == mock.sentinel.docker_image
|
import asyncio
import aiohttp
import aioshelly
import pytest
from homeassistant import config_entries, setup
from homeassistant.components.shelly.const import DOMAIN
from tests.async_mock import AsyncMock, Mock, patch
from tests.common import MockConfigEntry
MOCK_SETTINGS = {
"name": "Test name",
"device": {"mac": "test-mac", "hostname": "test-host"},
}
DISCOVERY_INFO = {
"host": "1.1.1.1",
"name": "shelly1pm-12345",
"properties": {"id": "shelly1pm-12345"},
}
SWITCH25_DISCOVERY_INFO = {
"host": "1.1.1.1",
"name": "shellyswitch25-12345",
"properties": {"id": "shellyswitch25-12345"},
}
async def test_form(hass):
"""Test we get the form."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
with patch(
"aioshelly.get_info",
return_value={"mac": "test-mac", "type": "SHSW-1", "auth": False},
), patch(
"aioshelly.Device.create",
new=AsyncMock(
return_value=Mock(
shutdown=AsyncMock(),
settings=MOCK_SETTINGS,
)
),
), patch(
"homeassistant.components.shelly.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.shelly.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": "1.1.1.1"},
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "Test name"
assert result2["data"] == {
"host": "1.1.1.1",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_title_without_name_and_prefix(hass):
"""Test we set the title to the hostname when the device doesn't have a name."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
settings = MOCK_SETTINGS.copy()
settings["name"] = None
settings["device"] = settings["device"].copy()
settings["device"]["hostname"] = "shelly1pm-12345"
with patch(
"aioshelly.get_info",
return_value={"mac": "test-mac", "type": "SHSW-1", "auth": False},
), patch(
"aioshelly.Device.create",
new=AsyncMock(
return_value=Mock(
shutdown=AsyncMock(),
settings=settings,
)
),
), patch(
"homeassistant.components.shelly.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.shelly.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": "1.1.1.1"},
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "shelly1pm-12345"
assert result2["data"] == {
"host": "1.1.1.1",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_auth(hass):
"""Test manual configuration if auth is required."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
with patch(
"aioshelly.get_info",
return_value={"mac": "test-mac", "type": "SHSW-1", "auth": True},
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": "1.1.1.1"},
)
assert result2["type"] == "form"
assert result["errors"] == {}
with patch(
"aioshelly.Device.create",
new=AsyncMock(
return_value=Mock(
shutdown=AsyncMock(),
settings=MOCK_SETTINGS,
)
),
), patch(
"homeassistant.components.shelly.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.shelly.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result3 = await hass.config_entries.flow.async_configure(
result2["flow_id"],
{"username": "test username", "password": "test password"},
)
await hass.async_block_till_done()
assert result3["type"] == "create_entry"
assert result3["title"] == "Test name"
assert result3["data"] == {
"host": "1.1.1.1",
"username": "test username",
"password": "test password",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
@pytest.mark.parametrize(
"error", [(asyncio.TimeoutError, "cannot_connect"), (ValueError, "unknown")]
)
async def test_form_errors_get_info(hass, error):
"""Test we handle errors."""
exc, base_error = error
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch("aioshelly.get_info", side_effect=exc):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": "1.1.1.1"},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": base_error}
@pytest.mark.parametrize(
"error", [(asyncio.TimeoutError, "cannot_connect"), (ValueError, "unknown")]
)
async def test_form_errors_test_connection(hass, error):
"""Test we handle errors."""
exc, base_error = error
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"aioshelly.get_info", return_value={"mac": "test-mac", "auth": False}
), patch("aioshelly.Device.create", new=AsyncMock(side_effect=exc)):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": "1.1.1.1"},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": base_error}
async def test_form_already_configured(hass):
"""Test we get the form."""
await setup.async_setup_component(hass, "persistent_notification", {})
entry = MockConfigEntry(
domain="shelly", unique_id="test-mac", data={"host": "0.0.0.0"}
)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"aioshelly.get_info",
return_value={"mac": "test-mac", "type": "SHSW-1", "auth": False},
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": "1.1.1.1"},
)
assert result2["type"] == "abort"
assert result2["reason"] == "already_configured"
# Test config entry got updated with latest IP
assert entry.data["host"] == "1.1.1.1"
async def test_form_firmware_unsupported(hass):
"""Test we abort if device firmware is unsupported."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch("aioshelly.get_info", side_effect=aioshelly.FirmwareUnsupported):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": "1.1.1.1"},
)
assert result2["type"] == "abort"
assert result2["reason"] == "unsupported_firmware"
@pytest.mark.parametrize(
"error",
[
(aiohttp.ClientResponseError(Mock(), (), status=400), "cannot_connect"),
(aiohttp.ClientResponseError(Mock(), (), status=401), "invalid_auth"),
(asyncio.TimeoutError, "cannot_connect"),
(ValueError, "unknown"),
],
)
async def test_form_auth_errors_test_connection(hass, error):
"""Test we handle errors in authenticated devices."""
exc, base_error = error
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch("aioshelly.get_info", return_value={"mac": "test-mac", "auth": True}):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": "1.1.1.1"},
)
with patch(
"aioshelly.Device.create",
new=AsyncMock(side_effect=exc),
):
result3 = await hass.config_entries.flow.async_configure(
result2["flow_id"],
{"username": "test username", "password": "test password"},
)
assert result3["type"] == "form"
assert result3["errors"] == {"base": base_error}
async def test_zeroconf(hass):
"""Test we get the form."""
await setup.async_setup_component(hass, "persistent_notification", {})
with patch(
"aioshelly.get_info",
return_value={"mac": "test-mac", "type": "SHSW-1", "auth": False},
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
data=DISCOVERY_INFO,
context={"source": config_entries.SOURCE_ZEROCONF},
)
assert result["type"] == "form"
assert result["errors"] == {}
context = next(
flow["context"]
for flow in hass.config_entries.flow.async_progress()
if flow["flow_id"] == result["flow_id"]
)
assert context["title_placeholders"]["name"] == "shelly1pm-12345"
with patch(
"aioshelly.Device.create",
new=AsyncMock(
return_value=Mock(
shutdown=AsyncMock(),
settings=MOCK_SETTINGS,
)
),
), patch(
"homeassistant.components.shelly.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.shelly.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{},
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "Test name"
assert result2["data"] == {
"host": "1.1.1.1",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_zeroconf_with_switch_prefix(hass):
"""Test we get remove shelly from the prefix."""
await setup.async_setup_component(hass, "persistent_notification", {})
with patch(
"aioshelly.get_info",
return_value={"mac": "test-mac", "type": "SHSW-1", "auth": False},
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
data=SWITCH25_DISCOVERY_INFO,
context={"source": config_entries.SOURCE_ZEROCONF},
)
assert result["type"] == "form"
assert result["errors"] == {}
context = next(
flow["context"]
for flow in hass.config_entries.flow.async_progress()
if flow["flow_id"] == result["flow_id"]
)
assert context["title_placeholders"]["name"] == "switch25-12345"
@pytest.mark.parametrize(
"error", [(asyncio.TimeoutError, "cannot_connect"), (ValueError, "unknown")]
)
async def test_zeroconf_confirm_error(hass, error):
"""Test we get the form."""
exc, base_error = error
await setup.async_setup_component(hass, "persistent_notification", {})
with patch(
"aioshelly.get_info",
return_value={"mac": "test-mac", "type": "SHSW-1", "auth": False},
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
data=DISCOVERY_INFO,
context={"source": config_entries.SOURCE_ZEROCONF},
)
assert result["type"] == "form"
assert result["errors"] == {}
with patch(
"aioshelly.Device.create",
new=AsyncMock(side_effect=exc),
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": base_error}
async def test_zeroconf_already_configured(hass):
"""Test we get the form."""
await setup.async_setup_component(hass, "persistent_notification", {})
entry = MockConfigEntry(
domain="shelly", unique_id="test-mac", data={"host": "0.0.0.0"}
)
entry.add_to_hass(hass)
with patch(
"aioshelly.get_info",
return_value={"mac": "test-mac", "type": "SHSW-1", "auth": False},
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
data=DISCOVERY_INFO,
context={"source": config_entries.SOURCE_ZEROCONF},
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
# Test config entry got updated with latest IP
assert entry.data["host"] == "1.1.1.1"
async def test_zeroconf_firmware_unsupported(hass):
"""Test we abort if device firmware is unsupported."""
with patch("aioshelly.get_info", side_effect=aioshelly.FirmwareUnsupported):
result = await hass.config_entries.flow.async_init(
DOMAIN,
data=DISCOVERY_INFO,
context={"source": config_entries.SOURCE_ZEROCONF},
)
assert result["type"] == "abort"
assert result["reason"] == "unsupported_firmware"
async def test_zeroconf_cannot_connect(hass):
"""Test we get the form."""
with patch("aioshelly.get_info", side_effect=asyncio.TimeoutError):
result = await hass.config_entries.flow.async_init(
DOMAIN,
data=DISCOVERY_INFO,
context={"source": config_entries.SOURCE_ZEROCONF},
)
assert result["type"] == "abort"
assert result["reason"] == "cannot_connect"
async def test_zeroconf_require_auth(hass):
"""Test zeroconf if auth is required."""
await setup.async_setup_component(hass, "persistent_notification", {})
with patch(
"aioshelly.get_info",
return_value={"mac": "test-mac", "type": "SHSW-1", "auth": True},
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
data=DISCOVERY_INFO,
context={"source": config_entries.SOURCE_ZEROCONF},
)
assert result["type"] == "form"
assert result["errors"] == {}
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{},
)
assert result2["type"] == "form"
assert result2["errors"] == {}
with patch(
"aioshelly.Device.create",
new=AsyncMock(
return_value=Mock(
shutdown=AsyncMock(),
settings=MOCK_SETTINGS,
)
),
), patch(
"homeassistant.components.shelly.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.shelly.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result3 = await hass.config_entries.flow.async_configure(
result2["flow_id"],
{"username": "test username", "password": "test password"},
)
await hass.async_block_till_done()
assert result3["type"] == "create_entry"
assert result3["title"] == "Test name"
assert result3["data"] == {
"host": "1.1.1.1",
"username": "test username",
"password": "test password",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_zeroconf_not_shelly(hass):
"""Test we filter out non-shelly devices."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
data={"host": "1.1.1.1", "name": "notshelly"},
context={"source": config_entries.SOURCE_ZEROCONF},
)
assert result["type"] == "abort"
assert result["reason"] == "not_shelly"
|
import queue
import numpy as np
from qstrader import settings
from qstrader.broker.broker import Broker
from qstrader.broker.fee_model.fee_model import FeeModel
from qstrader.broker.portfolio.portfolio import Portfolio
from qstrader.broker.transaction.transaction import Transaction
from qstrader.broker.fee_model.zero_fee_model import ZeroFeeModel
class SimulatedBroker(Broker):
"""
A class to handle simulation of a brokerage that
provides sensible defaults for both currency (USD) and
transaction cost handling for execution.
The default commission/fee model is a ZeroFeeModel
that charges no commission or tax (such as stamp duty).
Parameters
----------
start_dt : `pd.Timestamp`
The starting datetime of the account
exchange : `Exchange`
Used to determine whether the simulated exchange venue
is open, in order to determine if orders can be executed.
data_handler : `DataHandler`
The data handler used to obtain latest asset prices.
account_id : `str`, optional
The account ID for the brokerage account.
base_currency : `str`, optional
The currency denomination of the brokerage account.
initial_funds : `float`, optional
An initial amount of cash to add to the broker account.
fee_model : `FeeModel`, optional
The commission/fee model used to simulate fees/taxes.
Defaults to the ZeroFeeModel.
slippage_model : `SlippageModel`, optional
The model used to simulate trade slippage.
market_impact_model : `MarketImpactModel`, optional
The model used to simulate market impact of trading.
"""
def __init__(
self,
start_dt,
exchange,
data_handler,
account_id=None,
base_currency="USD",
initial_funds=0.0,
fee_model=ZeroFeeModel(),
slippage_model=None,
market_impact_model=None
):
self.start_dt = start_dt
self.exchange = exchange
self.data_handler = data_handler
self.current_dt = start_dt
self.account_id = account_id
self.base_currency = self._set_base_currency(base_currency)
self.initial_funds = self._set_initial_funds(initial_funds)
self.fee_model = self._set_fee_model(fee_model)
self.slippage_model = None # TODO: Implement
self.market_impact_model = None # TODO: Implement
self.cash_balances = self._set_cash_balances()
self.portfolios = self._set_initial_portfolios()
self.open_orders = self._set_initial_open_orders()
if settings.PRINT_EVENTS:
print('Initialising simulated broker "%s"...' % self.account_id)
def _set_base_currency(self, base_currency):
"""
Check and set the base currency from a list of
allowed currencies. Raise ValueError if the
currency is currently not supported by QSTrader.
Parameters
----------
base_currency : `str`
The base currency string.
Returns
-------
`str`
The base currency string.
"""
if base_currency not in settings.SUPPORTED['CURRENCIES']:
raise ValueError(
"Currency '%s' is not supported by QSTrader. Could not "
"set the base currency in the SimulatedBroker "
"entity." % base_currency
)
else:
return base_currency
def _set_initial_funds(self, initial_funds):
"""
Check and set the initial funds for the broker
master account. Raise ValueError if the
amount is negative.
Parameters
----------
initial_funds : `float`
The initial cash provided to the Broker.
Returns
-------
`float`
The checked initial funds.
"""
if initial_funds < 0.0:
raise ValueError(
"Could not create the SimulatedBroker entity as the "
"provided initial funds of '%s' were "
"negative." % initial_funds
)
else:
return initial_funds
def _set_fee_model(self, fee_model):
"""
Check and set the FeeModel instance for the broker.
The class default is no commission (ZeroFeeModel).
Parameters
----------
        fee_model : `FeeModel` (instance)
            The commission/fee model instance provided to the Broker.
        Returns
        -------
        `FeeModel` (instance)
            The validated FeeModel instance.
"""
if issubclass(fee_model.__class__, FeeModel):
return fee_model
else:
raise TypeError(
"Provided fee model '%s' in SimulatedBroker is not a "
"FeeModel subclass, so could not create the "
"Broker entity." % fee_model.__class__
)
def _set_cash_balances(self):
"""
Set the appropriate cash balances in the various
supported currencies, depending upon the availability
of initial funds.
Returns
-------
`dict{str: float}`
The mapping of cash currency strings to
amount stored by broker in local currency.
"""
cash_dict = dict(
(currency, 0.0)
for currency in settings.SUPPORTED['CURRENCIES']
)
if self.initial_funds > 0.0:
cash_dict[self.base_currency] = self.initial_funds
return cash_dict
def _set_initial_portfolios(self):
"""
Set the appropriate initial portfolios dictionary.
Returns
-------
`dict`
The empty initial portfolio dictionary.
"""
return {}
def _set_initial_open_orders(self):
"""
Set the appropriate initial open orders dictionary.
Returns
-------
`dict`
The empty initial open orders dictionary.
"""
return {}
def subscribe_funds_to_account(self, amount):
"""
Subscribe an amount of cash in the base currency
to the broker master cash account.
Parameters
----------
amount : `float`
The amount of cash to subscribe to the master account.
"""
if amount < 0.0:
raise ValueError(
"Cannot credit negative amount: "
"'%s' to the broker account." % amount
)
self.cash_balances[self.base_currency] += amount
if settings.PRINT_EVENTS:
print(
'(%s) - subscription: %0.2f subscribed to broker account "%s"' % (
self.current_dt, amount, self.account_id
)
)
def withdraw_funds_from_account(self, amount):
"""
        Withdraw an amount of cash in the base currency
        from the broker master cash account, assuming at
        least that amount of cash is present. If less cash
        is available, a ValueError is raised.
Parameters
----------
amount : `float`
The amount of cash to withdraw from the master account.
"""
if amount < 0:
raise ValueError(
"Cannot debit negative amount: "
"'%s' from the broker account." % amount
)
if amount > self.cash_balances[self.base_currency]:
raise ValueError(
"Not enough cash in the broker account to "
"withdraw. %0.2f withdrawal request exceeds "
"current broker account cash balance of %0.2f." % (
amount, self.cash_balances[self.base_currency]
)
)
self.cash_balances[self.base_currency] -= amount
if settings.PRINT_EVENTS:
print(
'(%s) - withdrawal: %0.2f withdrawn from broker account "%s"' % (
self.current_dt, amount, self.account_id
)
)
def get_account_cash_balance(self, currency=None):
"""
Retrieve the cash dictionary of the account, or
if a currency is provided, the cash value itself.
Raises a ValueError if the currency is not
found within the currency cash dictionary.
Parameters
----------
currency : `str`, optional
            The currency string to obtain the cash balance for.
        Returns
        -------
        `dict` or `float`
            The full cash balance dictionary, or the cash balance
            for the requested currency.
        """
if currency is None:
return self.cash_balances
if currency not in self.cash_balances.keys():
raise ValueError(
"Currency of type '%s' is not found within the "
"broker cash master accounts. Could not retrieve "
"cash balance." % currency
)
return self.cash_balances[currency]
def get_account_total_market_value(self):
"""
Retrieve the total market value of the account, across
each portfolio.
Returns
-------
`dict`
The dictionary of each portfolio's total market value.
"""
tmv_dict = {}
master_tmv = 0.0
for portfolio in self.portfolios.values():
pmv = self.get_portfolio_market_value(
portfolio.portfolio_id
)
tmv_dict[portfolio.portfolio_id] = pmv
master_tmv += pmv
tmv_dict["master"] = master_tmv
return tmv_dict
def get_account_total_equity(self):
"""
Retrieve the total equity of the account, across
each portfolio.
Returns
-------
`dict`
The dictionary of each portfolio's total equity.
"""
equity_dict = {}
master_equity = 0.0
for portfolio in self.portfolios.values():
port_equity = self.get_portfolio_total_equity(
portfolio.portfolio_id
)
equity_dict[portfolio.portfolio_id] = port_equity
master_equity += port_equity
equity_dict["master"] = master_equity
return equity_dict
def create_portfolio(self, portfolio_id, name=None):
"""
Create a new sub-portfolio with ID 'portfolio_id' and
an optional name given by 'name'.
Parameters
----------
portfolio_id : `str`
The portfolio ID string.
name : `str`, optional
The optional name string of the portfolio.
"""
portfolio_id_str = str(portfolio_id)
if portfolio_id_str in self.portfolios.keys():
raise ValueError(
"Portfolio with ID '%s' already exists. Cannot create "
"second portfolio with the same ID." % portfolio_id_str
)
else:
p = Portfolio(
self.current_dt,
currency=self.base_currency,
portfolio_id=portfolio_id_str,
name=name
)
self.portfolios[portfolio_id_str] = p
self.open_orders[portfolio_id_str] = queue.Queue()
if settings.PRINT_EVENTS:
print(
'(%s) - portfolio creation: Portfolio "%s" created at broker "%s"' % (
self.current_dt, portfolio_id_str, self.account_id
)
)
def list_all_portfolios(self):
"""
List all of the sub-portfolios associated with this
broker account in order of portfolio ID.
Returns
-------
`list`
The list of portfolios associated with the broker account.
"""
if self.portfolios == {}:
return []
return sorted(
list(self.portfolios.values()),
key=lambda port: port.portfolio_id
)
def subscribe_funds_to_portfolio(self, portfolio_id, amount):
"""
Subscribe funds to a particular sub-portfolio, assuming
        it exists and the cash amount is positive. Otherwise a
        KeyError or ValueError is raised.
Parameters
----------
portfolio_id : `str`
The portfolio ID string.
amount : `float`
The amount of cash to subscribe to the portfolio.
"""
if amount < 0.0:
raise ValueError(
"Cannot add negative amount: "
"%0.2f to a portfolio account." % amount
)
if portfolio_id not in self.portfolios.keys():
raise KeyError(
"Portfolio with ID '%s' does not exist. Cannot subscribe "
"funds to a non-existent portfolio." % portfolio_id
)
if amount > self.cash_balances[self.base_currency]:
raise ValueError(
"Not enough cash in the broker master account to "
"fund portfolio '%s'. %0.2f subscription amount exceeds "
"current broker account cash balance of %0.2f." % (
portfolio_id, amount,
self.cash_balances[self.base_currency]
)
)
self.portfolios[portfolio_id].subscribe_funds(self.current_dt, amount)
self.cash_balances[self.base_currency] -= amount
if settings.PRINT_EVENTS:
print(
'(%s) - subscription: %0.2f subscribed to portfolio "%s"' % (
self.current_dt, amount, portfolio_id
)
)
def withdraw_funds_from_portfolio(self, portfolio_id, amount):
"""
Withdraw funds from a particular sub-portfolio, assuming
it exists, the cash amount is positive and there is
sufficient remaining cash in the sub-portfolio to
        withdraw. Otherwise a KeyError or ValueError is raised.
Parameters
----------
portfolio_id : `str`
The portfolio ID string.
amount : `float`
The amount of cash to withdraw from the portfolio.
"""
if amount < 0.0:
raise ValueError(
"Cannot withdraw negative amount: "
"%0.2f from a portfolio account." % amount
)
if portfolio_id not in self.portfolios.keys():
raise KeyError(
"Portfolio with ID '%s' does not exist. Cannot "
"withdraw funds from a non-existent "
"portfolio. " % portfolio_id
)
if amount > self.portfolios[portfolio_id].cash:
raise ValueError(
"Not enough cash in portfolio '%s' to withdraw "
"into brokerage master account. Withdrawal "
"amount %0.2f exceeds current portfolio cash "
"balance of %0.2f." % (
portfolio_id, amount,
self.portfolios[portfolio_id].cash
)
)
self.portfolios[portfolio_id].withdraw_funds(
self.current_dt, amount
)
self.cash_balances[self.base_currency] += amount
if settings.PRINT_EVENTS:
print(
'(%s) - withdrawal: %0.2f withdrawn from portfolio "%s"' % (
self.current_dt, amount, portfolio_id
)
)
def get_portfolio_cash_balance(self, portfolio_id):
"""
Retrieve the cash balance of a sub-portfolio, if
it exists. Otherwise raise a ValueError.
Parameters
----------
portfolio_id : `str`
The portfolio ID string.
Returns
-------
`float`
The cash balance of the portfolio.
"""
if portfolio_id not in self.portfolios.keys():
raise ValueError(
"Portfolio with ID '%s' does not exist. Cannot "
"retrieve cash balance for non-existent "
"portfolio." % portfolio_id
)
return self.portfolios[portfolio_id].cash
def get_portfolio_total_market_value(self, portfolio_id):
"""
Returns the current total market value of a Portfolio
with ID 'portfolio_id'.
Parameters
----------
portfolio_id : `str`
The portfolio ID string.
Returns
-------
`float`
The total market value of the portfolio.
"""
if portfolio_id not in self.portfolios.keys():
raise KeyError(
"Portfolio with ID '%s' does not exist. "
"Cannot return total market value for a "
"non-existent portfolio." % portfolio_id
)
return self.portfolios[portfolio_id].total_market_value
def get_portfolio_total_equity(self, portfolio_id):
"""
Returns the current total equity of a Portfolio
with ID 'portfolio_id'.
Parameters
----------
portfolio_id : `str`
The portfolio ID string.
Returns
-------
`float`
The total equity of the portfolio.
"""
if portfolio_id not in self.portfolios.keys():
raise KeyError(
"Portfolio with ID '%s' does not exist. "
"Cannot return total equity for a "
"non-existent portfolio." % portfolio_id
)
return self.portfolios[portfolio_id].total_equity
def get_portfolio_as_dict(self, portfolio_id):
"""
        Return a particular portfolio with ID 'portfolio_id' as
a dictionary with Asset symbol strings as keys, with various
attributes as sub-dictionaries.
Parameters
----------
portfolio_id : `str`
The portfolio ID string.
Returns
-------
`dict{str}`
The portfolio representation of Assets as a dictionary.
"""
if portfolio_id not in self.portfolios.keys():
raise KeyError(
"Cannot return portfolio as dictionary since "
"portfolio with ID '%s' does not exist." % portfolio_id
)
return self.portfolios[portfolio_id].portfolio_to_dict()
def _execute_order(self, dt, portfolio_id, order):
"""
For a given portfolio ID string, create a Transaction instance from
the provided Order and ensure the Portfolio is appropriately updated
with the new information.
Parameters
----------
dt : `pd.Timestamp`
The current timestamp.
portfolio_id : `str`
The portfolio ID string.
order : `Order`
The Order instance to create the Transaction for.
"""
# Obtain a price for the asset, if no price then
# raise a ValueError
price_err_msg = (
"Could not obtain a latest market price for "
"Asset with ticker symbol '%s'. Order with ID '%s' was "
"not executed." % (
order.asset, order.order_id
)
)
bid_ask = self.data_handler.get_asset_latest_bid_ask_price(
dt, order.asset
)
        # NaN never compares equal to itself, so checking the tuple against
        # (np.NaN, np.NaN) would never match; use np.isnan to detect a
        # missing price instead.
        if any(np.isnan(p) for p in bid_ask):
            raise ValueError(price_err_msg)
# Calculate the consideration and total commission
# based on the commission model
if order.direction > 0:
price = bid_ask[1]
else:
price = bid_ask[0]
consideration = round(price * order.quantity)
total_commission = self.fee_model.calc_total_cost(
order.asset, order.quantity, consideration, self
)
        # Check that sufficient cash exists to carry out the order;
        # if not, warn but allow the trade to proceed with a negative
        # cash balance (no scaling is currently applied).
est_total_cost = consideration + total_commission
total_cash = self.portfolios[portfolio_id].cash
scaled_quantity = order.quantity
if est_total_cost > total_cash:
if settings.PRINT_EVENTS:
print(
"WARNING: Estimated transaction size of %0.2f exceeds "
"available cash of %0.2f. Transaction will still occur "
"with a negative cash balance." % (est_total_cost, total_cash)
)
# Create a transaction entity and update the portfolio
txn = Transaction(
order.asset, scaled_quantity, self.current_dt,
price, order.order_id, commission=total_commission
)
self.portfolios[portfolio_id].transact_asset(txn)
if settings.PRINT_EVENTS:
print(
"(%s) - executed order: %s, qty: %s, price: %0.2f, "
"consideration: %0.2f, commission: %0.2f, total: %0.2f" % (
self.current_dt, order.asset, scaled_quantity, price,
consideration, total_commission,
consideration + total_commission
)
)
def submit_order(self, portfolio_id, order):
"""
        Submit an Order instance for execution against the sub-portfolio
        with ID 'portfolio_id'. For the SimulatedBroker class
        specifically there are no restrictions on this occurring
        beyond having sufficient cash in the sub-portfolio to
        allow this to occur.
        Unlike most real brokerage accounts, settlement dates are
        not taken into account. The cash is taken immediately upon
entering a long position and returned immediately upon
closing out the position.
Parameters
----------
portfolio_id : `str`
The portfolio ID string.
order : `Order`
The Order instance to submit.
"""
# Check that the portfolio actually exists
if portfolio_id not in self.portfolios.keys():
raise KeyError(
"Portfolio with ID '%s' does not exist. Order with "
"ID '%s' was not executed." % (
portfolio_id, order.order_id
)
)
self.open_orders[portfolio_id].put(order)
if settings.PRINT_EVENTS:
print(
"(%s) - submitted order: %s, qty: %s" % (
self.current_dt, order.asset, order.quantity
)
)
def update(self, dt):
"""
        Update the SimulatedBroker timestamp, revalue all portfolio
        assets at the latest mid prices and, if the exchange is open,
        attempt to execute any queued open orders.
Parameters
----------
dt : `pd.Timestamp`
The current timestamp to update the Broker to.
"""
self.current_dt = dt
# Update portfolio asset values
for portfolio in self.portfolios:
for asset in self.portfolios[portfolio].pos_handler.positions:
mid_price = self.data_handler.get_asset_latest_mid_price(
dt, asset
)
self.portfolios[portfolio].update_market_value_of_asset(
asset, mid_price, self.current_dt
)
# Try to execute orders
if self.exchange.is_open_at_datetime(self.current_dt):
orders = []
for portfolio in self.portfolios:
while not self.open_orders[portfolio].empty():
orders.append(
(portfolio, self.open_orders[portfolio].get())
)
sorted_orders = sorted(orders, key=lambda x: x[1].direction)
for portfolio, order in sorted_orders:
self._execute_order(dt, portfolio, order)
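# Illustrative usage sketch of the API above, guarded so it only runs when the
# module is executed directly. The Exchange and DataHandler stand-ins built
# from unittest.mock are assumptions purely for demonstration; a real backtest
# would pass concrete implementations of both.
if __name__ == "__main__":
    from unittest import mock
    import pandas as pd

    start_dt = pd.Timestamp("2021-01-04 09:30:00", tz="UTC")
    exchange = mock.Mock()       # assumed stand-in for an Exchange
    data_handler = mock.Mock()   # assumed stand-in for a DataHandler

    broker = SimulatedBroker(
        start_dt, exchange, data_handler,
        account_id="demo-account", initial_funds=100000.0
    )
    broker.create_portfolio("growth", name="Growth Portfolio")
    broker.subscribe_funds_to_portfolio("growth", 50000.0)

    # 50,000 USD remains in the master account, 50,000 USD in the portfolio.
    print(broker.get_account_cash_balance("USD"))
    print(broker.get_portfolio_cash_balance("growth"))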
|
import collections
from coverage.backward import iitems
from coverage.debug import SimpleReprMixin
from coverage.misc import contract, CoverageException, nice_pair
class Analysis(object):
"""The results of analyzing a FileReporter."""
def __init__(self, data, file_reporter, file_mapper):
self.data = data
self.file_reporter = file_reporter
self.filename = file_mapper(self.file_reporter.filename)
self.statements = self.file_reporter.lines()
self.excluded = self.file_reporter.excluded_lines()
# Identify missing statements.
executed = self.data.lines(self.filename) or []
executed = self.file_reporter.translate_lines(executed)
self.executed = executed
self.missing = self.statements - self.executed
if self.data.has_arcs():
self._arc_possibilities = sorted(self.file_reporter.arcs())
self.exit_counts = self.file_reporter.exit_counts()
self.no_branch = self.file_reporter.no_branch_lines()
            n_branches = self._total_branches()
            mba = self.missing_branch_arcs()
            # n_partial_branches counts missed exits from branch lines that
            # did execute; n_missing_branches counts every missed exit,
            # including those from lines that never ran at all.
            n_partial_branches = sum(len(v) for k, v in iitems(mba) if k not in self.missing)
            n_missing_branches = sum(len(v) for k, v in iitems(mba))
else:
self._arc_possibilities = []
self.exit_counts = {}
self.no_branch = set()
n_branches = n_partial_branches = n_missing_branches = 0
self.numbers = Numbers(
n_files=1,
n_statements=len(self.statements),
n_excluded=len(self.excluded),
n_missing=len(self.missing),
n_branches=n_branches,
n_partial_branches=n_partial_branches,
n_missing_branches=n_missing_branches,
)
def missing_formatted(self, branches=False):
"""The missing line numbers, formatted nicely.
Returns a string like "1-2, 5-11, 13-14".
If `branches` is true, includes the missing branch arcs also.
"""
if branches and self.has_arcs():
arcs = iitems(self.missing_branch_arcs())
else:
arcs = None
return format_lines(self.statements, self.missing, arcs=arcs)
def has_arcs(self):
"""Were arcs measured in this result?"""
return self.data.has_arcs()
@contract(returns='list(tuple(int, int))')
def arc_possibilities(self):
"""Returns a sorted list of the arcs in the code."""
return self._arc_possibilities
@contract(returns='list(tuple(int, int))')
def arcs_executed(self):
"""Returns a sorted list of the arcs actually executed in the code."""
executed = self.data.arcs(self.filename) or []
executed = self.file_reporter.translate_arcs(executed)
return sorted(executed)
@contract(returns='list(tuple(int, int))')
def arcs_missing(self):
"""Returns a sorted list of the arcs in the code not executed."""
possible = self.arc_possibilities()
executed = self.arcs_executed()
missing = (
p for p in possible
if p not in executed
and p[0] not in self.no_branch
)
return sorted(missing)
@contract(returns='list(tuple(int, int))')
def arcs_unpredicted(self):
"""Returns a sorted list of the executed arcs missing from the code."""
possible = self.arc_possibilities()
executed = self.arcs_executed()
# Exclude arcs here which connect a line to itself. They can occur
# in executed data in some cases. This is where they can cause
# trouble, and here is where it's the least burden to remove them.
# Also, generators can somehow cause arcs from "enter" to "exit", so
# make sure we have at least one positive value.
unpredicted = (
e for e in executed
if e not in possible
and e[0] != e[1]
and (e[0] > 0 or e[1] > 0)
)
return sorted(unpredicted)
def _branch_lines(self):
"""Returns a list of line numbers that have more than one exit."""
return [l1 for l1,count in iitems(self.exit_counts) if count > 1]
def _total_branches(self):
"""How many total branches are there?"""
return sum(count for count in self.exit_counts.values() if count > 1)
@contract(returns='dict(int: list(int))')
def missing_branch_arcs(self):
"""Return arcs that weren't executed from branch lines.
Returns {l1:[l2a,l2b,...], ...}
"""
missing = self.arcs_missing()
branch_lines = set(self._branch_lines())
mba = collections.defaultdict(list)
for l1, l2 in missing:
if l1 in branch_lines:
mba[l1].append(l2)
return mba
@contract(returns='dict(int: tuple(int, int))')
def branch_stats(self):
"""Get stats about branches.
Returns a dict mapping line numbers to a tuple:
(total_exits, taken_exits).
"""
missing_arcs = self.missing_branch_arcs()
stats = {}
for lnum in self._branch_lines():
exits = self.exit_counts[lnum]
try:
missing = len(missing_arcs[lnum])
except KeyError:
missing = 0
stats[lnum] = (exits, exits - missing)
return stats
class Numbers(SimpleReprMixin):
"""The numerical results of measuring coverage.
This holds the basic statistics from `Analysis`, and is used to roll
up statistics across files.
"""
# A global to determine the precision on coverage percentages, the number
# of decimal places.
_precision = 0
_near0 = 1.0 # These will change when _precision is changed.
_near100 = 99.0
def __init__(self, n_files=0, n_statements=0, n_excluded=0, n_missing=0,
n_branches=0, n_partial_branches=0, n_missing_branches=0
):
self.n_files = n_files
self.n_statements = n_statements
self.n_excluded = n_excluded
self.n_missing = n_missing
self.n_branches = n_branches
self.n_partial_branches = n_partial_branches
self.n_missing_branches = n_missing_branches
def init_args(self):
"""Return a list for __init__(*args) to recreate this object."""
return [
self.n_files, self.n_statements, self.n_excluded, self.n_missing,
self.n_branches, self.n_partial_branches, self.n_missing_branches,
]
@classmethod
def set_precision(cls, precision):
"""Set the number of decimal places used to report percentages."""
assert 0 <= precision < 10
cls._precision = precision
cls._near0 = 1.0 / 10**precision
cls._near100 = 100.0 - cls._near0
@property
def n_executed(self):
"""Returns the number of executed statements."""
return self.n_statements - self.n_missing
@property
def n_executed_branches(self):
"""Returns the number of executed branches."""
return self.n_branches - self.n_missing_branches
@property
def pc_covered(self):
"""Returns a single percentage value for coverage."""
if self.n_statements > 0:
numerator, denominator = self.ratio_covered
pc_cov = (100.0 * numerator) / denominator
else:
pc_cov = 100.0
return pc_cov
@property
def pc_covered_str(self):
"""Returns the percent covered, as a string, without a percent sign.
Note that "0" is only returned when the value is truly zero, and "100"
is only returned when the value is truly 100. Rounding can never
result in either "0" or "100".
"""
pc = self.pc_covered
if 0 < pc < self._near0:
pc = self._near0
elif self._near100 < pc < 100:
pc = self._near100
else:
pc = round(pc, self._precision)
return "%.*f" % (self._precision, pc)
@classmethod
def pc_str_width(cls):
"""How many characters wide can pc_covered_str be?"""
width = 3 # "100"
if cls._precision > 0:
width += 1 + cls._precision
return width
@property
def ratio_covered(self):
"""Return a numerator and denominator for the coverage ratio."""
numerator = self.n_executed + self.n_executed_branches
denominator = self.n_statements + self.n_branches
return numerator, denominator
def __add__(self, other):
nums = Numbers()
nums.n_files = self.n_files + other.n_files
nums.n_statements = self.n_statements + other.n_statements
nums.n_excluded = self.n_excluded + other.n_excluded
nums.n_missing = self.n_missing + other.n_missing
nums.n_branches = self.n_branches + other.n_branches
nums.n_partial_branches = (
self.n_partial_branches + other.n_partial_branches
)
nums.n_missing_branches = (
self.n_missing_branches + other.n_missing_branches
)
return nums
def __radd__(self, other):
# Implementing 0+Numbers allows us to sum() a list of Numbers.
if other == 0:
return self
return NotImplemented
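# Illustrative roll-up sketch: because __radd__ accepts 0 + Numbers, per-file
# Numbers objects can be combined with plain sum(). The 'analyses' iterable is
# an assumed placeholder for a collection of Analysis results.
#
#     totals = sum(analysis.numbers for analysis in analyses)
#     print(totals.pc_covered_str)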
def _line_ranges(statements, lines):
"""Produce a list of ranges for `format_lines`."""
statements = sorted(statements)
lines = sorted(lines)
pairs = []
start = None
lidx = 0
for stmt in statements:
if lidx >= len(lines):
break
if stmt == lines[lidx]:
lidx += 1
if not start:
start = stmt
end = stmt
elif start:
pairs.append((start, end))
start = None
if start:
pairs.append((start, end))
return pairs
def format_lines(statements, lines, arcs=None):
"""Nicely format a list of line numbers.
Format a list of line numbers for printing by coalescing groups of lines as
long as the lines represent consecutive statements. This will coalesce
even if there are gaps between statements.
For example, if `statements` is [1,2,3,4,5,10,11,12,13,14] and
`lines` is [1,2,5,10,11,13,14] then the result will be "1-2, 5-11, 13-14".
Both `lines` and `statements` can be any iterable. All of the elements of
`lines` must be in `statements`, and all of the values must be positive
integers.
If `arcs` is provided, they are (start,[end,end,end]) pairs that will be
included in the output as long as start isn't in `lines`.
"""
line_items = [(pair[0], nice_pair(pair)) for pair in _line_ranges(statements, lines)]
if arcs:
line_exits = sorted(arcs)
for line, exits in line_exits:
for ex in sorted(exits):
if line not in lines:
dest = (ex if ex > 0 else "exit")
line_items.append((line, "%d->%s" % (line, dest)))
ret = ', '.join(t[-1] for t in sorted(line_items))
return ret
@contract(total='number', fail_under='number', precision=int, returns=bool)
def should_fail_under(total, fail_under, precision):
"""Determine if a total should fail due to fail-under.
`total` is a float, the coverage measurement total. `fail_under` is the
fail_under setting to compare with. `precision` is the number of digits
to consider after the decimal point.
Returns True if the total should fail.
"""
# We can never achieve higher than 100% coverage, or less than zero.
if not (0 <= fail_under <= 100.0):
msg = "fail_under={} is invalid. Must be between 0 and 100.".format(fail_under)
raise CoverageException(msg)
# Special case for fail_under=100, it must really be 100.
if fail_under == 100.0 and total != 100.0:
return True
return round(total, precision) < fail_under
|
from django.utils.functional import cached_property
from django.utils.text import format_lazy
from django.utils.translation import gettext
from django.utils.translation import gettext_lazy as _
from weblate.checks.models import CHECKS
class FilterRegistry:
@cached_property
def full_list(self):
result = [
("all", _("All strings"), ""),
("readonly", _("Read only strings"), "state:read-only"),
("nottranslated", _("Not translated strings"), "state:empty"),
("todo", _("Strings needing action"), "state:<translated"),
("translated", _("Translated strings"), "state:>=translated"),
("fuzzy", _("Strings marked for edit"), "state:needs-editing"),
("suggestions", _("Strings with suggestions"), "has:suggestion"),
("variants", _("Strings with variants"), "has:variant"),
("labels", _("Strings with labels"), "has:label"),
("context", _("Strings with context"), "has:context"),
(
"nosuggestions",
_("Strings needing action without suggestions"),
"state:<translated AND NOT has:suggestion",
),
("comments", _("Strings with comments"), "has:comment"),
("allchecks", _("Strings with any failing checks"), "has:check"),
(
"translated_checks",
_("Translated strings with any failing checks"),
"has:check AND state:>=translated",
),
("approved", _("Approved strings"), "state:approved"),
(
"approved_suggestions",
_("Approved strings with suggestions"),
"state:approved AND has:suggestion",
),
("unapproved", _("Strings waiting for review"), "state:translated"),
("unlabeled", _("Strings without a label"), "NOT has:label"),
("pluralized", _("Pluralized string"), "has:plural"),
]
result.extend(
(
CHECKS[check].url_id,
format_lazy(_("Failed check: {}"), CHECKS[check].name),
f"check:{check}",
)
for check in CHECKS
)
return result
@cached_property
def search_name(self):
return {x[2]: x[1] for x in self.full_list}
def get_search_name(self, query):
try:
return self.search_name[query.strip()]
except KeyError:
return query
@cached_property
def id_name(self):
return {x[0]: x[1] for x in self.full_list}
def get_filter_name(self, name):
try:
return self.id_name[name]
except KeyError:
if name.startswith("label:"):
return _("Labeled: {}").format(gettext(name[6:]))
raise
@cached_property
def id_query(self):
return {x[0]: x[2] for x in self.full_list}
def get_filter_query(self, name):
try:
return self.id_query[name]
except KeyError:
if name.startswith("label:"):
return 'label:"{}"'.format(name[6:])
raise
FILTERS = FilterRegistry()
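# Illustrative lookups (a sketch only; resolving CHECKS requires the Django
# app registry to be initialised, and "Backend" is just an example label name):
#
#     FILTERS.get_filter_query("allchecks")      # -> "has:check"
#     FILTERS.get_filter_query("label:Backend")  # -> 'label:"Backend"'
#     FILTERS.get_filter_name("fuzzy")           # -> "Strings marked for edit"
#     FILTERS.get_search_name("state:empty")     # -> "Not translated strings"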
def get_filter_choice(project=None):
"""Return all filtering choices."""
result = [
("all", _("All strings")),
("nottranslated", _("Not translated strings")),
("todo", _("Strings needing action")),
("translated", _("Translated strings")),
("fuzzy", _("Strings marked for edit")),
("suggestions", _("Strings with suggestions")),
("nosuggestions", _("Strings needing action without suggestions")),
("comments", _("Strings with comments")),
("allchecks", _("Strings with any failing checks")),
("approved", _("Approved strings")),
("approved_suggestions", _("Approved strings with suggestions")),
("unapproved", _("Strings waiting for review")),
]
result.extend(
(CHECKS[check].url_id, format_lazy(_("Failed check: {}"), CHECKS[check].name))
for check in CHECKS
)
if project is not None:
result.extend(
(f"label:{label}", format_lazy(_("Labeled: {}"), label))
for label in project.label_set.values_list("name", flat=True)
)
return result
|
import sys
import os
import mne
def run():
"""Run command."""
from mne.commands.utils import get_optparser
parser = get_optparser(__file__)
parser.add_option("-i", "--in", dest="in_fname",
help="Input raw FIF file", metavar="FILE")
parser.add_option("-o", dest="out_fname",
help="Output FIF file (if not set, suffix '_sss' will "
"be used)", metavar="FILE", default=None)
parser.add_option("--origin", dest="origin",
help="Head origin in mm, or a filename to read the "
"origin from. If not set it will be estimated from "
"headshape points", default=None)
parser.add_option("--origin-out", dest="origin_out",
help="Filename to use for computed origin", default=None)
parser.add_option("--frame", dest="frame", type="string",
help="Coordinate frame for head center ('device' or "
"'head')", default="device")
parser.add_option("--bad", dest="bad", type="string",
help="List of static bad channels",
default=None)
parser.add_option("--autobad", dest="autobad", type="string",
help="Set automated bad channel detection ('on', 'off', "
"'n')", default="off")
parser.add_option("--skip", dest="skip",
help="Skips raw data sequences, time intervals pairs in "
"sec, e.g.: 0 30 120 150", default=None)
parser.add_option("--force", dest="force", action="store_true",
help="Ignore program warnings",
default=False)
parser.add_option("--st", dest="st", action="store_true",
help="Apply the time-domain MaxST extension",
default=False)
parser.add_option("--buflen", dest="st_buflen", type="float",
help="MaxSt buffer length in sec",
default=16.0)
parser.add_option("--corr", dest="st_corr", type="float",
help="MaxSt subspace correlation",
default=0.96)
parser.add_option("--trans", dest="mv_trans",
help="Transforms the data into the coil definitions of "
"in_fname, or into the default frame", default=None)
parser.add_option("--movecomp", dest="mv_comp", action="store_true",
help="Estimates and compensates head movements in "
"continuous raw data", default=False)
parser.add_option("--headpos", dest="mv_headpos", action="store_true",
help="Estimates and stores head position parameters, "
"but does not compensate movements", default=False)
parser.add_option("--hp", dest="mv_hp", type="string",
help="Stores head position data in an ascii file",
default=None)
parser.add_option("--hpistep", dest="mv_hpistep", type="float",
help="Sets head position update interval in ms",
default=None)
parser.add_option("--hpisubt", dest="mv_hpisubt", type="string",
help="Subtracts hpi signals: sine amplitudes, amp + "
"baseline, or switch off", default=None)
parser.add_option("--nohpicons", dest="mv_hpicons", action="store_false",
help="Do not check initial consistency isotrak vs "
"hpifit", default=True)
parser.add_option("--linefreq", dest="linefreq", type="float",
help="Sets the basic line interference frequency (50 or "
"60 Hz)", default=None)
parser.add_option("--nooverwrite", dest="overwrite", action="store_false",
help="Do not overwrite output file if it already exists",
default=True)
parser.add_option("--args", dest="mx_args", type="string",
help="Additional command line arguments to pass to "
"MaxFilter", default="")
options, args = parser.parse_args()
in_fname = options.in_fname
if in_fname is None:
parser.print_help()
sys.exit(1)
out_fname = options.out_fname
origin = options.origin
origin_out = options.origin_out
frame = options.frame
bad = options.bad
autobad = options.autobad
skip = options.skip
force = options.force
st = options.st
st_buflen = options.st_buflen
st_corr = options.st_corr
mv_trans = options.mv_trans
mv_comp = options.mv_comp
mv_headpos = options.mv_headpos
mv_hp = options.mv_hp
mv_hpistep = options.mv_hpistep
mv_hpisubt = options.mv_hpisubt
mv_hpicons = options.mv_hpicons
linefreq = options.linefreq
overwrite = options.overwrite
mx_args = options.mx_args
if in_fname.endswith('_raw.fif') or in_fname.endswith('-raw.fif'):
prefix = in_fname[:-8]
else:
prefix = in_fname[:-4]
if out_fname is None:
if st:
out_fname = prefix + '_tsss.fif'
else:
out_fname = prefix + '_sss.fif'
if origin is not None and os.path.exists(origin):
with open(origin, 'r') as fid:
origin = fid.readlines()[0].strip()
origin = mne.preprocessing.apply_maxfilter(
in_fname, out_fname, origin, frame,
bad, autobad, skip, force, st, st_buflen, st_corr, mv_trans,
mv_comp, mv_headpos, mv_hp, mv_hpistep, mv_hpisubt, mv_hpicons,
linefreq, mx_args, overwrite)
if origin_out is not None:
with open(origin_out, 'w') as fid:
fid.write(origin + '\n')
mne.utils.run_command_if_main()
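# Illustrative invocation (a sketch: the input filename is hypothetical and
# Elekta/Neuromag MaxFilter must be available on the system):
#
#     $ mne maxfilter -i sample_audvis_raw.fif --st --corr 0.98 --movecomp
#
# With --st set and no -o given, the output would be written to
# sample_audvis_tsss.fif alongside the input file.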
|
import collections
import csv
import json
import re
import tempfile
import unittest
import uuid
from absl import flags
import mock
from perfkitbenchmarker import pkb # pylint: disable=unused-import
from perfkitbenchmarker import publisher
from perfkitbenchmarker import sample
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers.gcp import util
import six
FLAGS = flags.FLAGS
FLAGS.mark_as_parsed()
class PrettyPrintStreamPublisherTestCase(unittest.TestCase):
def testDefaultsToStdout(self):
with mock.patch('sys.stdout') as mock_stdout:
instance = publisher.PrettyPrintStreamPublisher()
self.assertEqual(mock_stdout, instance.stream)
def testSucceedsWithNoSamples(self):
stream = six.StringIO()
instance = publisher.PrettyPrintStreamPublisher(stream)
instance.PublishSamples([])
self.assertRegexpMatches(
stream.getvalue(), r'^\s*-+PerfKitBenchmarker\sResults\sSummary-+\s*$')
def testWritesToStream(self):
stream = six.StringIO()
instance = publisher.PrettyPrintStreamPublisher(stream)
samples = [{'test': 'testb', 'metric': '1', 'value': 1.0, 'unit': 'MB',
'metadata': {}},
{'test': 'testb', 'metric': '2', 'value': 14.0, 'unit': 'MB',
'metadata': {}},
{'test': 'testa', 'metric': '3', 'value': 47.0, 'unit': 'us',
'metadata': {}}]
instance.PublishSamples(samples)
value = stream.getvalue()
self.assertRegexpMatches(value, re.compile(r'TESTA.*TESTB', re.DOTALL))
class LogPublisherTestCase(unittest.TestCase):
def testCallsLoggerAtCorrectLevel(self):
logger = mock.MagicMock()
level = mock.MagicMock()
instance = publisher.LogPublisher(logger=logger, level=level)
instance.PublishSamples([{'test': 'testa'}, {'test': 'testb'}])
logger.log.assert_called_with(level, mock.ANY)
class NewlineDelimitedJSONPublisherTestCase(unittest.TestCase):
def setUp(self):
self.fp = tempfile.NamedTemporaryFile(mode='w+',
prefix='perfkit-test-',
suffix='.json')
self.addCleanup(self.fp.close)
self.instance = publisher.NewlineDelimitedJSONPublisher(self.fp.name)
def testEmptyInput(self):
self.instance.PublishSamples([])
self.assertEqual('', self.fp.read())
def testMetadataConvertedToLabels(self):
samples = [{'test': 'testa',
'metadata': collections.OrderedDict([('key', 'value'),
('foo', 'bar')])}]
self.instance.PublishSamples(samples)
d = json.load(self.fp)
self.assertDictEqual({'test': 'testa', 'labels': '|foo:bar|,|key:value|'},
d)
def testJSONRecordPerLine(self):
samples = [{'test': 'testa', 'metadata': {'key': 'val'}},
{'test': 'testb', 'metadata': {'key2': 'val2'}}]
self.instance.PublishSamples(samples)
self.assertRaises(ValueError, json.load, self.fp)
self.fp.seek(0)
result = [json.loads(i) for i in self.fp]
self.assertListEqual([{u'test': u'testa', u'labels': u'|key:val|'},
{u'test': u'testb', u'labels': u'|key2:val2|'}],
result)
class BigQueryPublisherTestCase(unittest.TestCase):
def setUp(self):
p = mock.patch(publisher.__name__ + '.vm_util', spec=publisher.vm_util)
self.mock_vm_util = p.start()
publisher.vm_util.NamedTemporaryFile = vm_util.NamedTemporaryFile
self.mock_vm_util.GetTempDir.return_value = tempfile.gettempdir()
self.addCleanup(p.stop)
self.samples = [{'test': 'testa', 'metadata': {}},
{'test': 'testb', 'metadata': {}}]
self.table = 'samples_mart.results'
def testNoSamples(self):
instance = publisher.BigQueryPublisher(self.table)
instance.PublishSamples([])
self.assertEqual([], self.mock_vm_util.IssueRetryableCommand.mock_calls)
def testNoProject(self):
instance = publisher.BigQueryPublisher(self.table)
instance.PublishSamples(self.samples)
self.mock_vm_util.IssueRetryableCommand.assert_called_once_with(
['bq',
'load',
'--autodetect',
'--source_format=NEWLINE_DELIMITED_JSON',
self.table,
mock.ANY])
def testServiceAccountFlags_MissingPrivateKey(self):
self.assertRaises(ValueError,
publisher.BigQueryPublisher,
self.table,
service_account=mock.MagicMock())
def testServiceAccountFlags_MissingServiceAccount(self):
self.assertRaises(ValueError,
publisher.BigQueryPublisher,
self.table,
service_account_private_key_file=mock.MagicMock())
def testServiceAccountFlags_BothSpecified(self):
instance = publisher.BigQueryPublisher(
self.table,
service_account=mock.MagicMock(),
service_account_private_key_file=mock.MagicMock())
instance.PublishSamples(self.samples) # No error
self.mock_vm_util.IssueRetryableCommand.assert_called_once_with(mock.ANY)
class CloudStoragePublisherTestCase(unittest.TestCase):
def setUp(self):
p = mock.patch(publisher.__name__ + '.vm_util', spec=publisher.vm_util)
self.mock_vm_util = p.start()
publisher.vm_util.NamedTemporaryFile = vm_util.NamedTemporaryFile
self.mock_vm_util.GetTempDir.return_value = tempfile.gettempdir()
self.addCleanup(p.stop)
p = mock.patch(publisher.__name__ + '.time', spec=publisher.time)
self.mock_time = p.start()
self.addCleanup(p.stop)
p = mock.patch(publisher.__name__ + '.uuid', spec=publisher.uuid)
self.mock_uuid = p.start()
self.addCleanup(p.stop)
self.samples = [{'test': 'testa', 'metadata': {}},
{'test': 'testb', 'metadata': {}}]
def testPublishSamples(self):
self.mock_time.time.return_value = 1417647763.387665
self.mock_uuid.uuid4.return_value = uuid.UUID(
'be428eb3-a54a-4615-b7ca-f962b729c7ab')
instance = publisher.CloudStoragePublisher('test-bucket')
instance.PublishSamples(self.samples)
self.mock_vm_util.IssueRetryableCommand.assert_called_once_with(
['gsutil', 'cp', mock.ANY,
'gs://test-bucket/141764776338_be428eb'])
class SampleCollectorTestCase(unittest.TestCase):
def setUp(self):
self.instance = publisher.SampleCollector(publishers=[])
self.sample = sample.Sample('widgets', 100, 'oz', {'foo': 'bar'})
self.benchmark = 'test!'
self.benchmark_spec = mock.MagicMock()
p = mock.patch(publisher.__name__ + '.FLAGS')
p2 = mock.patch(util.__name__ + '.GetDefaultProject')
p2.start()
self.addCleanup(p2.stop)
self.mock_flags = p.start()
self.addCleanup(p.stop)
self.mock_flags.product_name = 'PerfKitBenchmarker'
def _VerifyResult(self, contains_metadata=True):
self.assertEqual(1, len(self.instance.samples))
collector_sample = self.instance.samples[0]
metadata = collector_sample.pop('metadata')
self.assertDictContainsSubset(
{
'value': 100,
'metric': 'widgets',
'unit': 'oz',
'test': self.benchmark,
'product_name': 'PerfKitBenchmarker'
},
collector_sample)
if contains_metadata:
self.assertDictContainsSubset({'foo': 'bar'}, metadata)
else:
self.assertNotIn('foo', metadata)
def testAddSamples_SampleClass(self):
samples = [self.sample]
self.instance.AddSamples(samples, self.benchmark, self.benchmark_spec)
self._VerifyResult()
def testAddSamples_WithTimestamp(self):
timestamp_sample = sample.Sample('widgets', 100, 'oz', {}, 1.0)
samples = [timestamp_sample]
self.instance.AddSamples(samples, self.benchmark, self.benchmark_spec)
self.assertDictContainsSubset(
{
'timestamp': 1.0
},
self.instance.samples[0])
class DefaultMetadataProviderTestCase(unittest.TestCase):
def setUp(self):
p = mock.patch(publisher.__name__ + '.FLAGS')
self.mock_flags = p.start()
self.mock_flags.configure_mock(metadata=[],
num_striped_disks=1,
sysctl=[],
set_files=[],
simulate_maintenance=False)
self.addCleanup(p.stop)
self.maxDiff = None
p = mock.patch(publisher.__name__ + '.version',
VERSION='v1')
p.start()
self.addCleanup(p.stop)
# Need iops=None in self.mock_disk because otherwise doing
# mock_disk.iops returns a mock.MagicMock, which is not None,
# which defeats the getattr check in
# publisher.DefaultMetadataProvider.
self.mock_disk = mock.MagicMock(disk_type='disk-type',
disk_size=20, num_striped_disks=1,
iops=None)
self.disk_metadata = {
'type': self.mock_disk.disk_type,
'size': self.mock_disk.disk_size,
'num_stripes': self.mock_disk.num_striped_disks,
}
self.mock_disk.GetResourceMetadata.return_value = self.disk_metadata
self.mock_vm = mock.MagicMock(CLOUD='GCP',
zone='us-central1-a',
machine_type='n1-standard-1',
image='ubuntu-14-04',
scratch_disks=[],
hostname='Hostname')
self.mock_vm.GetResourceMetadata.return_value = {
'machine_type': self.mock_vm.machine_type,
'image': self.mock_vm.image,
'zone': self.mock_vm.zone,
'cloud': self.mock_vm.CLOUD,
}
self.mock_spec = mock.MagicMock(vm_groups={'default': [self.mock_vm]},
vms=[self.mock_vm])
self.default_meta = {'perfkitbenchmarker_version': 'v1',
'cloud': self.mock_vm.CLOUD,
'zone': 'us-central1-a',
'machine_type': self.mock_vm.machine_type,
'image': self.mock_vm.image,
'vm_count': 1,
'hostnames': 'Hostname'}
def _RunTest(self, spec, expected, input_metadata=None):
input_metadata = input_metadata or {}
instance = publisher.DefaultMetadataProvider()
result = instance.AddMetadata(input_metadata, self.mock_spec)
self.assertIsNot(input_metadata, result,
msg='Input metadata was not copied.')
self.assertDictContainsSubset(expected, result)
def testAddMetadata_ScratchDiskUndefined(self):
self._RunTest(self.mock_spec, self.default_meta)
def testAddMetadata_NoScratchDisk(self):
self.mock_spec.scratch_disk = False
self._RunTest(self.mock_spec, self.default_meta)
def testAddMetadata_WithScratchDisk(self):
self.mock_vm.configure_mock(scratch_disks=[self.mock_disk])
expected = self.default_meta.copy()
expected.update(data_disk_0_size=20,
data_disk_0_type='disk-type',
data_disk_count=1,
data_disk_0_num_stripes=1)
self._RunTest(self.mock_spec, expected)
def testAddMetadata_DiskSizeNone(self):
# This situation can happen with static VMs
self.disk_metadata['size'] = None
self.mock_vm.configure_mock(scratch_disks=[self.mock_disk])
expected = self.default_meta.copy()
expected.update(data_disk_0_size=None,
data_disk_0_type='disk-type',
data_disk_count=1,
data_disk_0_num_stripes=1)
self._RunTest(self.mock_spec, expected)
def testAddMetadata_PIOPS(self):
self.disk_metadata['iops'] = 1000
self.mock_vm.configure_mock(scratch_disks=[self.mock_disk])
expected = self.default_meta.copy()
expected.update(data_disk_0_size=20,
data_disk_0_type='disk-type',
data_disk_count=1,
data_disk_0_num_stripes=1,
data_disk_0_iops=1000)
self._RunTest(self.mock_spec, expected)
def testDiskMetadata(self):
self.disk_metadata['foo'] = 'bar'
self.mock_vm.configure_mock(scratch_disks=[self.mock_disk])
expected = self.default_meta.copy()
expected.update(data_disk_0_size=20,
data_disk_0_type='disk-type',
data_disk_count=1,
data_disk_0_num_stripes=1,
data_disk_0_foo='bar')
self._RunTest(self.mock_spec, expected)
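# Note on the expectations above: DefaultMetadataProvider flattens per-disk
# metadata by prefixing every key returned by GetResourceMetadata() with
# 'data_disk_<index>_' (e.g. 'size' -> 'data_disk_0_size',
# 'num_stripes' -> 'data_disk_0_num_stripes') and adds a 'data_disk_count'
# entry recording the number of scratch disks.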
class CSVPublisherTestCase(unittest.TestCase):
def setUp(self):
self.tf = tempfile.NamedTemporaryFile(mode='w+',
prefix='perfkit-csv-publisher',
suffix='.csv')
self.addCleanup(self.tf.close)
def testWritesToStream(self):
instance = publisher.CSVPublisher(self.tf.name)
samples = [{'test': 'testb', 'metric': '1', 'value': 1.0, 'unit': 'MB',
'metadata': {}},
{'test': 'testb', 'metric': '2', 'value': 14.0, 'unit': 'MB',
'metadata': {}},
{'test': 'testa', 'metric': '3', 'value': 47.0, 'unit': 'us',
'metadata': {}}]
instance.PublishSamples(samples)
self.tf.seek(0)
rows = list(csv.DictReader(self.tf))
six.assertCountEqual(self, ['1', '2', '3'], [i['metric'] for i in rows])
def testUsesUnionOfMetaKeys(self):
instance = publisher.CSVPublisher(self.tf.name)
samples = [{'test': 'testb', 'metric': '1', 'value': 1.0, 'unit': 'MB',
'metadata': {'key1': 'value1'}},
{'test': 'testb', 'metric': '2', 'value': 14.0, 'unit': 'MB',
'metadata': {'key1': 'value2'}},
{'test': 'testa', 'metric': '3', 'value': 47.0, 'unit': 'us',
'metadata': {'key3': 'value3'}}]
instance.PublishSamples(samples)
self.tf.seek(0)
reader = csv.DictReader(self.tf)
rows = list(reader)
self.assertEqual(['key1', 'key3'], reader.fieldnames[-2:])
self.assertEqual(3, len(rows))
class InfluxDBPublisherTestCase(unittest.TestCase):
def setUp(self):
self.db_name = 'test_db'
self.db_uri = 'test'
self.test_db = publisher.InfluxDBPublisher(self.db_uri, self.db_name)
def testFormatToKeyValue(self):
sample_1 = {'test': 'testa', 'metric': '3', 'official': 47.0,
'value': 'non', 'unit': 'us', 'owner': 'Rackspace',
'run_uri': '5rtw', 'sample_uri': '5r', 'timestamp': 123}
sample_2 = {'test': 'testb', 'metric': '2', 'official': 14.0,
'value': 'non', 'unit': 'MB', 'owner': 'Rackspace',
'run_uri': 'bba3', 'sample_uri': 'bb',
'timestamp': 55}
sample_3 = {'test': 'testc', 'metric': '1', 'official': 1.0,
'value': 'non', 'unit': 'MB', 'owner': 'Rackspace',
'run_uri': '323', 'sample_uri': '33',
'timestamp': 123}
sample_4 = {'test': 'testc', 'metric': 'some,metric', 'official': 1.0,
'value': 'non', 'unit': 'Some MB', 'owner': 'Rackspace',
'run_uri': '323', 'sample_uri': '33',
'timestamp': 123}
sample_5 = {'test': 'testc', 'metric': 'some,metric', 'official': 1.0,
'value': 'non', 'unit': '', 'owner': 'Rackspace',
'run_uri': '323', 'sample_uri': '',
'timestamp': 123}
sample_1_formatted_key_value = self.test_db._FormatToKeyValue(sample_1)
sample_2_formatted_key_value = self.test_db._FormatToKeyValue(sample_2)
sample_3_formatted_key_value = self.test_db._FormatToKeyValue(sample_3)
sample_4_formatted_key_value = self.test_db._FormatToKeyValue(sample_4)
sample_5_formatted_key_value = self.test_db._FormatToKeyValue(sample_5)
expected_sample_1 = ['owner=Rackspace', 'unit=us', 'run_uri=5rtw',
'test=testa', 'timestamp=123', 'metric=3',
'official=47.0', 'value=non', 'sample_uri=5r']
expected_sample_2 = ['owner=Rackspace', 'unit=MB', 'run_uri=bba3',
'test=testb', 'timestamp=55', 'metric=2',
'official=14.0', 'value=non', 'sample_uri=bb']
expected_sample_3 = ['owner=Rackspace', 'unit=MB', 'run_uri=323',
'test=testc', 'timestamp=123', 'metric=1',
'official=1.0', 'value=non', 'sample_uri=33']
expected_sample_4 = ['owner=Rackspace', 'unit=Some\\ MB', 'run_uri=323',
'test=testc', 'timestamp=123', 'metric=some\\,metric',
'official=1.0', 'value=non', 'sample_uri=33']
expected_sample_5 = ['owner=Rackspace', 'unit=\\"\\"', 'run_uri=323',
'test=testc', 'timestamp=123', 'metric=some\\,metric',
'official=1.0', 'value=non', 'sample_uri=\\"\\"']
six.assertCountEqual(self, sample_1_formatted_key_value, expected_sample_1)
six.assertCountEqual(self, sample_2_formatted_key_value, expected_sample_2)
six.assertCountEqual(self, sample_3_formatted_key_value, expected_sample_3)
six.assertCountEqual(self, sample_4_formatted_key_value, expected_sample_4)
six.assertCountEqual(self, sample_5_formatted_key_value, expected_sample_5)
def testConstructSample(self):
sample_with_metadata = {
'test': 'testc', 'metric': '1', 'official': 1.0,
'value': 'non', 'unit': 'MB', 'owner': 'Rackspace',
'product_name': 'PerfKitBenchmarker',
'run_uri': '323', 'sample_uri': '33',
'timestamp': 123,
'metadata': collections.OrderedDict([('info', '1'),
('more_info', '2'),
('bar', 'foo')])}
constructed_sample = self.test_db._ConstructSample(sample_with_metadata)
sample_results = ('perfkitbenchmarker,test=testc,official=1.0,'
'owner=Rackspace,run_uri=323,sample_uri=33,'
'metric=1,unit=MB,product_name=PerfKitBenchmarker,info=1,more_info=2,bar=foo '
'value=non 123000000000')
self.assertEqual(constructed_sample, sample_results)
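# The expected string above follows the InfluxDB line protocol:
#   <measurement>,<tag_key>=<tag_value>,... <field_key>=<field_value> <timestamp>
# Here 'perfkitbenchmarker' is the measurement, everything up to the first
# space ('test=testc,...,bar=foo') is the comma-separated tag set,
# 'value=non' is the field set, and '123000000000' is the timestamp in
# nanoseconds (the sample's 123-second timestamp multiplied by 1e9).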
@mock.patch.object(publisher.InfluxDBPublisher, '_Publish')
def testPublishSamples(self, mock_publish_method):
samples = [
{
'test': 'testc', 'metric': '1', 'official': 1.0,
'value': 'non', 'unit': 'MB', 'owner': 'Rackspace',
'run_uri': '323', 'sample_uri': '33', 'timestamp': 123,
'metadata': collections.OrderedDict([('info', '1'),
('more_info', '2'),
('bar', 'foo')])
},
{
'test': 'testb', 'metric': '2', 'official': 14.0,
'value': 'non', 'unit': 'MB', 'owner': 'Rackspace',
'run_uri': 'bba3', 'sample_uri': 'bb', 'timestamp': 55,
'metadata': collections.OrderedDict()
},
{
'test': 'testa', 'metric': '3', 'official': 47.0,
'value': 'non', 'unit': 'us', 'owner': 'Rackspace',
'run_uri': '5rtw', 'sample_uri': '5r', 'timestamp': 123
}
]
expected = [
('perfkitbenchmarker,test=testc,official=1.0,owner=Rackspace,'
'run_uri=323,sample_uri=33,metric=1,unit=MB,product_name=PerfKitBenchmarker,info=1,more_info=2,'
'bar=foo value=non 123000000000'),
('perfkitbenchmarker,test=testb,official=14.0,owner=Rackspace,'
'run_uri=bba3,sample_uri=bb,metric=2,unit=MB,product_name=PerfKitBenchmarker value=non 55000000000'),
('perfkitbenchmarker,test=testa,official=47.0,owner=Rackspace,'
'run_uri=5rtw,sample_uri=5r,metric=3,unit=us,product_name=PerfKitBenchmarker value=non 123000000000')
]
mock_publish_method.return_value = None
self.test_db.PublishSamples(samples)
mock_publish_method.assert_called_once_with(expected)
@mock.patch.object(publisher.InfluxDBPublisher, '_WriteData')
@mock.patch.object(publisher.InfluxDBPublisher, '_CreateDB')
def testPublish(self, mock_create_db, mock_write_data):
formatted_samples = [
('perfkitbenchmarker,test=testc,official=1.0,owner=Rackspace,'
'run_uri=323,sample_uri=33,metric=1,unit=MB,info=1,more_info=2,'
'bar=foo value=non 123000000000'),
('perfkitbenchmarker,test=testb,official=14.0,owner=Rackspace,'
'run_uri=bba3,sample_uri=bb,metric=2,unit=MB value=non 55000000000'),
('perfkitbenchmarker,test=testa,official=47.0,owner=Rackspace,'
'run_uri=5rtw,sample_uri=5r,metric=3,unit=us value=non 123000000000')
]
expected_output = ('perfkitbenchmarker,test=testc,official=1.0,'
'owner=Rackspace,run_uri=323,sample_uri=33,metric=1,'
'unit=MB,info=1,more_info=2,bar=foo value=non '
'123000000000\nperfkitbenchmarker,test=testb,'
'official=14.0,owner=Rackspace,run_uri=bba3,'
'sample_uri=bb,metric=2,unit=MB value=non 55000000000\n'
'perfkitbenchmarker,test=testa,official=47.0,'
'owner=Rackspace,run_uri=5rtw,sample_uri=5r,'
'metric=3,unit=us value=non 123000000000')
mock_create_db.return_value = None
mock_write_data.return_value = None
self.test_db._Publish(formatted_samples)
mock_create_db.assert_called_once()
mock_write_data.assert_called_once_with(expected_output)
if __name__ == '__main__':
unittest.main()
|
import functools
import os
import numpy as np
import pandas as pd
import pytz
from qstrader import settings
class CSVDailyBarDataSource(object):
"""
Encapsulates loading, preparation and querying of CSV files of
daily 'bar' OHLCV data. The CSV files are converted into an intraday
timestamped Pandas DataFrame with opening and closing prices.
Optionally utilises adjusted closing prices (if available) to
adjust both the close and open.
Parameters
----------
csv_dir : `str`
The full path to the directory where the CSV is located.
asset_type : `str`
The asset type that the price/volume data is for.
TODO: Unused at this stage and currently hardcoded to Equity.
adjust_prices : `Boolean`, optional
Whether to utilise corporate-action adjusted prices for both
the open and closing prices. Defaults to True.
csv_symbols : `list`, optional
An optional list of CSV symbols to restrict the data source to.
The alternative is to convert all CSVs found within the
provided directory.
"""
def __init__(self, csv_dir, asset_type, adjust_prices=True, csv_symbols=None):
self.csv_dir = csv_dir
self.asset_type = asset_type
self.adjust_prices = adjust_prices
self.csv_symbols = csv_symbols
self.asset_bar_frames = self._load_csvs_into_dfs()
self.asset_bid_ask_frames = self._convert_bars_into_bid_ask_dfs()
def _obtain_asset_csv_files(self):
"""
Obtain the list of all CSV filenames in the CSV directory.
Returns
-------
`list[str]`
The list of all CSV filenames.
"""
return [
file for file in os.listdir(self.csv_dir)
if file.endswith('.csv')
]
def _obtain_asset_symbol_from_filename(self, csv_file):
"""
Return the QSTrader symbology for the asset.
TODO: Remove hardcoding to Equity asset types.
Parameters
----------
csv_file : `str`
The name of the CSV file.
Returns
-------
`str`
The QSTrader symbology of the asset, e.g. 'EQ:SPY'.
"""
return 'EQ:%s' % csv_file.replace('.csv', '')
def _load_csv_into_df(self, csv_file):
"""
Loads the CSV file into a Pandas DataFrame with dates parsed,
sorted on datetime localised to UTC.
Parameters
----------
csv_file : `str`
The name of the CSV file.
Returns
-------
`pd.DataFrame`
DataFrame of the CSV file with timestamps localised to UTC.
"""
csv_df = pd.read_csv(
os.path.join(self.csv_dir, csv_file),
index_col='Date',
parse_dates=True
).sort_index()
# Ensure all timestamps are set to UTC for consistency
csv_df = csv_df.set_index(csv_df.index.tz_localize(pytz.UTC))
return csv_df
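# For reference, each CSV is expected to be a daily bar file indexed by a
# 'Date' column. A hypothetical file might look like the following (extra
# columns such as High, Low or Volume may be present but are not required
# by this class; only 'Open', 'Close' and, when adjust_prices=True,
# 'Adj Close' are used downstream):
#
#   Date,Open,Close,Adj Close
#   2019-01-02,154.89,157.92,150.55
#   2019-01-03,143.98,142.19,135.55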
def _load_csvs_into_dfs(self):
"""
Load all CSVs in the CSV directory into Pandas DataFrames.
Returns
-------
`dict{pd.DataFrame}`
The asset-symbol keyed dictionary of Pandas DataFrames
containing the timestamped price/volume data.
"""
if settings.PRINT_EVENTS:
print("Loading CSV files into DataFrames...")
if self.csv_symbols is not None:
# TODO/NOTE: This assumes existence of CSV symbols
# within the provided directory.
csv_files = ['%s.csv' % symbol for symbol in self.csv_symbols]
else:
csv_files = self._obtain_asset_csv_files()
asset_frames = {}
for csv_file in csv_files:
asset_symbol = self._obtain_asset_symbol_from_filename(csv_file)
if settings.PRINT_EVENTS:
print("Loading CSV file for symbol '%s'..." % asset_symbol)
csv_df = self._load_csv_into_df(csv_file)
asset_frames[asset_symbol] = csv_df
return asset_frames
def _convert_bar_frame_into_bid_ask_df(self, bar_df):
"""
Converts the DataFrame from daily OHLCV 'bars' into a DataFrame
of open and closing price timestamps.
Optionally adjusts the open/close prices for corporate actions
using any provided 'Adjusted Close' column.
Parameters
----------
`pd.DataFrame`
The daily 'bar' OHLCV DataFrame.
Returns
-------
`pd.DataFrame`
The individually-timestamped open/closing prices, optionally
adjusted for corporate actions.
"""
bar_df = bar_df.sort_index()
if self.adjust_prices:
if 'Adj Close' not in bar_df.columns:
raise ValueError(
"Unable to locate Adjusted Close pricing column in CSV data file. "
"Prices cannot be adjusted. Exiting."
)
# Restrict solely to the open/closing prices
oc_df = bar_df.loc[:, ['Open', 'Close', 'Adj Close']]
# Adjust opening prices
oc_df['Adj Open'] = (oc_df['Adj Close'] / oc_df['Close']) * oc_df['Open']
oc_df = oc_df.loc[:, ['Adj Open', 'Adj Close']]
oc_df.columns = ['Open', 'Close']
else:
oc_df = bar_df.loc[:, ['Open', 'Close']]
# Convert bars into separate rows for open/close prices
# appropriately timestamped
seq_oc_df = oc_df.T.unstack(level=0).reset_index()
seq_oc_df.columns = ['Date', 'Market', 'Price']
seq_oc_df.loc[seq_oc_df['Market'] == 'Open', 'Date'] += pd.Timedelta(hours=14, minutes=30)
seq_oc_df.loc[seq_oc_df['Market'] == 'Close', 'Date'] += pd.Timedelta(hours=21, minutes=00)
# TODO: Unable to distinguish between Bid/Ask, implement later
dp_df = seq_oc_df[['Date', 'Price']].copy()
dp_df['Bid'] = dp_df['Price']
dp_df['Ask'] = dp_df['Price']
dp_df = dp_df.loc[:, ['Date', 'Bid', 'Ask']].fillna(method='ffill').set_index('Date').sort_index()
return dp_df
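# Worked example (hypothetical numbers): a bar dated 2019-01-02 with
# Open=100.0, Close=102.0 and Adj Close=51.0 yields
# Adj Open = (51.0 / 102.0) * 100.0 = 50.0 and is emitted as two rows:
#   2019-01-02 14:30:00+00:00  Bid=50.0  Ask=50.0   (market open)
#   2019-01-02 21:00:00+00:00  Bid=51.0  Ask=51.0   (market close)
# Bid and Ask are identical because daily bars carry no spread information.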
def _convert_bars_into_bid_ask_dfs(self):
"""
Convert all of the daily OHLCV 'bar' based DataFrames into
individually-timestamped open/closing price DataFrames.
Returns
-------
`dict{pd.DataFrame}`
The converted DataFrames.
"""
if settings.PRINT_EVENTS:
print("Adjusting pricing in CSV files...")
asset_bid_ask_frames = {}
for asset_symbol, bar_df in self.asset_bar_frames.items():
if settings.PRINT_EVENTS:
print("Adjusting CSV file for symbol '%s'..." % asset_symbol)
asset_bid_ask_frames[asset_symbol] = \
self._convert_bar_frame_into_bid_ask_df(bar_df)
return asset_bid_ask_frames
@functools.lru_cache(maxsize=1024 * 1024)
def get_bid(self, dt, asset):
"""
Obtain the bid price of an asset at the provided timestamp.
Parameters
----------
dt : `pd.Timestamp`
When to obtain the bid price for.
asset : `str`
The asset symbol to obtain the bid price for.
Returns
-------
`float`
The bid price.
"""
bid_ask_df = self.asset_bid_ask_frames[asset]
try:
bid = bid_ask_df.iloc[bid_ask_df.index.get_loc(dt, method='pad')]['Bid']
except KeyError: # Before start date
return np.NaN
return bid
@functools.lru_cache(maxsize=1024 * 1024)
def get_ask(self, dt, asset):
"""
Obtain the ask price of an asset at the provided timestamp.
Parameters
----------
dt : `pd.Timestamp`
When to obtain the ask price for.
asset : `str`
The asset symbol to obtain the ask price for.
Returns
-------
`float`
The ask price.
"""
bid_ask_df = self.asset_bid_ask_frames[asset]
try:
ask = bid_ask_df.iloc[bid_ask_df.index.get_loc(dt, method='pad')]['Ask']
except KeyError: # Before start date
return np.NaN
return ask
def get_assets_historical_closes(self, start_dt, end_dt, assets):
"""
Obtain a multi-asset historical range of closing prices as a DataFrame,
indexed by timestamp with asset symbols as columns.
Parameters
----------
start_dt : `pd.Timestamp`
The starting datetime of the range to obtain.
end_dt : `pd.Timestamp`
The ending datetime of the range to obtain.
assets : `list[str]`
The list of asset symbols to obtain closing prices for.
Returns
-------
`pd.DataFrame`
The multi-asset closing prices DataFrame.
"""
close_series = []
for asset in assets:
if asset in self.asset_bar_frames.keys():
asset_close_prices = self.asset_bar_frames[asset][['Close']]
asset_close_prices.columns = [asset]
close_series.append(asset_close_prices)
prices_df = pd.concat(close_series, axis=1).dropna(how='all')
prices_df = prices_df.loc[start_dt:end_dt]
return prices_df
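# Hedged usage sketch (kept as comments; it assumes a hypothetical './data'
# directory containing an 'SPY.csv' daily bar file, and the dates below are
# illustrative only):
#
#   data_source = CSVDailyBarDataSource('./data', 'Equity', adjust_prices=True,
#                                       csv_symbols=['SPY'])
#   closes = data_source.get_assets_historical_closes(
#       pd.Timestamp('2019-01-01', tz=pytz.UTC),
#       pd.Timestamp('2019-12-31', tz=pytz.UTC),
#       ['EQ:SPY'])
#   bid = data_source.get_bid(pd.Timestamp('2019-06-03 21:00', tz=pytz.UTC),
#                             'EQ:SPY')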
|
from homeassistant import config_entries, data_entry_flow, setup
from homeassistant.components import ssdp
from homeassistant.components.isy994.config_flow import CannotConnect
from homeassistant.components.isy994.const import (
CONF_IGNORE_STRING,
CONF_RESTORE_LIGHT_STATE,
CONF_SENSOR_STRING,
CONF_TLS_VER,
CONF_VAR_SENSOR_STRING,
DOMAIN,
ISY_URL_POSTFIX,
UDN_UUID_PREFIX,
)
from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_SSDP
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers.typing import HomeAssistantType
from tests.async_mock import patch
from tests.common import MockConfigEntry
MOCK_HOSTNAME = "1.1.1.1"
MOCK_USERNAME = "test-username"
MOCK_PASSWORD = "test-password"
# Don't use the integration defaults here to make sure they're being set correctly.
MOCK_TLS_VERSION = 1.2
MOCK_IGNORE_STRING = "{IGNOREME}"
MOCK_RESTORE_LIGHT_STATE = True
MOCK_SENSOR_STRING = "IMASENSOR"
MOCK_VARIABLE_SENSOR_STRING = "HomeAssistant."
MOCK_USER_INPUT = {
CONF_HOST: f"http://{MOCK_HOSTNAME}",
CONF_USERNAME: MOCK_USERNAME,
CONF_PASSWORD: MOCK_PASSWORD,
CONF_TLS_VER: MOCK_TLS_VERSION,
}
MOCK_IMPORT_WITH_SSL = {
CONF_HOST: f"https://{MOCK_HOSTNAME}",
CONF_USERNAME: MOCK_USERNAME,
CONF_PASSWORD: MOCK_PASSWORD,
CONF_TLS_VER: MOCK_TLS_VERSION,
}
MOCK_IMPORT_BASIC_CONFIG = {
CONF_HOST: f"http://{MOCK_HOSTNAME}",
CONF_USERNAME: MOCK_USERNAME,
CONF_PASSWORD: MOCK_PASSWORD,
}
MOCK_IMPORT_FULL_CONFIG = {
CONF_HOST: f"http://{MOCK_HOSTNAME}",
CONF_USERNAME: MOCK_USERNAME,
CONF_PASSWORD: MOCK_PASSWORD,
CONF_IGNORE_STRING: MOCK_IGNORE_STRING,
CONF_RESTORE_LIGHT_STATE: MOCK_RESTORE_LIGHT_STATE,
CONF_SENSOR_STRING: MOCK_SENSOR_STRING,
CONF_TLS_VER: MOCK_TLS_VERSION,
CONF_VAR_SENSOR_STRING: MOCK_VARIABLE_SENSOR_STRING,
}
MOCK_DEVICE_NAME = "Name of the device"
MOCK_UUID = "CE:FB:72:31:B7:B9"
MOCK_VALIDATED_RESPONSE = {"name": MOCK_DEVICE_NAME, "uuid": MOCK_UUID}
PATCH_CONFIGURATION = "homeassistant.components.isy994.config_flow.Configuration"
PATCH_CONNECTION = "homeassistant.components.isy994.config_flow.Connection"
PATCH_ASYNC_SETUP = "homeassistant.components.isy994.async_setup"
PATCH_ASYNC_SETUP_ENTRY = "homeassistant.components.isy994.async_setup_entry"
async def test_form(hass: HomeAssistantType):
"""Test we get the form."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {}
with patch(PATCH_CONFIGURATION) as mock_config_class, patch(
PATCH_CONNECTION
) as mock_connection_class, patch(
PATCH_ASYNC_SETUP, return_value=True
) as mock_setup, patch(
PATCH_ASYNC_SETUP_ENTRY,
return_value=True,
) as mock_setup_entry:
isy_conn = mock_connection_class.return_value
isy_conn.get_config.return_value = None
mock_config_class.return_value = MOCK_VALIDATED_RESPONSE
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
MOCK_USER_INPUT,
)
await hass.async_block_till_done()
assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result2["title"] == f"{MOCK_DEVICE_NAME} ({MOCK_HOSTNAME})"
assert result2["result"].unique_id == MOCK_UUID
assert result2["data"] == MOCK_USER_INPUT
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_invalid_host(hass: HomeAssistantType):
"""Test we handle invalid host."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"host": MOCK_HOSTNAME, # Test with missing protocol (http://)
"username": MOCK_USERNAME,
"password": MOCK_PASSWORD,
"tls": MOCK_TLS_VERSION,
},
)
assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result2["errors"] == {"base": "invalid_host"}
async def test_form_invalid_auth(hass: HomeAssistantType):
"""Test we handle invalid auth."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(PATCH_CONFIGURATION), patch(
PATCH_CONNECTION,
side_effect=ValueError("PyISY could not connect to the ISY."),
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
MOCK_USER_INPUT,
)
assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result2["errors"] == {"base": "invalid_auth"}
async def test_form_cannot_connect(hass: HomeAssistantType):
"""Test we handle cannot connect error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(PATCH_CONFIGURATION), patch(
PATCH_CONNECTION,
side_effect=CannotConnect,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
MOCK_USER_INPUT,
)
assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result2["errors"] == {"base": "cannot_connect"}
async def test_form_existing_config_entry(hass: HomeAssistantType):
"""Test if config entry already exists."""
MockConfigEntry(domain=DOMAIN, unique_id=MOCK_UUID).add_to_hass(hass)
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {}
with patch(PATCH_CONFIGURATION) as mock_config_class, patch(
PATCH_CONNECTION
) as mock_connection_class:
isy_conn = mock_connection_class.return_value
isy_conn.get_config.return_value = None
mock_config_class.return_value = MOCK_VALIDATED_RESPONSE
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
MOCK_USER_INPUT,
)
assert result2["type"] == data_entry_flow.RESULT_TYPE_ABORT
async def test_import_flow_some_fields(hass: HomeAssistantType) -> None:
"""Test import config flow with just the basic fields."""
with patch(PATCH_CONFIGURATION) as mock_config_class, patch(
PATCH_CONNECTION
) as mock_connection_class, patch(PATCH_ASYNC_SETUP, return_value=True), patch(
PATCH_ASYNC_SETUP_ENTRY,
return_value=True,
):
isy_conn = mock_connection_class.return_value
isy_conn.get_config.return_value = None
mock_config_class.return_value = MOCK_VALIDATED_RESPONSE
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data=MOCK_IMPORT_BASIC_CONFIG,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"][CONF_HOST] == f"http://{MOCK_HOSTNAME}"
assert result["data"][CONF_USERNAME] == MOCK_USERNAME
assert result["data"][CONF_PASSWORD] == MOCK_PASSWORD
async def test_import_flow_with_https(hass: HomeAssistantType) -> None:
"""Test import config with https."""
with patch(PATCH_CONFIGURATION) as mock_config_class, patch(
PATCH_CONNECTION
) as mock_connection_class, patch(PATCH_ASYNC_SETUP, return_value=True), patch(
PATCH_ASYNC_SETUP_ENTRY,
return_value=True,
):
isy_conn = mock_connection_class.return_value
isy_conn.get_config.return_value = None
mock_config_class.return_value = MOCK_VALIDATED_RESPONSE
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data=MOCK_IMPORT_WITH_SSL,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"][CONF_HOST] == f"https://{MOCK_HOSTNAME}"
assert result["data"][CONF_USERNAME] == MOCK_USERNAME
assert result["data"][CONF_PASSWORD] == MOCK_PASSWORD
async def test_import_flow_all_fields(hass: HomeAssistantType) -> None:
"""Test import config flow with all fields."""
with patch(PATCH_CONFIGURATION) as mock_config_class, patch(
PATCH_CONNECTION
) as mock_connection_class, patch(PATCH_ASYNC_SETUP, return_value=True), patch(
PATCH_ASYNC_SETUP_ENTRY,
return_value=True,
):
isy_conn = mock_connection_class.return_value
isy_conn.get_config.return_value = None
mock_config_class.return_value = MOCK_VALIDATED_RESPONSE
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data=MOCK_IMPORT_FULL_CONFIG,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"][CONF_HOST] == f"http://{MOCK_HOSTNAME}"
assert result["data"][CONF_USERNAME] == MOCK_USERNAME
assert result["data"][CONF_PASSWORD] == MOCK_PASSWORD
assert result["data"][CONF_IGNORE_STRING] == MOCK_IGNORE_STRING
assert result["data"][CONF_RESTORE_LIGHT_STATE] == MOCK_RESTORE_LIGHT_STATE
assert result["data"][CONF_SENSOR_STRING] == MOCK_SENSOR_STRING
assert result["data"][CONF_VAR_SENSOR_STRING] == MOCK_VARIABLE_SENSOR_STRING
assert result["data"][CONF_TLS_VER] == MOCK_TLS_VERSION
async def test_form_ssdp_already_configured(hass: HomeAssistantType) -> None:
"""Test ssdp abort when the serial number is already configured."""
await setup.async_setup_component(hass, "persistent_notification", {})
MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: f"http://{MOCK_HOSTNAME}{ISY_URL_POSTFIX}"},
unique_id=MOCK_UUID,
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_SSDP},
data={
ssdp.ATTR_SSDP_LOCATION: f"http://{MOCK_HOSTNAME}{ISY_URL_POSTFIX}",
ssdp.ATTR_UPNP_FRIENDLY_NAME: "myisy",
ssdp.ATTR_UPNP_UDN: f"{UDN_UUID_PREFIX}{MOCK_UUID}",
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
async def test_form_ssdp(hass: HomeAssistantType):
"""Test we can setup from ssdp."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_SSDP},
data={
ssdp.ATTR_SSDP_LOCATION: f"http://{MOCK_HOSTNAME}{ISY_URL_POSTFIX}",
ssdp.ATTR_UPNP_FRIENDLY_NAME: "myisy",
ssdp.ATTR_UPNP_UDN: f"{UDN_UUID_PREFIX}{MOCK_UUID}",
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
assert result["errors"] == {}
with patch(PATCH_CONFIGURATION) as mock_config_class, patch(
PATCH_CONNECTION
) as mock_connection_class, patch(
PATCH_ASYNC_SETUP, return_value=True
) as mock_setup, patch(
PATCH_ASYNC_SETUP_ENTRY,
return_value=True,
) as mock_setup_entry:
isy_conn = mock_connection_class.return_value
isy_conn.get_config.return_value = None
mock_config_class.return_value = MOCK_VALIDATED_RESPONSE
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
MOCK_USER_INPUT,
)
await hass.async_block_till_done()
assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result2["title"] == f"{MOCK_DEVICE_NAME} ({MOCK_HOSTNAME})"
assert result2["result"].unique_id == MOCK_UUID
assert result2["data"] == MOCK_USER_INPUT
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import logging
from compare_gan import datasets
from compare_gan import eval_utils
from compare_gan.metrics import eval_task
import numpy as np
class AccuracyTask(eval_task.EvalTask):
"""Evaluation Task for computing and reporting accuracy."""
def metric_list(self):
return frozenset([
"train_accuracy", "test_accuracy", "fake_accuracy", "train_d_loss",
"test_d_loss"
])
def run_in_session(self, options, sess, gan, real_images):
del options
return compute_accuracy_loss(sess, gan, real_images)
def compute_accuracy_loss(sess,
gan,
test_images,
max_train_examples=50000,
num_repeat=5):
"""Compute discriminator's accuracy and loss on a given dataset.
Args:
sess: tf.Session object.
gan: Any AbstractGAN instance.
test_images: numpy array with test images.
max_train_examples: How many "train" examples to get from the dataset.
In each round, some of them will be randomly selected
to evaluate train set accuracy.
num_repeat: How many times to repeat the computation.
The mean of all the results is reported.
Returns:
Dict[Text, float] with all the computed scores.
Raises:
ValueError: If the number of test_images is greater than the number of
training images returned by the dataset.
"""
logging.info("Evaluating training and test accuracy...")
train_images = eval_utils.get_real_images(
dataset=datasets.get_dataset(),
num_examples=max_train_examples,
split="train",
failure_on_insufficient_examples=False)
if train_images.shape[0] < test_images.shape[0]:
raise ValueError("num_train %d must be larger than num_test %d." %
(train_images.shape[0], test_images.shape[0]))
num_batches = int(np.floor(test_images.shape[0] / gan.batch_size))
if num_batches * gan.batch_size < test_images.shape[0]:
logging.error("Ignoring the last batch with %d samples / %d epoch size.",
test_images.shape[0] - num_batches * gan.batch_size,
gan.batch_size)
ret = {
"train_accuracy": [],
"test_accuracy": [],
"fake_accuracy": [],
"train_d_loss": [],
"test_d_loss": []
}
for _ in range(num_repeat):
idx = np.random.choice(train_images.shape[0], test_images.shape[0])
bs = gan.batch_size
train_subset = [train_images[i] for i in idx]
train_predictions, test_predictions, fake_predictions = [], [], []
train_d_losses, test_d_losses = [], []
for i in range(num_batches):
z_sample = gan.z_generator(gan.batch_size, gan.z_dim)
start_idx = i * bs
end_idx = start_idx + bs
test_batch = test_images[start_idx : end_idx]
train_batch = train_subset[start_idx : end_idx]
test_prediction, test_d_loss, fake_images = sess.run(
[gan.discriminator_output, gan.d_loss, gan.fake_images],
feed_dict={
gan.inputs: test_batch, gan.z: z_sample
})
train_prediction, train_d_loss = sess.run(
[gan.discriminator_output, gan.d_loss],
feed_dict={
gan.inputs: train_batch,
gan.z: z_sample
})
fake_prediction = sess.run(
gan.discriminator_output,
feed_dict={gan.inputs: fake_images})[0]
train_predictions.append(train_prediction[0])
test_predictions.append(test_prediction[0])
fake_predictions.append(fake_prediction)
train_d_losses.append(train_d_loss)
test_d_losses.append(test_d_loss)
train_predictions = [x >= 0.5 for x in train_predictions]
test_predictions = [x >= 0.5 for x in test_predictions]
fake_predictions = [x < 0.5 for x in fake_predictions]
ret["train_accuracy"].append(np.array(train_predictions).mean())
ret["test_accuracy"].append(np.array(test_predictions).mean())
ret["fake_accuracy"].append(np.array(fake_predictions).mean())
ret["train_d_loss"].append(np.mean(train_d_losses))
ret["test_d_loss"].append(np.mean(test_d_losses))
for key in ret:
ret[key] = np.mean(ret[key])
return ret
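# Hedged usage sketch: inside an evaluation loop one would typically call
# (sess, gan and test_images are assumed to exist already)
#
#   scores = compute_accuracy_loss(sess, gan, test_images)
#   print(scores['train_accuracy'], scores['test_accuracy'],
#         scores['fake_accuracy'])
#
# Each returned value is the mean over `num_repeat` randomised rounds, as
# computed in the loop above.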
|
import vcr
class MockSerializer:
def __init__(self):
self.serialize_count = 0
self.deserialize_count = 0
self.load_args = None
def deserialize(self, cassette_string):
self.deserialize_count += 1
self.cassette_string = cassette_string
return {"interactions": []}
def serialize(self, cassette_dict):
self.serialize_count += 1
return ""
def test_registered_serializer(tmpdir):
ms = MockSerializer()
my_vcr = vcr.VCR()
my_vcr.register_serializer("mock", ms)
tmpdir.join("test.mock").write("test_data")
with my_vcr.use_cassette(str(tmpdir.join("test.mock")), serializer="mock"):
# Cassette data was deserialized once
assert ms.deserialize_count == 1
# and the deserializer received the test data string
assert ms.cassette_string == "test_data"
# and nothing has been serialized yet
assert ms.serialize_count == 0
assert ms.deserialize_count == 1
|
import pywink
from homeassistant.components.cover import ATTR_POSITION, CoverEntity
from . import DOMAIN, WinkDevice
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Wink cover platform."""
for shade in pywink.get_shades():
_id = shade.object_id() + shade.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkCoverEntity(shade, hass)])
for shade in pywink.get_shade_groups():
_id = shade.object_id() + shade.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkCoverEntity(shade, hass)])
for door in pywink.get_garage_doors():
_id = door.object_id() + door.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkCoverEntity(door, hass)])
class WinkCoverEntity(WinkDevice, CoverEntity):
"""Representation of a Wink cover device."""
async def async_added_to_hass(self):
"""Call when entity is added to hass."""
self.hass.data[DOMAIN]["entities"]["cover"].append(self)
def close_cover(self, **kwargs):
"""Close the cover."""
self.wink.set_state(0)
def open_cover(self, **kwargs):
"""Open the cover."""
self.wink.set_state(1)
def set_cover_position(self, **kwargs):
"""Move the cover shutter to a specific position."""
position = kwargs.get(ATTR_POSITION)
self.wink.set_state(position / 100)
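# Home Assistant passes ATTR_POSITION as a percentage (0-100); pywink's
# set_state() appears to expect a 0.0-1.0 fraction, hence the division by
# 100 here and the multiplication by 100 in current_cover_position below.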
@property
def current_cover_position(self):
"""Return the current position of cover shutter."""
if self.wink.state() is not None:
return int(self.wink.state() * 100)
return None
@property
def is_closed(self):
"""Return if the cover is closed."""
state = self.wink.state()
return bool(state == 0)
|
import io
import textwrap
import re
import uuid
import pytest
mhtml = pytest.importorskip('qutebrowser.browser.webkit.mhtml')
try:
import cssutils
except ImportError:
cssutils = None
@pytest.fixture(autouse=True)
def patch_uuid(monkeypatch):
monkeypatch.setattr(uuid, "uuid4", lambda: "UUID")
class Checker:
"""A helper to check mhtml output.
Attributes:
fp: A BytesIO object for passing to MHTMLWriter.write_to.
"""
def __init__(self):
self.fp = io.BytesIO()
@property
def value(self):
return self.fp.getvalue()
def expect(self, expected):
actual = self.value.decode('ascii')
# Make sure there are no stray \r or \n
assert re.search(r'\r[^\n]', actual) is None
assert re.search(r'[^\r]\n', actual) is None
actual = actual.replace('\r\n', '\n')
expected = textwrap.dedent(expected).lstrip('\n')
assert expected == actual
@pytest.fixture
def checker():
return Checker()
def test_quoted_printable_umlauts(checker):
content = 'Die süße Hündin läuft in die Höhle des Bären'
content = content.encode('iso-8859-1')
writer = mhtml.MHTMLWriter(root_content=content,
content_location='localhost',
content_type='text/plain')
writer.write_to(checker.fp)
checker.expect("""
Content-Type: multipart/related; boundary="---=_qute-UUID"
MIME-Version: 1.0
-----=_qute-UUID
Content-Location: localhost
MIME-Version: 1.0
Content-Type: text/plain
Content-Transfer-Encoding: quoted-printable
Die s=FC=DFe H=FCndin l=E4uft in die H=F6hle des B=E4ren
-----=_qute-UUID--
""")
@pytest.mark.parametrize('header, value', [
('content_location', 'http://brötli.com'),
('content_type', 'text/pläin'),
])
def test_refuses_non_ascii_header_value(checker, header, value):
defaults = {
'root_content': b'',
'content_location': 'http://example.com',
'content_type': 'text/plain',
}
defaults[header] = value
writer = mhtml.MHTMLWriter(**defaults)
with pytest.raises(UnicodeEncodeError, match="'ascii' codec can't encode"):
writer.write_to(checker.fp)
def test_file_encoded_as_base64(checker):
content = b'Image file attached'
writer = mhtml.MHTMLWriter(root_content=content, content_type='text/plain',
content_location='http://example.com')
writer.add_file(location='http://a.example.com/image.png',
content='\U0001F601 image data'.encode('utf-8'),
content_type='image/png',
transfer_encoding=mhtml.E_BASE64)
writer.write_to(checker.fp)
checker.expect("""
Content-Type: multipart/related; boundary="---=_qute-UUID"
MIME-Version: 1.0
-----=_qute-UUID
Content-Location: http://example.com
MIME-Version: 1.0
Content-Type: text/plain
Content-Transfer-Encoding: quoted-printable
Image file attached
-----=_qute-UUID
Content-Location: http://a.example.com/image.png
MIME-Version: 1.0
Content-Type: image/png
Content-Transfer-Encoding: base64
8J+YgSBpbWFnZSBkYXRh
-----=_qute-UUID--
""")
@pytest.mark.parametrize('transfer_encoding', [
pytest.param(mhtml.E_BASE64, id='base64'),
pytest.param(mhtml.E_QUOPRI, id='quoted-printable')])
def test_payload_lines_wrap(checker, transfer_encoding):
payload = b'1234567890' * 10
writer = mhtml.MHTMLWriter(root_content=b'', content_type='text/plain',
content_location='http://example.com')
writer.add_file(location='http://example.com/payload', content=payload,
content_type='text/plain',
transfer_encoding=transfer_encoding)
writer.write_to(checker.fp)
for line in checker.value.split(b'\r\n'):
assert len(line) < 77
def test_files_appear_sorted(checker):
writer = mhtml.MHTMLWriter(root_content=b'root file',
content_type='text/plain',
content_location='http://www.example.com/')
for subdomain in 'ahgbizt':
writer.add_file(location='http://{}.example.com/'.format(subdomain),
content='file {}'.format(subdomain).encode('utf-8'),
content_type='text/plain',
transfer_encoding=mhtml.E_QUOPRI)
writer.write_to(checker.fp)
checker.expect("""
Content-Type: multipart/related; boundary="---=_qute-UUID"
MIME-Version: 1.0
-----=_qute-UUID
Content-Location: http://www.example.com/
MIME-Version: 1.0
Content-Type: text/plain
Content-Transfer-Encoding: quoted-printable
root file
-----=_qute-UUID
Content-Location: http://a.example.com/
MIME-Version: 1.0
Content-Type: text/plain
Content-Transfer-Encoding: quoted-printable
file a
-----=_qute-UUID
Content-Location: http://b.example.com/
MIME-Version: 1.0
Content-Type: text/plain
Content-Transfer-Encoding: quoted-printable
file b
-----=_qute-UUID
Content-Location: http://g.example.com/
MIME-Version: 1.0
Content-Type: text/plain
Content-Transfer-Encoding: quoted-printable
file g
-----=_qute-UUID
Content-Location: http://h.example.com/
MIME-Version: 1.0
Content-Type: text/plain
Content-Transfer-Encoding: quoted-printable
file h
-----=_qute-UUID
Content-Location: http://i.example.com/
MIME-Version: 1.0
Content-Type: text/plain
Content-Transfer-Encoding: quoted-printable
file i
-----=_qute-UUID
Content-Location: http://t.example.com/
MIME-Version: 1.0
Content-Type: text/plain
Content-Transfer-Encoding: quoted-printable
file t
-----=_qute-UUID
Content-Location: http://z.example.com/
MIME-Version: 1.0
Content-Type: text/plain
Content-Transfer-Encoding: quoted-printable
file z
-----=_qute-UUID--
""")
def test_empty_content_type(checker):
writer = mhtml.MHTMLWriter(root_content=b'',
content_location='http://example.com/',
content_type='text/plain')
writer.add_file('http://example.com/file', b'file content')
writer.write_to(checker.fp)
checker.expect("""
Content-Type: multipart/related; boundary="---=_qute-UUID"
MIME-Version: 1.0
-----=_qute-UUID
Content-Location: http://example.com/
MIME-Version: 1.0
Content-Type: text/plain
Content-Transfer-Encoding: quoted-printable
-----=_qute-UUID
MIME-Version: 1.0
Content-Location: http://example.com/file
Content-Transfer-Encoding: quoted-printable
file content
-----=_qute-UUID--
""")
@pytest.mark.parametrize('has_cssutils', [
pytest.param(True, marks=pytest.mark.skipif(
cssutils is None, reason="requires cssutils"), id='with_cssutils'),
pytest.param(False, id='no_cssutils'),
])
@pytest.mark.parametrize('inline, style, expected_urls', [
pytest.param(False, "@import 'default.css'", ['default.css'],
id='import with apostrophe'),
pytest.param(False, '@import "default.css"', ['default.css'],
id='import with quote'),
pytest.param(False, "@import \t 'tabbed.css'", ['tabbed.css'],
id='import with tab'),
pytest.param(False, "@import url('default.css')", ['default.css'],
id='import with url()'),
pytest.param(False, """body {
background: url("/bg-img.png")
}""", ['/bg-img.png'], id='background with body'),
pytest.param(True, 'background: url(folder/file.png) no-repeat',
['folder/file.png'], id='background'),
pytest.param(True, 'content: url()', [], id='content'),
])
def test_css_url_scanner(monkeypatch, has_cssutils, inline, style,
expected_urls):
if not has_cssutils:
monkeypatch.setattr(mhtml, '_get_css_imports_cssutils',
lambda data, inline=False: None)
expected_urls.sort()
urls = mhtml._get_css_imports(style, inline=inline)
urls.sort()
assert urls == expected_urls
def test_quoted_printable_spaces(checker):
content = b' ' * 100
writer = mhtml.MHTMLWriter(root_content=content,
content_location='localhost',
content_type='text/plain')
writer.write_to(checker.fp)
checker.expect("""
Content-Type: multipart/related; boundary="---=_qute-UUID"
MIME-Version: 1.0
-----=_qute-UUID
Content-Location: localhost
MIME-Version: 1.0
Content-Type: text/plain
Content-Transfer-Encoding: quoted-printable
{}=
{}=20
-----=_qute-UUID--
""".format(' ' * 75, ' ' * 24))
class TestNoCloseBytesIO:
def test_fake_close(self):
fp = mhtml._NoCloseBytesIO()
fp.write(b'Value')
fp.close()
assert fp.getvalue() == b'Value'
fp.write(b'Eulav')
assert fp.getvalue() == b'ValueEulav'
def test_actual_close(self):
fp = mhtml._NoCloseBytesIO()
fp.write(b'Value')
fp.actual_close()
with pytest.raises(ValueError, match="I/O operation on closed file."):
fp.getvalue()
with pytest.raises(ValueError, match="I/O operation on closed file."):
fp.write(b'Closed')
|
import os
from stash.tests.stashtest import StashTestCase
class PwdTests(StashTestCase):
"""tests for the 'pwd' command."""
cwd = os.path.expanduser("~")
def test_help(self):
"""test 'pwd --help'."""
output = self.run_command("pwd --help")
self.assertIn("pwd", output)
self.assertIn("-h", output)
self.assertIn("--help", output)
self.assertIn("-b", output)
self.assertIn("--basename", output)
self.assertIn("-f", output)
self.assertIn("--fullname", output)
def test_pwd_collapseuser(self):
"""tests 'pwd'."""
output = self.run_command("pwd").replace("\n", "").replace("/", "")
self.assertEqual(output, "~")
def test_pwd_fullname(self):
"""tests 'pwd --fullname'."""
output = self.run_command("pwd --fullname").replace("\n", "")
self.assertEqual(output, os.path.abspath(os.getcwd()))
def test_pwd_basename(self):
"""tests 'pwd --basename'."""
output = self.run_command("pwd --basename").replace("\n", "")
self.assertEqual(output, os.path.basename(os.getcwd()))
|
import os
import diamond.collector
class OneWireCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(OneWireCollector, self).get_default_config_help()
config_help.update({
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(OneWireCollector, self).get_default_config()
config.update({
'path': 'owfs',
'owfs': '/mnt/1wire',
# 'scan': {'temperature': 't'},
# 'id:24.BB000000': {'file_with_value': 'alias'},
})
return config
def collect(self):
"""
Overrides the Collector.collect method
"""
metrics = {}
if 'scan' in self.config:
for ld in os.listdir(self.config['owfs']):
if '.' in ld:
self.read_values(ld, self.config['scan'], metrics)
for oid, files in self.config.items():
if oid[:3] == 'id:':
self.read_values(oid[3:], files, metrics)
for fn, fv in metrics.items():
self.publish(fn, fv, 2)
def read_values(self, oid, files, metrics):
"""
Reads values from owfs/oid/{files} and update
metrics with format [oid.alias] = value
"""
oid_path = os.path.join(self.config['owfs'], oid)
oid = oid.replace('.', '_')
for fn, alias in files.items():
fv = os.path.join(oid_path, fn)
if os.path.isfile(fv):
try:
with open(fv) as f:
v = f.read()
except Exception:
self.log.error("Unable to read %s", fv)
raise
try:
v = float(v)
except ValueError:
self.log.error("Unexpected value %s in %s", v, fv)
raise
metrics["%s.%s" % (oid, alias)] = v
|
from os import path
import numpy as np
from ...utils import warn, fill_doc, _check_option
from ...channels.layout import _topo_to_sphere
from ..constants import FIFF
from ..utils import (_mult_cal_one, _find_channels, _create_chs, read_str)
from ..meas_info import _empty_info
from ..base import BaseRaw
from ...annotations import Annotations
from ._utils import (_read_teeg, _get_event_parser, _session_date_2_meas_date,
_compute_robust_event_table_position, CNTEventType3)
def _read_annotations_cnt(fname, data_format='int16'):
"""CNT Annotation File Reader.
This function opens the .cnt file, reads the metadata needed to construct
the annotations, and parses the event table. Note that CNT files can
point to a separate file containing the events; that case, where the
event table is split off from the main .cnt file, is not supported.
Parameters
----------
fname : str
Path to the .cnt file containing the annotations.
data_format : 'int16' | 'int32'
Defines the data format the data is read in.
Returns
-------
annot : instance of Annotations
The annotations.
"""
# Offsets from SETUP structure in http://paulbourke.net/dataformats/eeg/
SETUP_NCHANNELS_OFFSET = 370
SETUP_RATE_OFFSET = 376
def _translating_function(offset, n_channels, event_type,
data_format=data_format):
n_bytes = 2 if data_format == 'int16' else 4
if event_type == CNTEventType3:
offset *= n_bytes * n_channels
event_time = offset - 900 - (75 * n_channels)
event_time //= n_channels * n_bytes
return event_time - 1
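# For CNTEventType3 the stored offset appears to be expressed in sample
# frames rather than bytes, so it is first scaled by n_bytes * n_channels.
# The fixed 900-byte SETUP header plus the 75 bytes of ELECTLOC data per
# channel are then subtracted, and the remaining byte offset is converted
# back into a zero-based sample index.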
with open(fname, 'rb') as fid:
fid.seek(SETUP_NCHANNELS_OFFSET)
(n_channels,) = np.frombuffer(fid.read(2), dtype='<u2')
fid.seek(SETUP_RATE_OFFSET)
(sfreq,) = np.frombuffer(fid.read(2), dtype='<u2')
event_table_pos = _compute_robust_event_table_position(
fid=fid, data_format=data_format)
with open(fname, 'rb') as fid:
teeg = _read_teeg(fid, teeg_offset=event_table_pos)
event_parser = _get_event_parser(event_type=teeg.event_type)
with open(fname, 'rb') as fid:
fid.seek(event_table_pos + 9)  # the real table starts at +9
buffer = fid.read(teeg.total_length)
my_events = list(event_parser(buffer))
if not my_events:
return Annotations(list(), list(), list(), None)
else:
onset = _translating_function(np.array([e.Offset for e in my_events],
dtype=float),
n_channels=n_channels,
event_type=type(my_events[0]),
data_format=data_format)
duration = np.array([getattr(e, 'Latency', 0.) for e in my_events],
dtype=float)
description = np.array([str(e.StimType) for e in my_events])
return Annotations(onset=onset / sfreq,
duration=duration,
description=description,
orig_time=None)
@fill_doc
def read_raw_cnt(input_fname, eog=(), misc=(), ecg=(),
emg=(), data_format='auto', date_format='mm/dd/yy',
preload=False, verbose=None):
"""Read CNT data as raw object.
.. Note::
2d spatial coordinates (x, y) for EEG channels are read from the file
header and fit to a sphere to compute corresponding z-coordinates.
If channels assigned as EEG channels have locations
far away from the head (i.e. x and y coordinates don't fit to a
sphere), all the channel locations will be distorted
(all channels that are not assigned with keywords ``eog``, ``ecg``,
``emg`` and ``misc`` are assigned as EEG channels). If you are not
sure that the channel locations in the header are correct, it is
probably safer to replace them with :meth:`mne.io.Raw.set_montage`.
Montages can be created/imported with:
- Standard montages with :func:`mne.channels.make_standard_montage`
- Montages for `Compumedics systems <https://compumedicsneuroscan.com/
scan-acquire-configuration-files/>`_ with
:func:`mne.channels.read_dig_dat`
- Other reader functions are listed under *See Also* at
:class:`mne.channels.DigMontage`
Parameters
----------
input_fname : str
Path to the data file.
eog : list | tuple | 'auto' | 'header'
Names of channels or list of indices that should be designated
EOG channels. If 'header', VEOG and HEOG channels assigned in the file
header are used. If 'auto', channel names containing 'EOG' are used.
Defaults to empty tuple.
misc : list | tuple
Names of channels or list of indices that should be designated
MISC channels. Defaults to empty tuple.
ecg : list | tuple | 'auto'
Names of channels or list of indices that should be designated
ECG channels. If 'auto', the channel names containing 'ECG' are used.
Defaults to empty tuple.
emg : list | tuple
Names of channels or list of indices that should be designated
EMG channels. If 'auto', the channel names containing 'EMG' are used.
Defaults to empty tuple.
data_format : 'auto' | 'int16' | 'int32'
Defines the data format the data is read in. If 'auto', it is
determined from the file header using ``numsamples`` field.
Defaults to 'auto'.
date_format : 'mm/dd/yy' | 'dd/mm/yy'
Format of date in the header. Defaults to 'mm/dd/yy'.
%(preload)s
%(verbose)s
Returns
-------
raw : instance of RawCNT.
The raw data.
See Also
--------
mne.io.Raw : Documentation of attribute and methods.
Notes
-----
.. versionadded:: 0.12
"""
return RawCNT(input_fname, eog=eog, misc=misc, ecg=ecg,
emg=emg, data_format=data_format, date_format=date_format,
preload=preload, verbose=verbose)
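# Hedged usage sketch ('recording.cnt' is a hypothetical file path):
#
#   raw = read_raw_cnt('recording.cnt', data_format='auto',
#                      date_format='mm/dd/yy', preload=True)
#   raw.set_montage(mne.channels.make_standard_montage('standard_1020'))
#
# Setting an explicit montage, as recommended in the docstring above,
# replaces the sphere-fitted positions read from the CNT header.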
def _get_cnt_info(input_fname, eog, ecg, emg, misc, data_format, date_format):
"""Read the cnt header."""
# XXX stim_channel_toggle is used because stim_channel was in use already
data_offset = 900 # Size of the 'SETUP' header.
cnt_info = dict()
# Reading only the fields of interest. Structure of the whole header at
# http://paulbourke.net/dataformats/eeg/
with open(input_fname, 'rb', buffering=0) as fid:
fid.seek(21)
patient_id = read_str(fid, 20)
patient_id = int(patient_id) if patient_id.isdigit() else 0
fid.seek(121)
patient_name = read_str(fid, 20).split()
last_name = patient_name[0] if len(patient_name) > 0 else ''
first_name = patient_name[-1] if len(patient_name) > 0 else ''
fid.seek(2, 1)
sex = read_str(fid, 1)
if sex == 'M':
sex = FIFF.FIFFV_SUBJ_SEX_MALE
elif sex == 'F':
sex = FIFF.FIFFV_SUBJ_SEX_FEMALE
else: # can be 'U'
sex = FIFF.FIFFV_SUBJ_SEX_UNKNOWN
hand = read_str(fid, 1)
if hand == 'R':
hand = FIFF.FIFFV_SUBJ_HAND_RIGHT
elif hand == 'L':
hand = FIFF.FIFFV_SUBJ_HAND_LEFT
else: # can be 'M' for mixed or 'U'
hand = None
fid.seek(205)
session_label = read_str(fid, 20)
session_date = ('%s %s' % (read_str(fid, 10), read_str(fid, 12)))
meas_date = _session_date_2_meas_date(session_date, date_format)
fid.seek(370)
n_channels = np.fromfile(fid, dtype='<u2', count=1)[0]
fid.seek(376)
sfreq = np.fromfile(fid, dtype='<u2', count=1)[0]
if eog == 'header':
fid.seek(402)
eog = [idx for idx in np.fromfile(fid, dtype='i2', count=2) if
idx >= 0]
fid.seek(438)
lowpass_toggle = np.fromfile(fid, 'i1', count=1)[0]
highpass_toggle = np.fromfile(fid, 'i1', count=1)[0]
# Header has a field for number of samples, but it does not seem to be
# too reliable. That's why there is an option for setting n_bytes manually.
fid.seek(864)
n_samples = np.fromfile(fid, dtype='<i4', count=1)[0]
fid.seek(869)
lowcutoff = np.fromfile(fid, dtype='f4', count=1)[0]
fid.seek(2, 1)
highcutoff = np.fromfile(fid, dtype='f4', count=1)[0]
event_offset = _compute_robust_event_table_position(
fid=fid, data_format=data_format
)
fid.seek(890)
cnt_info['continuous_seconds'] = np.fromfile(fid, dtype='<f4',
count=1)[0]
if event_offset < data_offset: # no events
data_size = n_samples * n_channels
else:
data_size = event_offset - (data_offset + 75 * n_channels)
_check_option('data_format', data_format, ['auto', 'int16', 'int32'])
if data_format == 'auto':
if (n_samples == 0 or
data_size // (n_samples * n_channels) not in [2, 4]):
warn('Could not define the number of bytes automatically. '
'Defaulting to 2.')
n_bytes = 2
n_samples = data_size // (n_bytes * n_channels)
else:
n_bytes = data_size // (n_samples * n_channels)
else:
n_bytes = 2 if data_format == 'int16' else 4
n_samples = data_size // (n_bytes * n_channels)
# Channel offset refers to the size of blocks per channel in the file.
cnt_info['channel_offset'] = np.fromfile(fid, dtype='<i4', count=1)[0]
if cnt_info['channel_offset'] > 1:
cnt_info['channel_offset'] //= n_bytes
else:
cnt_info['channel_offset'] = 1
ch_names, cals, baselines, chs, pos = (
list(), list(), list(), list(), list()
)
bads = list()
for ch_idx in range(n_channels): # ELECTLOC fields
fid.seek(data_offset + 75 * ch_idx)
ch_name = read_str(fid, 10)
ch_names.append(ch_name)
fid.seek(data_offset + 75 * ch_idx + 4)
if np.fromfile(fid, dtype='u1', count=1)[0]:
bads.append(ch_name)
fid.seek(data_offset + 75 * ch_idx + 19)
xy = np.fromfile(fid, dtype='f4', count=2)
xy[1] *= -1 # invert y-axis
pos.append(xy)
fid.seek(data_offset + 75 * ch_idx + 47)
# Baselines are subtracted before scaling the data.
baselines.append(np.fromfile(fid, dtype='i2', count=1)[0])
fid.seek(data_offset + 75 * ch_idx + 59)
sensitivity = np.fromfile(fid, dtype='f4', count=1)[0]
fid.seek(data_offset + 75 * ch_idx + 71)
cal = np.fromfile(fid, dtype='f4', count=1)[0]
cals.append(cal * sensitivity * 1e-6 / 204.8)
info = _empty_info(sfreq)
if lowpass_toggle == 1:
info['lowpass'] = highcutoff
if highpass_toggle == 1:
info['highpass'] = lowcutoff
subject_info = {'hand': hand, 'id': patient_id, 'sex': sex,
'first_name': first_name, 'last_name': last_name}
if eog == 'auto':
eog = _find_channels(ch_names, 'EOG')
if ecg == 'auto':
ecg = _find_channels(ch_names, 'ECG')
if emg == 'auto':
emg = _find_channels(ch_names, 'EMG')
chs = _create_chs(ch_names, cals, FIFF.FIFFV_COIL_EEG,
FIFF.FIFFV_EEG_CH, eog, ecg, emg, misc)
eegs = [idx for idx, ch in enumerate(chs) if
ch['coil_type'] == FIFF.FIFFV_COIL_EEG]
# XXX this should probably use mne.transforms._topo_to_sph and _sph_to_cart
coords = _topo_to_sphere(pos, eegs)
locs = np.full((len(chs), 12), np.nan)
locs[:, :3] = coords
for ch, loc in zip(chs, locs):
ch.update(loc=loc)
cnt_info.update(baselines=np.array(baselines), n_samples=n_samples,
n_bytes=n_bytes)
session_label = None if str(session_label) == '' else str(session_label)
info.update(meas_date=meas_date,
description=session_label, bads=bads,
subject_info=subject_info, chs=chs)
info._update_redundant()
return info, cnt_info
@fill_doc
class RawCNT(BaseRaw):
"""Raw object from Neuroscan CNT file.
.. Note::
The channel positions are read from the file header. Channels that are
not assigned with keywords ``eog``, ``ecg``, ``emg`` and ``misc`` are
assigned as eeg channels. All the eeg channel locations are fit to a
sphere when computing the z-coordinates for the channels. If channels
assigned as eeg channels have locations far away from the head (i.e.
x and y coordinates don't fit to a sphere), all the channel locations
will be distorted. If you are not sure that the channel locations in
the header are correct, it is probably safer to use a (standard)
montage. See :func:`mne.channels.make_standard_montage`
Parameters
----------
input_fname : str
Path to the CNT file.
eog : list | tuple
Names of channels or list of indices that should be designated
EOG channels. If 'auto', the channel names beginning with
``EOG`` are used. Defaults to empty tuple.
misc : list | tuple
Names of channels or list of indices that should be designated
MISC channels. Defaults to empty tuple.
ecg : list | tuple
Names of channels or list of indices that should be designated
ECG channels. If 'auto', the channel names beginning with
``ECG`` are used. Defaults to empty tuple.
emg : list | tuple
Names of channels or list of indices that should be designated
EMG channels. If 'auto', the channel names beginning with
``EMG`` are used. Defaults to empty tuple.
data_format : 'auto' | 'int16' | 'int32'
Defines the data format the data is read in. If 'auto', it is
determined from the file header using ``numsamples`` field.
Defaults to 'auto'.
date_format : 'mm/dd/yy' | 'dd/mm/yy'
Format of date in the header. Defaults to 'mm/dd/yy'.
%(preload)s
%(verbose)s
See Also
--------
mne.io.Raw : Documentation of attribute and methods.
"""
def __init__(self, input_fname, eog=(), misc=(),
ecg=(), emg=(), data_format='auto', date_format='mm/dd/yy',
preload=False, verbose=None): # noqa: D102
_check_option('date_format', date_format, ['mm/dd/yy', 'dd/mm/yy'])
if date_format == 'dd/mm/yy':
_date_format = '%d/%m/%y %H:%M:%S'
else:
_date_format = '%m/%d/%y %H:%M:%S'
input_fname = path.abspath(input_fname)
info, cnt_info = _get_cnt_info(input_fname, eog, ecg, emg, misc,
data_format, _date_format)
last_samps = [cnt_info['n_samples'] - 1]
super(RawCNT, self).__init__(
info, preload, filenames=[input_fname], raw_extras=[cnt_info],
last_samps=last_samps, orig_format='int', verbose=verbose)
data_format = 'int32' if cnt_info['n_bytes'] == 4 else 'int16'
self.set_annotations(
_read_annotations_cnt(input_fname, data_format=data_format))
def _read_segment_file(self, data, idx, fi, start, stop, cals, mult):
"""Take a chunk of raw data, multiply by mult or cals, and store."""
n_channels = self._raw_extras[fi]['orig_nchan']
if 'stim_channel' in self._raw_extras[fi]:
f_channels = n_channels - 1 # Stim channel already read.
stim_ch = self._raw_extras[fi]['stim_channel']
else:
f_channels = n_channels
stim_ch = None
channel_offset = self._raw_extras[fi]['channel_offset']
baselines = self._raw_extras[fi]['baselines']
n_bytes = self._raw_extras[fi]['n_bytes']
dtype = '<i4' if n_bytes == 4 else '<i2'
chunk_size = channel_offset * f_channels # Size of chunks in file.
# The data is divided into blocks of samples / channel.
# channel_offset determines the amount of successive samples.
# Here we use sample offset to align the data because start can be in
# the middle of these blocks.
data_left = (stop - start) * f_channels
# Read up to 100 MB of data at a time, block_size is in data samples
block_size = ((int(100e6) // n_bytes) // chunk_size) * chunk_size
block_size = min(data_left, block_size)
s_offset = start % channel_offset
with open(self._filenames[fi], 'rb', buffering=0) as fid:
fid.seek(900 + f_channels * (75 + (start - s_offset) * n_bytes))
for sample_start in np.arange(0, data_left,
block_size) // f_channels:
sample_stop = sample_start + min((block_size // f_channels,
data_left // f_channels -
sample_start))
n_samps = sample_stop - sample_start
one = np.zeros((n_channels, n_samps))
# In case channel offset and start time do not align perfectly,
# extra sample sets are read here to cover the desired time
# window. The whole (up to 100 MB) block is read at once and
# then reshaped to (n_channels, n_samples).
extra_samps = chunk_size if (s_offset != 0 or n_samps %
channel_offset != 0) else 0
if s_offset >= (channel_offset / 2): # Extend at the end.
extra_samps += chunk_size
count = n_samps // channel_offset * chunk_size + extra_samps
n_chunks = count // chunk_size
samps = np.fromfile(fid, dtype=dtype, count=count)
samps = samps.reshape((n_chunks, f_channels, channel_offset),
order='C')
# Intermediate shaping to chunk sizes.
block = np.zeros((n_channels, channel_offset * n_chunks))
for set_idx, row in enumerate(samps): # Final shape.
block_slice = slice(set_idx * channel_offset,
(set_idx + 1) * channel_offset)
block[:f_channels, block_slice] = row
if 'stim_channel' in self._raw_extras[fi]:
_data_start = start + sample_start
_data_stop = start + sample_stop
block[-1] = stim_ch[_data_start:_data_stop]
one[idx] = block[idx, s_offset:n_samps + s_offset]
one[idx] -= baselines[idx][:, None]
_mult_cal_one(data[:, sample_start:sample_stop], one, idx,
cals, mult)
|
import mock
from pytest import raises
from paasta_tools import paasta_cluster_boost
FAKE_SLAVE_DATA = [
{
"datacenter": "westeros-1",
"ecosystem": "stagef",
"habitat": "uswest1cstagef",
"instance_type": "c3.4xlarge",
"kwatest": "foo",
"pool": "default",
"region": "uswest1-stagef",
"role": "taskproc",
"runtimeenv": "stage",
"superregion": "norcal-stagef",
"topology_env": "stagef",
},
{
"datacenter": "westeros-1",
"ecosystem": "stagef",
"habitat": "uswest1cstagef",
"instance_type": "c4.4xlarge",
"kwatest": "foo",
"pool": "default",
"region": "uswest1-stagef",
"role": "taskproc",
"runtimeenv": "stage",
"superregion": "norcal-stagef",
"topology_env": "stagef",
},
]
def test_main():
with mock.patch(
"paasta_tools.paasta_cluster_boost.parse_args",
autospec=True,
return_value=mock.Mock(verbose=1),
), mock.patch(
"paasta_tools.paasta_cluster_boost.paasta_cluster_boost", autospec=True
) as mock_paasta_cluster_boost:
mock_paasta_cluster_boost.return_value = True
with raises(SystemExit) as e:
paasta_cluster_boost.main()
assert e.value.code == 0
mock_paasta_cluster_boost.return_value = False
with raises(SystemExit) as e:
paasta_cluster_boost.main()
assert e.value.code == 1
def test_paasta_cluster_boost():
with mock.patch(
"paasta_tools.paasta_cluster_boost.load_system_paasta_config", autospec=True
) as mock_load_system_paasta_config, mock.patch(
"paasta_tools.paasta_cluster_boost.load_boost.get_zk_cluster_boost_path",
autospec=True,
) as mock_get_zk_cluster_boost_path, mock.patch(
"paasta_tools.paasta_cluster_boost.load_boost.set_boost_factor", autospec=True
) as mock_set_boost_factor, mock.patch(
"paasta_tools.paasta_cluster_boost.load_boost.clear_boost", autospec=True
) as mock_clear_boost, mock.patch(
"paasta_tools.paasta_cluster_boost.load_boost.get_boost_factor", autospec=True
):
mock_get_regions = mock.Mock(return_value=[])
mock_load_system_paasta_config.return_value = mock.Mock(
get_cluster_boost_enabled=mock.Mock(return_value=False),
get_boost_regions=mock_get_regions,
)
mock_get_regions.return_value = ["useast1-dev"]
assert not paasta_cluster_boost.paasta_cluster_boost(
action="status", pool="default", boost=1.0, duration=20, override=False
)
mock_load_system_paasta_config.return_value = mock.Mock(
get_cluster_boost_enabled=mock.Mock(return_value=True),
get_boost_regions=mock_get_regions,
)
mock_get_regions.return_value = []
assert not paasta_cluster_boost.paasta_cluster_boost(
action="status", pool="default", boost=1.0, duration=20, override=False
)
mock_load_system_paasta_config.return_value = mock.Mock(
get_cluster_boost_enabled=mock.Mock(return_value=True),
get_boost_regions=mock_get_regions,
)
mock_get_regions.return_value = ["useast1-dev"]
assert paasta_cluster_boost.paasta_cluster_boost(
action="status", pool="default", boost=1.0, duration=20, override=False
)
assert not mock_set_boost_factor.called
assert not mock_clear_boost.called
assert paasta_cluster_boost.paasta_cluster_boost(
action="set", pool="default", boost=1.0, duration=20, override=False
)
mock_set_boost_factor.assert_called_with(
zk_boost_path=mock_get_zk_cluster_boost_path.return_value,
region="useast1-dev",
pool="default",
factor=1.0,
duration_minutes=20,
override=False,
)
assert not mock_clear_boost.called
assert paasta_cluster_boost.paasta_cluster_boost(
action="clear", pool="default", boost=1.0, duration=20, override=False
)
mock_clear_boost.assert_called_with(
zk_boost_path=mock_get_zk_cluster_boost_path.return_value,
region="useast1-dev",
pool="default",
)
|
import os
import fnmatch
DOIT_CONFIG = {
'default_tasks': ['flake8', 'test'],
'reporter': 'executed-only',
}
def recursive_glob(path, pattern):
"""recursively walk path directories and return files matching the pattern"""
for root, dirnames, filenames in os.walk(path, followlinks=True):
for filename in fnmatch.filter(filenames, pattern):
yield os.path.join(root, filename)
def task_flake8():
"""flake8 - static check for python files"""
yield {
'name': os.path.join(os.getcwd(), 'nikola'),
'actions': ['flake8 nikola/'],
}
def task_pydocstyle():
"""pydocstyle -- static check for docstring style"""
yield {
'name': os.path.join(os.getcwd(), 'nikola'),
'actions': ["pydocstyle --count --match-dir='(?!^\\.)(?!data).*' nikola/"],
}
def task_test():
"""run unit-tests using py.test"""
return {
'actions': ['py.test tests/'],
}
def task_coverage():
"""run unit-tests using py.test, with coverage reporting"""
return {
'actions': ['py.test --cov nikola --cov-report term-missing tests/'],
'verbosity': 2,
}
def task_gen_completion():
"""generate tab-completion scripts"""
cmd = 'nikola tabcompletion --shell {0} --hardcode-tasks > _nikola_{0}'
for shell in ('bash', 'zsh'):
yield {
'name': shell,
'actions': [cmd.format(shell)],
'targets': ['_nikola_{0}'.format(shell)],
}
|
import logging
import os
import shutil
import subprocess
import tempfile
import voluptuous as vol
from homeassistant.components.tts import CONF_LANG, PLATFORM_SCHEMA, Provider
_LOGGER = logging.getLogger(__name__)
SUPPORT_LANGUAGES = ["en-US", "en-GB", "de-DE", "es-ES", "fr-FR", "it-IT"]
DEFAULT_LANG = "en-US"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Optional(CONF_LANG, default=DEFAULT_LANG): vol.In(SUPPORT_LANGUAGES)}
)
def get_engine(hass, config, discovery_info=None):
"""Set up Pico speech component."""
if shutil.which("pico2wave") is None:
_LOGGER.error("'pico2wave' was not found")
return False
return PicoProvider(config[CONF_LANG])
class PicoProvider(Provider):
"""The Pico TTS API provider."""
def __init__(self, lang):
"""Initialize Pico TTS provider."""
self._lang = lang
self.name = "PicoTTS"
@property
def default_language(self):
"""Return the default language."""
return self._lang
@property
def supported_languages(self):
"""Return list of supported languages."""
return SUPPORT_LANGUAGES
def get_tts_audio(self, message, language, options=None):
"""Load TTS using pico2wave."""
with tempfile.NamedTemporaryFile(suffix=".wav", delete=False) as tmpf:
fname = tmpf.name
cmd = ["pico2wave", "--wave", fname, "-l", language, message]
subprocess.call(cmd)
data = None
try:
with open(fname, "rb") as voice:
data = voice.read()
except OSError:
_LOGGER.error("Error trying to read %s", fname)
return (None, None)
finally:
os.remove(fname)
if data:
return ("wav", data)
return (None, None)
|
import diamond.collector
import urllib2
import time
try:
import json
except ImportError:
import simplejson as json
class CelerymonCollector(diamond.collector.Collector):
LastCollectTime = None
def get_default_config_help(self):
config_help = super(CelerymonCollector, self).get_default_config_help()
config_help.update({
'path': 'celerymon',
'host': 'A single hostname to get metrics from',
'port': 'The celerymon port'
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(CelerymonCollector, self).get_default_config()
config.update({
'host': 'localhost',
'port': '8989'
})
return config
def collect(self):
"""
Overrides the Collector.collect method
"""
# Handle collection time intervals correctly
CollectTime = int(time.time())
time_delta = float(self.config['interval'])
if not self.LastCollectTime:
self.LastCollectTime = CollectTime - time_delta
host = self.config['host']
port = self.config['port']
celerymon_url = "http://%s:%s/api/task/?since=%i" % (
host, port, self.LastCollectTime)
response = urllib2.urlopen(celerymon_url)
body = response.read()
celery_data = json.loads(body)
results = dict()
total_messages = 0
for data in celery_data:
name = str(data[1]['name'])
if name not in results:
results[name] = dict()
state = str(data[1]['state'])
if state not in results[name]:
results[name][state] = 1
else:
results[name][state] += 1
total_messages += 1
# Publish Metric
self.publish('total_messages', total_messages)
for result in results:
for state in results[result]:
metric_value = results[result][state]
metric_name = "%s.%s" % (result, state)
self.publish(metric_name, metric_value)
self.LastCollectTime = CollectTime
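# For illustration only (hypothetical data): if a collection interval saw two
# results for a task named "tasks.add", one SUCCESS and one FAILURE, the
# collector above would publish:
#   total_messages       2
#   tasks.add.SUCCESS    1
#   tasks.add.FAILURE    1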
|
import logging
from spiderpy.spiderapi import SpiderApi, SpiderApiException, UnauthorizedException
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_PASSWORD, CONF_SCAN_INTERVAL, CONF_USERNAME
from .const import DEFAULT_SCAN_INTERVAL, DOMAIN
_LOGGER = logging.getLogger(__name__)
DATA_SCHEMA_USER = vol.Schema(
{vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str}
)
RESULT_AUTH_FAILED = "auth_failed"
RESULT_CONN_ERROR = "conn_error"
RESULT_SUCCESS = "success"
class SpiderConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a Spider config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
def __init__(self):
"""Initialize the Spider flow."""
self.data = {
CONF_USERNAME: "",
CONF_PASSWORD: "",
CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL,
}
def _try_connect(self):
"""Try to connect and check auth."""
try:
SpiderApi(
self.data[CONF_USERNAME],
self.data[CONF_PASSWORD],
self.data[CONF_SCAN_INTERVAL],
)
except SpiderApiException:
return RESULT_CONN_ERROR
except UnauthorizedException:
return RESULT_AUTH_FAILED
return RESULT_SUCCESS
async def async_step_user(self, user_input=None):
"""Handle a flow initiated by the user."""
if self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")
errors = {}
if user_input is not None:
self.data[CONF_USERNAME] = user_input["username"]
self.data[CONF_PASSWORD] = user_input["password"]
result = await self.hass.async_add_executor_job(self._try_connect)
if result == RESULT_SUCCESS:
return self.async_create_entry(
title=DOMAIN,
data=self.data,
)
if result != RESULT_AUTH_FAILED:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
return self.async_abort(reason=result)
errors["base"] = "invalid_auth"
return self.async_show_form(
step_id="user",
data_schema=DATA_SCHEMA_USER,
errors=errors,
)
async def async_step_import(self, import_data):
"""Import spider config from configuration.yaml."""
return await self.async_step_user(import_data)
|
from urllib.error import HTTPError
from urllib.error import URLError
from urllib.parse import urlsplit
from urllib.request import urlopen
from bs4 import BeautifulSoup
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.urls import Resolver404
from django.urls import resolve
from django.utils.html import strip_tags
from django.utils.translation import gettext as _
import django_comments as comments
from django_xmlrpc.decorators import xmlrpc_func
from zinnia.flags import PINGBACK
from zinnia.flags import get_user_flagger
from zinnia.models.entry import Entry
from zinnia.settings import PINGBACK_CONTENT_LENGTH
from zinnia.signals import pingback_was_posted
from zinnia.spam_checker import check_is_spam
UNDEFINED_ERROR = 0
SOURCE_DOES_NOT_EXIST = 16
SOURCE_DOES_NOT_LINK = 17
TARGET_DOES_NOT_EXIST = 32
TARGET_IS_NOT_PINGABLE = 33
PINGBACK_ALREADY_REGISTERED = 48
PINGBACK_IS_SPAM = 51
class FakeRequest(object):
META = {}
def generate_pingback_content(soup, target, max_length, trunc_char='...'):
"""
Generate a description text for the pingback.
"""
link = soup.find('a', href=target)
content = strip_tags(str(link.findParent()))
index = content.index(link.string)
if len(content) > max_length:
middle = max_length // 2
start = index - middle
end = index + middle
if start <= 0:
end -= start
extract = content[0:end]
else:
extract = '%s%s' % (trunc_char, content[start:end])
if end < len(content):
extract += trunc_char
return extract
return content
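# Illustrative use of the helper above (hypothetical markup): given parsed HTML
# that links to ``target``, it returns an excerpt of roughly ``max_length``
# characters centred on the anchor text, truncated with '...' where needed.
#   soup = BeautifulSoup('<p>A note about <a href="http://example.com/post/">'
#                        'this post</a> and more.</p>', 'html.parser')
#   generate_pingback_content(soup, 'http://example.com/post/', 20)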
@xmlrpc_func(returns='string', args=['string', 'string'])
def pingback_ping(source, target):
"""
pingback.ping(sourceURI, targetURI) => 'Pingback message'
Notifies the server that a link has been added to sourceURI,
pointing to targetURI.
See: http://hixie.ch/specs/pingback/pingback-1.0
"""
try:
if source == target:
return UNDEFINED_ERROR
site = Site.objects.get_current()
try:
document = ''.join(map(
lambda byte_line: byte_line.decode('utf-8'),
urlopen(source).readlines()))
except (HTTPError, URLError):
return SOURCE_DOES_NOT_EXIST
if target not in document:
return SOURCE_DOES_NOT_LINK
target_splitted = urlsplit(target)
if target_splitted.netloc != site.domain:
return TARGET_DOES_NOT_EXIST
try:
view, args, kwargs = resolve(target_splitted.path)
except Resolver404:
return TARGET_DOES_NOT_EXIST
try:
entry = Entry.published.get(
slug=kwargs['slug'],
publication_date__year=kwargs['year'],
publication_date__month=kwargs['month'],
publication_date__day=kwargs['day'])
if not entry.pingbacks_are_open:
return TARGET_IS_NOT_PINGABLE
except (KeyError, Entry.DoesNotExist):
return TARGET_IS_NOT_PINGABLE
soup = BeautifulSoup(document, 'html.parser')
title = str(soup.find('title'))
title = title and strip_tags(title) or _('No title')
description = generate_pingback_content(soup, target,
PINGBACK_CONTENT_LENGTH)
pingback_klass = comments.get_model()
pingback_datas = {
'content_type': ContentType.objects.get_for_model(Entry),
'object_pk': entry.pk,
'site': site,
'user_url': source,
'user_name': title,
'comment': description
}
pingback = pingback_klass(**pingback_datas)
if check_is_spam(pingback, entry, FakeRequest()):
return PINGBACK_IS_SPAM
pingback_defaults = {'comment': pingback_datas.pop('comment'),
'user_name': pingback_datas.pop('user_name')}
pingback, created = pingback_klass.objects.get_or_create(
defaults=pingback_defaults,
**pingback_datas)
if created:
pingback.flags.create(user=get_user_flagger(), flag=PINGBACK)
pingback_was_posted.send(pingback.__class__,
pingback=pingback,
entry=entry)
return 'Pingback from %s to %s registered.' % (source, target)
return PINGBACK_ALREADY_REGISTERED
except Exception:
return UNDEFINED_ERROR
@xmlrpc_func(returns='string[]', args=['string'])
def pingback_extensions_get_pingbacks(target):
"""
pingback.extensions.getPingbacks(url) => '[url, url, ...]'
Returns an array of URLs that link to the specified url.
See: http://www.aquarionics.com/misc/archives/blogite/0198.html
"""
site = Site.objects.get_current()
target_splitted = urlsplit(target)
if target_splitted.netloc != site.domain:
return TARGET_DOES_NOT_EXIST
try:
view, args, kwargs = resolve(target_splitted.path)
except Resolver404:
return TARGET_DOES_NOT_EXIST
try:
entry = Entry.published.get(
slug=kwargs['slug'],
publication_date__year=kwargs['year'],
publication_date__month=kwargs['month'],
publication_date__day=kwargs['day'])
except (KeyError, Entry.DoesNotExist):
return TARGET_IS_NOT_PINGABLE
return [pingback.user_url for pingback in entry.pingbacks]
|
import unittest
import torch
import torch.nn as tnn
import torch.autograd as autograd
from common import gpu_test
class TestPyTorch(unittest.TestCase):
# PyTorch smoke test based on http://pytorch.org/tutorials/beginner/nlp/deep_learning_tutorial.html
def test_nn(self):
torch.manual_seed(31337)
        linear_torch = tnn.Linear(5, 3)
data_torch = autograd.Variable(torch.randn(2, 5))
linear_torch(data_torch)
@gpu_test
def test_gpu_computation(self):
cuda = torch.device('cuda')
a = torch.tensor([1., 2.], device=cuda)
result = a.sum()
self.assertEqual(torch.tensor([3.], device=cuda), result)
@gpu_test
def test_cuda_nn(self):
# These throw if cuda is misconfigured
        tnn.GRUCell(10, 10).cuda()
        tnn.RNNCell(10, 10).cuda()
        tnn.LSTMCell(10, 10).cuda()
        tnn.GRU(10, 10).cuda()
        tnn.LSTM(10, 10).cuda()
        tnn.RNN(10, 10).cuda()
|
from __future__ import absolute_import
import threading
version = """$Id: 04-1.html,v 1.3 2006/12/05 17:45:12 majid Exp $"""
class RWLock:
"""
    A simple reader-writer lock. Several readers can hold the lock
simultaneously, XOR one writer. Write locks have priority over reads to
prevent write starvation.
"""
def __init__(self):
self.rwlock = 0
self.writers_waiting = 0
self.monitor = threading.Lock()
self.readers_ok = threading.Condition(self.monitor)
self.writers_ok = threading.Condition(self.monitor)
def acquire_read(self):
"""
        Acquire a read lock. Several threads can hold this type of lock.
It is exclusive with write locks.
"""
self.monitor.acquire()
while self.rwlock < 0 or self.writers_waiting:
self.readers_ok.wait()
self.rwlock += 1
self.monitor.release()
def acquire_write(self):
"""
Acquire a write lock. Only one thread can hold this lock, and
only when no read locks are also held.
"""
self.monitor.acquire()
while self.rwlock != 0:
self.writers_waiting += 1
self.writers_ok.wait()
self.writers_waiting -= 1
self.rwlock = -1
self.monitor.release()
def release(self):
"""
Release a lock, whether read or write.
"""
self.monitor.acquire()
if self.rwlock < 0:
self.rwlock = 0
else:
self.rwlock -= 1
wake_writers = self.writers_waiting and self.rwlock == 0
wake_readers = self.writers_waiting == 0
self.monitor.release()
if wake_writers:
self.writers_ok.acquire()
self.writers_ok.notify()
self.writers_ok.release()
elif wake_readers:
self.readers_ok.acquire()
            self.readers_ok.notify_all()
self.readers_ok.release()
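# A minimal usage sketch of RWLock (the helper functions below are hypothetical,
# added only for illustration): readers may overlap, a writer is exclusive.
def _example_reader(lock, shared):
    lock.acquire_read()
    try:
        return list(shared)
    finally:
        lock.release()
def _example_writer(lock, shared, item):
    lock.acquire_write()
    try:
        shared.append(item)
    finally:
        lock.release()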
|
import aiohttp
from homeassistant.components.atag import DOMAIN
from homeassistant.config_entries import ENTRY_STATE_SETUP_RETRY
from homeassistant.core import HomeAssistant
from tests.async_mock import patch
from tests.components.atag import init_integration
from tests.test_util.aiohttp import AiohttpClientMocker
async def test_config_entry_not_ready(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test configuration entry not ready on library error."""
aioclient_mock.post("http://127.0.0.1:10000/retrieve", exc=aiohttp.ClientError)
entry = await init_integration(hass, aioclient_mock)
assert entry.state == ENTRY_STATE_SETUP_RETRY
async def test_config_entry_empty_reply(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test configuration entry not ready when library returns False."""
with patch("pyatag.AtagOne.update", return_value=False):
entry = await init_integration(hass, aioclient_mock)
assert entry.state == ENTRY_STATE_SETUP_RETRY
async def test_unload_config_entry(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test the ATAG configuration entry unloading."""
entry = await init_integration(hass, aioclient_mock)
assert hass.data[DOMAIN]
await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
assert not hass.data.get(DOMAIN)
|
import os
from weblate.addons.base import BaseAddon
from weblate.utils.render import render_template
from weblate.utils.site import get_site_url
class BaseScriptAddon(BaseAddon):
"""Base class for script executing addons."""
icon = "script.svg"
script = None
add_file = None
alert = "AddonScriptError"
def run_script(self, component=None, translation=None, env=None):
command = [self.script]
if translation:
component = translation.component
command.append(translation.get_filename())
if component.is_repo_link:
target = component.linked_component
else:
target = component
environment = {
"WL_VCS": target.vcs,
"WL_REPO": target.repo,
"WL_PATH": target.full_path,
"WL_FILEMASK": component.filemask,
"WL_TEMPLATE": component.template,
"WL_NEW_BASE": component.new_base,
"WL_FILE_FORMAT": component.file_format,
"WL_BRANCH": component.branch,
"WL_COMPONENT_SLUG": component.slug,
"WL_PROJECT_SLUG": component.project.slug,
"WL_COMPONENT_NAME": component.name,
"WL_PROJECT_NAME": component.project.name,
"WL_COMPONENT_URL": get_site_url(component.get_absolute_url()),
"WL_ENGAGE_URL": component.get_share_url(),
}
if translation:
environment["WL_LANGUAGE"] = translation.language_code
if env is not None:
environment.update(env)
self.execute_process(component, command, environment)
self.trigger_alerts(component)
def post_push(self, component):
self.run_script(component)
def post_update(self, component, previous_head: str, skip_push: bool):
self.run_script(component, env={"WL_PREVIOUS_HEAD": previous_head})
def post_commit(self, component):
self.run_script(component=component)
def pre_commit(self, translation, author):
self.run_script(translation=translation)
if self.add_file:
filename = os.path.join(
self.instance.component.full_path,
render_template(self.add_file, translation=translation),
)
translation.addon_commit_files.append(filename)
def post_add(self, translation):
self.run_script(translation=translation)
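# For illustration (hypothetical values): a post_update hook configured with
# ``script = "./hook.sh"`` would be executed roughly as
#   WL_VCS=git WL_REPO=<component repo> WL_PATH=<checkout path> \
#   WL_PREVIOUS_HEAD=<old revision> ... ./hook.sh
# i.e. with the WL_* variables assembled in run_script() plus any extra ``env``.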
|
import logging
import blebox_uniapi
import pytest
from homeassistant.components.cover import (
ATTR_CURRENT_POSITION,
ATTR_POSITION,
DEVICE_CLASS_DOOR,
DEVICE_CLASS_GATE,
DEVICE_CLASS_SHUTTER,
STATE_CLOSED,
STATE_CLOSING,
STATE_OPEN,
STATE_OPENING,
SUPPORT_CLOSE,
SUPPORT_OPEN,
SUPPORT_SET_POSITION,
SUPPORT_STOP,
)
from homeassistant.const import (
ATTR_DEVICE_CLASS,
ATTR_SUPPORTED_FEATURES,
SERVICE_CLOSE_COVER,
SERVICE_OPEN_COVER,
SERVICE_SET_COVER_POSITION,
SERVICE_STOP_COVER,
STATE_UNKNOWN,
)
from .conftest import async_setup_entity, mock_feature
from tests.async_mock import AsyncMock, PropertyMock
ALL_COVER_FIXTURES = ["gatecontroller", "shutterbox", "gatebox"]
FIXTURES_SUPPORTING_STOP = ["gatecontroller", "shutterbox"]
@pytest.fixture(name="shutterbox")
def shutterbox_fixture():
"""Return a shutterBox fixture."""
feature = mock_feature(
"covers",
blebox_uniapi.cover.Cover,
unique_id="BleBox-shutterBox-2bee34e750b8-position",
full_name="shutterBox-position",
device_class="shutter",
current=None,
state=None,
has_stop=True,
is_slider=True,
)
product = feature.product
type(product).name = PropertyMock(return_value="My shutter")
type(product).model = PropertyMock(return_value="shutterBox")
return (feature, "cover.shutterbox_position")
@pytest.fixture(name="gatebox")
def gatebox_fixture():
"""Return a gateBox fixture."""
feature = mock_feature(
"covers",
blebox_uniapi.cover.Cover,
unique_id="BleBox-gateBox-1afe34db9437-position",
device_class="gatebox",
full_name="gateBox-position",
current=None,
state=None,
has_stop=False,
is_slider=False,
)
product = feature.product
type(product).name = PropertyMock(return_value="My gatebox")
type(product).model = PropertyMock(return_value="gateBox")
return (feature, "cover.gatebox_position")
@pytest.fixture(name="gatecontroller")
def gate_fixture():
"""Return a gateController fixture."""
feature = mock_feature(
"covers",
blebox_uniapi.cover.Cover,
unique_id="BleBox-gateController-2bee34e750b8-position",
full_name="gateController-position",
device_class="gate",
current=None,
state=None,
has_stop=True,
is_slider=True,
)
product = feature.product
type(product).name = PropertyMock(return_value="My gate controller")
type(product).model = PropertyMock(return_value="gateController")
return (feature, "cover.gatecontroller_position")
async def test_init_gatecontroller(gatecontroller, hass, config):
"""Test gateController default state."""
_, entity_id = gatecontroller
entry = await async_setup_entity(hass, config, entity_id)
assert entry.unique_id == "BleBox-gateController-2bee34e750b8-position"
state = hass.states.get(entity_id)
assert state.name == "gateController-position"
assert state.attributes[ATTR_DEVICE_CLASS] == DEVICE_CLASS_GATE
supported_features = state.attributes[ATTR_SUPPORTED_FEATURES]
assert supported_features & SUPPORT_OPEN
assert supported_features & SUPPORT_CLOSE
assert supported_features & SUPPORT_STOP
assert supported_features & SUPPORT_SET_POSITION
assert ATTR_CURRENT_POSITION not in state.attributes
assert state.state == STATE_UNKNOWN
device_registry = await hass.helpers.device_registry.async_get_registry()
device = device_registry.async_get(entry.device_id)
assert device.name == "My gate controller"
assert device.identifiers == {("blebox", "abcd0123ef5678")}
assert device.manufacturer == "BleBox"
assert device.model == "gateController"
assert device.sw_version == "1.23"
async def test_init_shutterbox(shutterbox, hass, config):
"""Test gateBox default state."""
_, entity_id = shutterbox
entry = await async_setup_entity(hass, config, entity_id)
assert entry.unique_id == "BleBox-shutterBox-2bee34e750b8-position"
state = hass.states.get(entity_id)
assert state.name == "shutterBox-position"
assert entry.device_class == DEVICE_CLASS_SHUTTER
supported_features = state.attributes[ATTR_SUPPORTED_FEATURES]
assert supported_features & SUPPORT_OPEN
assert supported_features & SUPPORT_CLOSE
assert supported_features & SUPPORT_STOP
assert supported_features & SUPPORT_SET_POSITION
assert ATTR_CURRENT_POSITION not in state.attributes
assert state.state == STATE_UNKNOWN
device_registry = await hass.helpers.device_registry.async_get_registry()
device = device_registry.async_get(entry.device_id)
assert device.name == "My shutter"
assert device.identifiers == {("blebox", "abcd0123ef5678")}
assert device.manufacturer == "BleBox"
assert device.model == "shutterBox"
assert device.sw_version == "1.23"
async def test_init_gatebox(gatebox, hass, config):
"""Test cover default state."""
_, entity_id = gatebox
entry = await async_setup_entity(hass, config, entity_id)
assert entry.unique_id == "BleBox-gateBox-1afe34db9437-position"
state = hass.states.get(entity_id)
assert state.name == "gateBox-position"
assert state.attributes[ATTR_DEVICE_CLASS] == DEVICE_CLASS_DOOR
supported_features = state.attributes[ATTR_SUPPORTED_FEATURES]
assert supported_features & SUPPORT_OPEN
assert supported_features & SUPPORT_CLOSE
    # Not available during init since it requires fetching the state to detect
assert not supported_features & SUPPORT_STOP
assert not supported_features & SUPPORT_SET_POSITION
assert ATTR_CURRENT_POSITION not in state.attributes
assert state.state == STATE_UNKNOWN
device_registry = await hass.helpers.device_registry.async_get_registry()
device = device_registry.async_get(entry.device_id)
assert device.name == "My gatebox"
assert device.identifiers == {("blebox", "abcd0123ef5678")}
assert device.manufacturer == "BleBox"
assert device.model == "gateBox"
assert device.sw_version == "1.23"
@pytest.mark.parametrize("feature", ALL_COVER_FIXTURES, indirect=["feature"])
async def test_open(feature, hass, config):
"""Test cover opening."""
feature_mock, entity_id = feature
def initial_update():
        feature_mock.state = 3  # closed
def open_gate():
feature_mock.state = 1 # opening
feature_mock.async_update = AsyncMock(side_effect=initial_update)
feature_mock.async_open = AsyncMock(side_effect=open_gate)
await async_setup_entity(hass, config, entity_id)
assert hass.states.get(entity_id).state == STATE_CLOSED
feature_mock.async_update = AsyncMock()
await hass.services.async_call(
"cover",
SERVICE_OPEN_COVER,
{"entity_id": entity_id},
blocking=True,
)
assert hass.states.get(entity_id).state == STATE_OPENING
@pytest.mark.parametrize("feature", ALL_COVER_FIXTURES, indirect=["feature"])
async def test_close(feature, hass, config):
"""Test cover closing."""
feature_mock, entity_id = feature
def initial_update():
feature_mock.state = 4 # open
def close():
feature_mock.state = 0 # closing
feature_mock.async_update = AsyncMock(side_effect=initial_update)
feature_mock.async_close = AsyncMock(side_effect=close)
await async_setup_entity(hass, config, entity_id)
assert hass.states.get(entity_id).state == STATE_OPEN
feature_mock.async_update = AsyncMock()
await hass.services.async_call(
"cover", SERVICE_CLOSE_COVER, {"entity_id": entity_id}, blocking=True
)
assert hass.states.get(entity_id).state == STATE_CLOSING
def opening_to_stop_feature_mock(feature_mock):
"""Return an mocked feature which can be updated and stopped."""
def initial_update():
feature_mock.state = 1 # opening
def stop():
feature_mock.state = 2 # manually stopped
feature_mock.async_update = AsyncMock(side_effect=initial_update)
feature_mock.async_stop = AsyncMock(side_effect=stop)
@pytest.mark.parametrize("feature", FIXTURES_SUPPORTING_STOP, indirect=["feature"])
async def test_stop(feature, hass, config):
"""Test cover stopping."""
feature_mock, entity_id = feature
opening_to_stop_feature_mock(feature_mock)
await async_setup_entity(hass, config, entity_id)
assert hass.states.get(entity_id).state == STATE_OPENING
feature_mock.async_update = AsyncMock()
await hass.services.async_call(
"cover", SERVICE_STOP_COVER, {"entity_id": entity_id}, blocking=True
)
assert hass.states.get(entity_id).state == STATE_OPEN
@pytest.mark.parametrize("feature", ALL_COVER_FIXTURES, indirect=["feature"])
async def test_update(feature, hass, config):
"""Test cover updating."""
feature_mock, entity_id = feature
def initial_update():
feature_mock.current = 29 # inverted
feature_mock.state = 2 # manually stopped
feature_mock.async_update = AsyncMock(side_effect=initial_update)
await async_setup_entity(hass, config, entity_id)
state = hass.states.get(entity_id)
assert state.attributes[ATTR_CURRENT_POSITION] == 71 # 100 - 29
assert state.state == STATE_OPEN
@pytest.mark.parametrize(
"feature", ["gatecontroller", "shutterbox"], indirect=["feature"]
)
async def test_set_position(feature, hass, config):
"""Test cover position setting."""
feature_mock, entity_id = feature
def initial_update():
feature_mock.state = 3 # closed
def set_position(position):
assert position == 99 # inverted
feature_mock.state = 1 # opening
# feature_mock.current = position
feature_mock.async_update = AsyncMock(side_effect=initial_update)
feature_mock.async_set_position = AsyncMock(side_effect=set_position)
await async_setup_entity(hass, config, entity_id)
assert hass.states.get(entity_id).state == STATE_CLOSED
feature_mock.async_update = AsyncMock()
await hass.services.async_call(
"cover",
SERVICE_SET_COVER_POSITION,
{"entity_id": entity_id, ATTR_POSITION: 1},
blocking=True,
) # almost closed
assert hass.states.get(entity_id).state == STATE_OPENING
async def test_unknown_position(shutterbox, hass, config):
"""Test cover position setting."""
feature_mock, entity_id = shutterbox
def initial_update():
        feature_mock.state = 4  # open
feature_mock.current = -1
feature_mock.async_update = AsyncMock(side_effect=initial_update)
await async_setup_entity(hass, config, entity_id)
state = hass.states.get(entity_id)
assert state.state == STATE_OPEN
assert ATTR_CURRENT_POSITION not in state.attributes
async def test_with_stop(gatebox, hass, config):
"""Test stop capability is available."""
feature_mock, entity_id = gatebox
opening_to_stop_feature_mock(feature_mock)
feature_mock.has_stop = True
await async_setup_entity(hass, config, entity_id)
state = hass.states.get(entity_id)
supported_features = state.attributes[ATTR_SUPPORTED_FEATURES]
assert supported_features & SUPPORT_STOP
async def test_with_no_stop(gatebox, hass, config):
"""Test stop capability is not available."""
feature_mock, entity_id = gatebox
opening_to_stop_feature_mock(feature_mock)
feature_mock.has_stop = False
await async_setup_entity(hass, config, entity_id)
state = hass.states.get(entity_id)
supported_features = state.attributes[ATTR_SUPPORTED_FEATURES]
assert not supported_features & SUPPORT_STOP
@pytest.mark.parametrize("feature", ALL_COVER_FIXTURES, indirect=["feature"])
async def test_update_failure(feature, hass, config, caplog):
"""Test that update failures are logged."""
caplog.set_level(logging.ERROR)
feature_mock, entity_id = feature
feature_mock.async_update = AsyncMock(side_effect=blebox_uniapi.error.ClientError)
await async_setup_entity(hass, config, entity_id)
assert f"Updating '{feature_mock.full_name}' failed: " in caplog.text
@pytest.mark.parametrize("feature", ALL_COVER_FIXTURES, indirect=["feature"])
async def test_opening_state(feature, hass, config):
"""Test that entity properties work."""
feature_mock, entity_id = feature
def initial_update():
feature_mock.state = 1 # opening
feature_mock.async_update = AsyncMock(side_effect=initial_update)
await async_setup_entity(hass, config, entity_id)
assert hass.states.get(entity_id).state == STATE_OPENING
@pytest.mark.parametrize("feature", ALL_COVER_FIXTURES, indirect=["feature"])
async def test_closing_state(feature, hass, config):
"""Test that entity properties work."""
feature_mock, entity_id = feature
def initial_update():
feature_mock.state = 0 # closing
feature_mock.async_update = AsyncMock(side_effect=initial_update)
await async_setup_entity(hass, config, entity_id)
assert hass.states.get(entity_id).state == STATE_CLOSING
@pytest.mark.parametrize("feature", ALL_COVER_FIXTURES, indirect=["feature"])
async def test_closed_state(feature, hass, config):
"""Test that entity properties work."""
feature_mock, entity_id = feature
def initial_update():
feature_mock.state = 3 # closed
feature_mock.async_update = AsyncMock(side_effect=initial_update)
await async_setup_entity(hass, config, entity_id)
assert hass.states.get(entity_id).state == STATE_CLOSED
|
from twilio.rest import Client
from twilio.twiml import TwiML
import voluptuous as vol
from homeassistant.const import CONF_WEBHOOK_ID
from homeassistant.helpers import config_entry_flow
import homeassistant.helpers.config_validation as cv
from .const import DOMAIN
CONF_ACCOUNT_SID = "account_sid"
CONF_AUTH_TOKEN = "auth_token"
DATA_TWILIO = DOMAIN
RECEIVED_DATA = f"{DOMAIN}_data_received"
CONFIG_SCHEMA = vol.Schema(
{
vol.Optional(DOMAIN): vol.Schema(
{
vol.Required(CONF_ACCOUNT_SID): cv.string,
vol.Required(CONF_AUTH_TOKEN): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
"""Set up the Twilio component."""
if DOMAIN not in config:
return True
conf = config[DOMAIN]
hass.data[DATA_TWILIO] = Client(
conf.get(CONF_ACCOUNT_SID), conf.get(CONF_AUTH_TOKEN)
)
return True
async def handle_webhook(hass, webhook_id, request):
"""Handle incoming webhook from Twilio for inbound messages and calls."""
data = dict(await request.post())
data["webhook_id"] = webhook_id
hass.bus.async_fire(RECEIVED_DATA, dict(data))
return TwiML().to_xml()
async def async_setup_entry(hass, entry):
"""Configure based on config entry."""
hass.components.webhook.async_register(
DOMAIN, "Twilio", entry.data[CONF_WEBHOOK_ID], handle_webhook
)
return True
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
hass.components.webhook.async_unregister(entry.data[CONF_WEBHOOK_ID])
return True
async_remove_entry = config_entry_flow.webhook_async_remove_entry
|
import logging
from gi.repository import Gio, GLib, GObject, GtkSource
from meld.conf import _
from meld.settings import bind_settings
log = logging.getLogger(__name__)
class MeldBuffer(GtkSource.Buffer):
__gtype_name__ = "MeldBuffer"
__gsettings_bindings__ = (
('highlight-syntax', 'highlight-syntax'),
)
def __init__(self):
super().__init__()
bind_settings(self)
self.data = MeldBufferData()
self.undo_sequence = None
def do_begin_user_action(self, *args):
if self.undo_sequence:
self.undo_sequence.begin_group()
def do_end_user_action(self, *args):
if self.undo_sequence:
self.undo_sequence.end_group()
def get_iter_at_line_or_eof(self, line):
"""Return a Gtk.TextIter at the given line, or the end of the buffer.
This method is like get_iter_at_line, but if asked for a position past
the end of the buffer, this returns the end of the buffer; the
get_iter_at_line behaviour is to return the start of the last line in
the buffer.
"""
if line >= self.get_line_count():
return self.get_end_iter()
return self.get_iter_at_line(line)
def insert_at_line(self, line, text):
"""Insert text at the given line, or the end of the buffer.
This method is like insert, but if asked to insert something past the
last line in the buffer, this will insert at the end, and will add a
linebreak before the inserted text. The last line in a Gtk.TextBuffer
is guaranteed never to have a newline, so we need to handle this.
"""
if line >= self.get_line_count():
# TODO: We need to insert a linebreak here, but there is no
# way to be certain what kind of linebreak to use.
text = "\n" + text
it = self.get_iter_at_line_or_eof(line)
self.insert(it, text)
return it
class MeldBufferData(GObject.GObject):
@GObject.Signal('file-changed')
def file_changed_signal(self) -> None:
...
encoding = GObject.Property(
type=GtkSource.Encoding,
nick="The file encoding of the linked GtkSourceFile",
default=GtkSource.Encoding.get_utf8(),
)
def __init__(self):
super().__init__()
self._gfile = None
self._label = None
self._monitor = None
self._sourcefile = None
self.reset(gfile=None)
def reset(self, gfile):
same_file = gfile and self._gfile and gfile.equal(self._gfile)
self.gfile = gfile
if same_file:
self.label = self._label
else:
self.label = gfile.get_parse_name() if gfile else None
self.loaded = False
self.savefile = None
def __del__(self):
self.disconnect_monitor()
@property
def label(self):
# TRANSLATORS: This is the label of a new, currently-unnamed file.
return self._label or _("<unnamed>")
@label.setter
def label(self, value):
if not value:
return
if not isinstance(value, str):
log.warning('Invalid label ignored "%r"', value)
return
self._label = value
def connect_monitor(self):
if not self._gfile:
return
monitor = self._gfile.monitor_file(Gio.FileMonitorFlags.NONE, None)
handler_id = monitor.connect('changed', self._handle_file_change)
self._monitor = monitor, handler_id
def disconnect_monitor(self):
if not self._monitor:
return
monitor, handler_id = self._monitor
monitor.disconnect(handler_id)
monitor.cancel()
self._monitor = None
def _query_mtime(self, gfile):
try:
time_query = ",".join((Gio.FILE_ATTRIBUTE_TIME_MODIFIED,
Gio.FILE_ATTRIBUTE_TIME_MODIFIED_USEC))
info = gfile.query_info(time_query, 0, None)
except GLib.GError:
return None
mtime = info.get_modification_time()
return (mtime.tv_sec, mtime.tv_usec)
def _handle_file_change(self, monitor, f, other_file, event_type):
mtime = self._query_mtime(f)
if self._disk_mtime and mtime and mtime > self._disk_mtime:
self.file_changed_signal.emit()
self._disk_mtime = mtime or self._disk_mtime
@property
def gfile(self):
return self._gfile
@gfile.setter
def gfile(self, value):
self.disconnect_monitor()
self._gfile = value
self._sourcefile = GtkSource.File()
self._sourcefile.set_location(value)
self._sourcefile.bind_property(
'encoding', self, 'encoding', GObject.BindingFlags.DEFAULT)
self.update_mtime()
self.connect_monitor()
@property
def sourcefile(self):
return self._sourcefile
@property
def gfiletarget(self):
return self.savefile or self.gfile
@property
def is_special(self):
try:
info = self._gfile.query_info(
Gio.FILE_ATTRIBUTE_STANDARD_TYPE, 0, None)
return info.get_file_type() == Gio.FileType.SPECIAL
except (AttributeError, GLib.GError):
return False
@property
def writable(self):
try:
info = self.gfiletarget.query_info(
Gio.FILE_ATTRIBUTE_ACCESS_CAN_WRITE, 0, None)
except GLib.GError as err:
if err.code == Gio.IOErrorEnum.NOT_FOUND:
return True
return False
except AttributeError:
return False
return info.get_attribute_boolean(Gio.FILE_ATTRIBUTE_ACCESS_CAN_WRITE)
def update_mtime(self):
if self._gfile:
self._disk_mtime = self._query_mtime(self._gfile)
self._mtime = self._disk_mtime
def current_on_disk(self):
return self._mtime == self._disk_mtime
class BufferLines:
"""Gtk.TextBuffer shim with line-based access and optional filtering
This class allows a Gtk.TextBuffer to be treated as a list of lines of
possibly-filtered text. If no filter is given, the raw output from the
Gtk.TextBuffer is used.
"""
def __init__(self, buf, textfilter=None):
self.buf = buf
if textfilter is not None:
self.textfilter = textfilter
else:
self.textfilter = lambda x, buf, start_iter, end_iter: x
def __getitem__(self, key):
if isinstance(key, slice):
lo, hi, _ = key.indices(self.buf.get_line_count())
line_start = self.buf.get_iter_at_line_or_eof(lo)
end = self.buf.get_iter_at_line_or_eof(hi)
lines = []
while line_start.compare(end) < 0:
line_end = line_start.copy()
if not line_end.ends_line():
line_end.forward_to_line_end()
txt = self.buf.get_text(line_start, line_end, False)
filter_txt = self.textfilter(txt, self.buf, line_start, end)
lines.append(filter_txt)
line_start.forward_visible_line()
return lines
elif isinstance(key, int):
if key >= len(self):
raise IndexError
line_start = self.buf.get_iter_at_line_or_eof(key)
line_end = line_start.copy()
if not line_end.ends_line():
line_end.forward_to_line_end()
txt = self.buf.get_text(line_start, line_end, False)
return self.textfilter(txt, self.buf, line_start, line_end)
def __len__(self):
return self.buf.get_line_count()
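# A minimal usage sketch (assumes a MeldBuffer, since BufferLines relies on
# get_iter_at_line_or_eof); the filter argument is optional:
#   buf = MeldBuffer()
#   buf.set_text("one\ntwo\nthree")
#   lines = BufferLines(buf, textfilter=lambda txt, buf, start, end: txt.upper())
#   lines[0:2]  # -> ['ONE', 'TWO']
#   len(lines)  # -> 3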
class BufferAction:
"""A helper to undo/redo text insertion/deletion into/from a text buffer"""
def __init__(self, buf, offset, text):
self.buffer = buf
self.offset = offset
self.text = text
def delete(self):
start = self.buffer.get_iter_at_offset(self.offset)
end = self.buffer.get_iter_at_offset(self.offset + len(self.text))
self.buffer.delete(start, end)
self.buffer.place_cursor(end)
return [self]
def insert(self):
start = self.buffer.get_iter_at_offset(self.offset)
self.buffer.place_cursor(start)
self.buffer.insert(start, self.text)
return [self]
class BufferInsertionAction(BufferAction):
undo = BufferAction.delete
redo = BufferAction.insert
class BufferDeletionAction(BufferAction):
undo = BufferAction.insert
redo = BufferAction.delete
|
from typing import List, Optional
import voluptuous as vol
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
CONF_DEVICE_ID,
CONF_DOMAIN,
CONF_ENTITY_ID,
CONF_TYPE,
SERVICE_CLOSE_COVER,
SERVICE_CLOSE_COVER_TILT,
SERVICE_OPEN_COVER,
SERVICE_OPEN_COVER_TILT,
SERVICE_SET_COVER_POSITION,
SERVICE_SET_COVER_TILT_POSITION,
SERVICE_STOP_COVER,
)
from homeassistant.core import Context, HomeAssistant
from homeassistant.helpers import entity_registry
import homeassistant.helpers.config_validation as cv
from . import (
ATTR_POSITION,
ATTR_TILT_POSITION,
DOMAIN,
SUPPORT_CLOSE,
SUPPORT_CLOSE_TILT,
SUPPORT_OPEN,
SUPPORT_OPEN_TILT,
SUPPORT_SET_POSITION,
SUPPORT_SET_TILT_POSITION,
SUPPORT_STOP,
)
CMD_ACTION_TYPES = {"open", "close", "stop", "open_tilt", "close_tilt"}
POSITION_ACTION_TYPES = {"set_position", "set_tilt_position"}
CMD_ACTION_SCHEMA = cv.DEVICE_ACTION_BASE_SCHEMA.extend(
{
vol.Required(CONF_TYPE): vol.In(CMD_ACTION_TYPES),
vol.Required(CONF_ENTITY_ID): cv.entity_domain(DOMAIN),
}
)
POSITION_ACTION_SCHEMA = cv.DEVICE_ACTION_BASE_SCHEMA.extend(
{
vol.Required(CONF_TYPE): vol.In(POSITION_ACTION_TYPES),
vol.Required(CONF_ENTITY_ID): cv.entity_domain(DOMAIN),
vol.Required("position"): vol.All(vol.Coerce(int), vol.Range(min=0, max=100)),
}
)
ACTION_SCHEMA = vol.Any(CMD_ACTION_SCHEMA, POSITION_ACTION_SCHEMA)
async def async_get_actions(hass: HomeAssistant, device_id: str) -> List[dict]:
"""List device actions for Cover devices."""
registry = await entity_registry.async_get_registry(hass)
actions = []
# Get all the integrations entities for this device
for entry in entity_registry.async_entries_for_device(registry, device_id):
if entry.domain != DOMAIN:
continue
state = hass.states.get(entry.entity_id)
if not state or ATTR_SUPPORTED_FEATURES not in state.attributes:
continue
supported_features = state.attributes[ATTR_SUPPORTED_FEATURES]
# Add actions for each entity that belongs to this integration
if supported_features & SUPPORT_SET_POSITION:
actions.append(
{
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "set_position",
}
)
else:
if supported_features & SUPPORT_OPEN:
actions.append(
{
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "open",
}
)
if supported_features & SUPPORT_CLOSE:
actions.append(
{
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "close",
}
)
if supported_features & SUPPORT_STOP:
actions.append(
{
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "stop",
}
)
if supported_features & SUPPORT_SET_TILT_POSITION:
actions.append(
{
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "set_tilt_position",
}
)
else:
if supported_features & SUPPORT_OPEN_TILT:
actions.append(
{
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "open_tilt",
}
)
if supported_features & SUPPORT_CLOSE_TILT:
actions.append(
{
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "close_tilt",
}
)
return actions
async def async_get_action_capabilities(hass: HomeAssistant, config: dict) -> dict:
"""List action capabilities."""
if config[CONF_TYPE] not in POSITION_ACTION_TYPES:
return {}
return {
"extra_fields": vol.Schema(
{
vol.Optional("position", default=0): vol.All(
vol.Coerce(int), vol.Range(min=0, max=100)
)
}
)
}
async def async_call_action_from_config(
hass: HomeAssistant, config: dict, variables: dict, context: Optional[Context]
) -> None:
"""Execute a device action."""
config = ACTION_SCHEMA(config)
service_data = {ATTR_ENTITY_ID: config[CONF_ENTITY_ID]}
if config[CONF_TYPE] == "open":
service = SERVICE_OPEN_COVER
elif config[CONF_TYPE] == "close":
service = SERVICE_CLOSE_COVER
elif config[CONF_TYPE] == "stop":
service = SERVICE_STOP_COVER
elif config[CONF_TYPE] == "open_tilt":
service = SERVICE_OPEN_COVER_TILT
elif config[CONF_TYPE] == "close_tilt":
service = SERVICE_CLOSE_COVER_TILT
elif config[CONF_TYPE] == "set_position":
service = SERVICE_SET_COVER_POSITION
service_data[ATTR_POSITION] = config["position"]
elif config[CONF_TYPE] == "set_tilt_position":
service = SERVICE_SET_COVER_TILT_POSITION
service_data[ATTR_TILT_POSITION] = config["position"]
await hass.services.async_call(
DOMAIN, service, service_data, blocking=True, context=context
)
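# Illustrative action config accepted by async_call_action_from_config
# (device and entity IDs are hypothetical):
#   {
#       "device_id": "abcdef0123456789",
#       "domain": "cover",
#       "entity_id": "cover.garage_door",
#       "type": "set_position",
#       "position": 50,
#   }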
|
from homeassistant.const import DEVICE_CLASS_POWER, ENERGY_WATT_HOUR, POWER_WATT, VOLT
from homeassistant.helpers.entity import Entity
from .const import DOMAIN
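# Each sensor definition below is a positional list:
#   [friendly name, icon, unit of measurement, sensor key, device class, extra]
# where "extra" is True/False for cloud+local availability in TREND_SENSORS and
# SOLAR_SENSORS (REACTIVE_SENSORS omits it) and, in VOLTAGE_SENSORS, the list
# of phase types the sensor applies to.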
TREND_SENSORS = {
"total_power": [
"Total consumption - Active power",
None,
POWER_WATT,
"total_power",
DEVICE_CLASS_POWER,
True, # both cloud and local
],
"alwayson": [
"Always on - Active power",
None,
POWER_WATT,
"alwayson",
DEVICE_CLASS_POWER,
False, # cloud only
],
"power_today": [
"Total consumption - Today",
"mdi:power-plug",
ENERGY_WATT_HOUR,
"power_today",
None,
False, # cloud only
],
"power_current_hour": [
"Total consumption - Current hour",
"mdi:power-plug",
ENERGY_WATT_HOUR,
"power_current_hour",
None,
False, # cloud only
],
"power_last_5_minutes": [
"Total consumption - Last 5 minutes",
"mdi:power-plug",
ENERGY_WATT_HOUR,
"power_last_5_minutes",
None,
False, # cloud only
],
"alwayson_today": [
"Always on - Today",
"mdi:sleep",
ENERGY_WATT_HOUR,
"alwayson_today",
None,
False, # cloud only
],
}
REACTIVE_SENSORS = {
"total_reactive_power": [
"Total consumption - Reactive power",
None,
POWER_WATT,
"total_reactive_power",
DEVICE_CLASS_POWER,
]
}
SOLAR_SENSORS = {
"solar_power": [
"Total production - Active power",
None,
POWER_WATT,
"solar_power",
DEVICE_CLASS_POWER,
True, # both cloud and local
],
"solar_today": [
"Total production - Today",
"mdi:white-balance-sunny",
ENERGY_WATT_HOUR,
"solar_today",
None,
False, # cloud only
],
"solar_current_hour": [
"Total production - Current hour",
"mdi:white-balance-sunny",
ENERGY_WATT_HOUR,
"solar_current_hour",
None,
False, # cloud only
],
}
VOLTAGE_SENSORS = {
"phase_voltages_a": [
"Phase voltages - A",
"mdi:flash",
VOLT,
"phase_voltage_a",
None,
["ONE", "TWO", "THREE_STAR", "THREE_DELTA"],
],
"phase_voltages_b": [
"Phase voltages - B",
"mdi:flash",
VOLT,
"phase_voltage_b",
None,
["TWO", "THREE_STAR", "THREE_DELTA"],
],
"phase_voltages_c": [
"Phase voltages - C",
"mdi:flash",
VOLT,
"phase_voltage_c",
None,
["THREE_STAR"],
],
"line_voltages_a": [
"Line voltages - A",
"mdi:flash",
VOLT,
"line_voltage_a",
None,
["ONE", "TWO", "THREE_STAR", "THREE_DELTA"],
],
"line_voltages_b": [
"Line voltages - B",
"mdi:flash",
VOLT,
"line_voltage_b",
None,
["TWO", "THREE_STAR", "THREE_DELTA"],
],
"line_voltages_c": [
"Line voltages - C",
"mdi:flash",
VOLT,
"line_voltage_c",
None,
["THREE_STAR", "THREE_DELTA"],
],
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Smappee sensor."""
smappee_base = hass.data[DOMAIN][config_entry.entry_id]
entities = []
for service_location in smappee_base.smappee.service_locations.values():
# Add all basic sensors (realtime values and aggregators)
# Some are available in local only env
for sensor in TREND_SENSORS:
if not service_location.local_polling or TREND_SENSORS[sensor][5]:
entities.append(
SmappeeSensor(
smappee_base=smappee_base,
service_location=service_location,
sensor=sensor,
attributes=TREND_SENSORS[sensor],
)
)
if service_location.has_reactive_value:
for reactive_sensor in REACTIVE_SENSORS:
entities.append(
SmappeeSensor(
smappee_base=smappee_base,
service_location=service_location,
sensor=reactive_sensor,
attributes=REACTIVE_SENSORS[reactive_sensor],
)
)
# Add solar sensors (some are available in local only env)
if service_location.has_solar_production:
for sensor in SOLAR_SENSORS:
if not service_location.local_polling or SOLAR_SENSORS[sensor][5]:
entities.append(
SmappeeSensor(
smappee_base=smappee_base,
service_location=service_location,
sensor=sensor,
attributes=SOLAR_SENSORS[sensor],
)
)
# Add all CT measurements
for measurement_id, measurement in service_location.measurements.items():
entities.append(
SmappeeSensor(
smappee_base=smappee_base,
service_location=service_location,
sensor="load",
attributes=[
measurement.name,
None,
POWER_WATT,
measurement_id,
DEVICE_CLASS_POWER,
],
)
)
# Add phase- and line voltages if available
if service_location.has_voltage_values:
for sensor_name, sensor in VOLTAGE_SENSORS.items():
if service_location.phase_type in sensor[5]:
entities.append(
SmappeeSensor(
smappee_base=smappee_base,
service_location=service_location,
sensor=sensor_name,
attributes=sensor,
)
)
# Add Gas and Water sensors
for sensor_id, sensor in service_location.sensors.items():
for channel in sensor.channels:
gw_icon = "mdi:gas-cylinder"
if channel.get("type") == "water":
gw_icon = "mdi:water"
entities.append(
SmappeeSensor(
smappee_base=smappee_base,
service_location=service_location,
sensor="sensor",
attributes=[
channel.get("name"),
gw_icon,
channel.get("uom"),
f"{sensor_id}-{channel.get('channel')}",
None,
],
)
)
async_add_entities(entities, True)
class SmappeeSensor(Entity):
"""Implementation of a Smappee sensor."""
def __init__(self, smappee_base, service_location, sensor, attributes):
"""Initialize the Smappee sensor."""
self._smappee_base = smappee_base
self._service_location = service_location
self._sensor = sensor
self.data = None
self._state = None
self._name = attributes[0]
self._icon = attributes[1]
self._unit_of_measurement = attributes[2]
self._sensor_id = attributes[3]
self._device_class = attributes[4]
@property
def name(self):
"""Return the name for this sensor."""
if self._sensor in ["sensor", "load"]:
return (
f"{self._service_location.service_location_name} - "
f"{self._sensor.title()} - {self._name}"
)
return f"{self._service_location.service_location_name} - {self._name}"
@property
def icon(self):
"""Icon to use in the frontend."""
return self._icon
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return self._device_class
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
@property
def unique_id(
self,
):
"""Return the unique ID for this sensor."""
if self._sensor in ["load", "sensor"]:
return (
f"{self._service_location.device_serial_number}-"
f"{self._service_location.service_location_id}-"
f"{self._sensor}-{self._sensor_id}"
)
return (
f"{self._service_location.device_serial_number}-"
f"{self._service_location.service_location_id}-"
f"{self._sensor}"
)
@property
def device_info(self):
"""Return the device info for this sensor."""
return {
"identifiers": {(DOMAIN, self._service_location.device_serial_number)},
"name": self._service_location.service_location_name,
"manufacturer": "Smappee",
"model": self._service_location.device_model,
"sw_version": self._service_location.firmware_version,
}
async def async_update(self):
"""Get the latest data from Smappee and update the state."""
await self._smappee_base.async_update()
if self._sensor == "total_power":
self._state = self._service_location.total_power
elif self._sensor == "total_reactive_power":
self._state = self._service_location.total_reactive_power
elif self._sensor == "solar_power":
self._state = self._service_location.solar_power
elif self._sensor == "alwayson":
self._state = self._service_location.alwayson
elif self._sensor in [
"phase_voltages_a",
"phase_voltages_b",
"phase_voltages_c",
]:
phase_voltages = self._service_location.phase_voltages
if phase_voltages is not None:
if self._sensor == "phase_voltages_a":
self._state = phase_voltages[0]
elif self._sensor == "phase_voltages_b":
self._state = phase_voltages[1]
elif self._sensor == "phase_voltages_c":
self._state = phase_voltages[2]
elif self._sensor in ["line_voltages_a", "line_voltages_b", "line_voltages_c"]:
line_voltages = self._service_location.line_voltages
if line_voltages is not None:
if self._sensor == "line_voltages_a":
self._state = line_voltages[0]
elif self._sensor == "line_voltages_b":
self._state = line_voltages[1]
elif self._sensor == "line_voltages_c":
self._state = line_voltages[2]
elif self._sensor in [
"power_today",
"power_current_hour",
"power_last_5_minutes",
"solar_today",
"solar_current_hour",
"alwayson_today",
]:
trend_value = self._service_location.aggregated_values.get(self._sensor)
self._state = round(trend_value) if trend_value is not None else None
elif self._sensor == "load":
self._state = self._service_location.measurements.get(
self._sensor_id
).active_total
elif self._sensor == "sensor":
sensor_id, channel_id = self._sensor_id.split("-")
sensor = self._service_location.sensors.get(int(sensor_id))
for channel in sensor.channels:
if channel.get("channel") == int(channel_id):
self._state = channel.get("value_today")
|
import argparse
import logging
import os
import pkgutil
import subprocess
import sys
import warnings
from typing import Any
from typing import List
from typing import Tuple
import argcomplete
import paasta_tools
from paasta_tools.cli import cmds
def load_method(module_name, method_name):
"""Return a function given a module and method name.
:param module_name: a string
:param method_name: a string
:return: a function
"""
module = __import__(module_name, fromlist=[method_name])
method = getattr(module, method_name)
return method
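# Illustrative note (not part of the original module): load_method is the
# dynamic equivalent of a plain import. For the "list" subcommand,
#
#     add_subparser_fn = load_method("paasta_tools.cli.cmds.list", "add_subparser")
#
# resolves the same callable as `from paasta_tools.cli.cmds.list import add_subparser`.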
def modules_in_pkg(pkg):
"""Return the list of modules in a python package (a module with a
__init__.py file.)
:return: a list of strings such as `['list', 'check']` that correspond to
the module names in the package.
"""
for _, module_name, _ in pkgutil.walk_packages(pkg.__path__):
yield module_name
class PrintsHelpOnErrorArgumentParser(argparse.ArgumentParser):
"""Overriding the error method allows us to print the whole help page,
otherwise the python arg parser prints a not-so-useful usage message that
is way too terse"""
def error(self, message):
print(f"Argument parse error: {message}\n")
self.print_help()
sys.exit(1)
def list_external_commands():
p = subprocess.check_output(["/bin/bash", "-p", "-c", "compgen -A command paasta-"])
lines = p.decode("utf-8").strip().split("\n")
    return {line.replace("paasta-", "", 1) for line in lines}
def calling_external_command():
if len(sys.argv) > 1:
return sys.argv[1] in list_external_commands()
else:
return False
def exec_subcommand(argv):
command = sys.argv[1]
os.execlp(f"paasta-{command}", *argv[1:])
def add_subparser(command, subparsers):
"""Given a command name, paasta_cmd, execute the add_subparser method
implemented in paasta_cmd.py.
Each paasta client command must implement a method called add_subparser.
This allows the client to dynamically add subparsers to its subparser, which
provides the benefits of argcomplete/argparse but gets it done in a modular
fashion.
:param command: a simple string - e.g. 'list'
:param subparsers: an ArgumentParser object"""
module_name = "paasta_tools.cli.cmds.%s" % command
add_subparser_fn = load_method(module_name, "add_subparser")
add_subparser_fn(subparsers)
PAASTA_SUBCOMMANDS = {
"autoscale": "autoscale",
"boost": "boost",
"check": "check",
"cook-image": "cook_image",
"get-docker-image": "get_docker_image",
"get-latest-deployment": "get_latest_deployment",
"info": "info",
"itest": "itest",
"list-clusters": "list_clusters",
"list-deploy-queue": "list_deploy_queue",
"list": "list",
"local-run": "local_run",
"logs": "logs",
"mark-for-deployment": "mark_for_deployment",
"metastatus": "metastatus",
"pause_service_autoscaler": "pause_service_autoscaler",
"performance-check": "performance_check",
"push-to-registry": "push_to_registry",
"remote-run": "remote_run",
"rollback": "rollback",
"secret": "secret",
"security-check": "security_check",
"spark-run": "spark_run",
"start": "start_stop_restart",
"stop": "start_stop_restart",
"restart": "start_stop_restart",
"status": "status",
"sysdig": "sysdig",
"validate": "validate",
"wait-for-deployment": "wait_for_deployment",
}
def get_argparser(commands=None):
"""Create and return argument parser for a set of subcommands.
    :param commands: Union[None, List[str]]. If `commands` is `None`, add full
        parsers for all subcommands; if `commands` is an empty list, add thin
        parsers for all subcommands; otherwise, add full parsers only for the
        subcommands named in the argument.
"""
parser = PrintsHelpOnErrorArgumentParser(
description=(
"The PaaSTA command line tool. The 'paasta' command is the entry point "
"to multiple subcommands, see below.\n\n"
"You can see more help for individual commands by appending them with '--help', "
"for example, 'paasta status --help' or see the man page with 'man paasta status'."
),
epilog=(
"The 'paasta' command line tool is designed to be used by humans, and therefore has "
"command line completion for almost all options and uses pretty formatting when "
"possible."
),
# Suppressing usage prevents it from being printed twice upon print_help
usage=argparse.SUPPRESS,
)
# http://stackoverflow.com/a/8521644/812183
parser.add_argument(
"-V",
"--version",
action="version",
version=f"paasta-tools {paasta_tools.__version__}",
)
subparsers = parser.add_subparsers(dest="command", metavar="")
subparsers.required = True
# Adding a separate help subparser allows us to respond to "help" without --help
help_parser = subparsers.add_parser(
"help", help=f"run `paasta <subcommand> -h` for help"
)
help_parser.set_defaults(command=None)
# Build a list of subcommands to add them in alphabetical order later
command_choices: List[Tuple[str, Any]] = []
if commands is None:
for command in sorted(modules_in_pkg(cmds)):
command_choices.append(
(command, (add_subparser, [command, subparsers], {}))
)
elif commands:
for command in commands:
if command not in PAASTA_SUBCOMMANDS:
# could be external subcommand
continue
command_choices.append(
(
command,
(add_subparser, [PAASTA_SUBCOMMANDS[command], subparsers], {}),
)
)
else:
for command in PAASTA_SUBCOMMANDS.keys():
command_choices.append(
(
command,
(subparsers.add_parser, [command], dict(help="", add_help=False)),
)
)
for command in list_external_commands():
command_choices.append(
(command, (subparsers.add_parser, [command], dict(help="")))
)
for (_, (fn, args, kwds)) in sorted(command_choices, key=lambda e: e[0]):
fn(*args, **kwds)
return parser
def parse_args(argv):
"""Initialize autocompletion and configure the argument parser.
:return: an argparse.Namespace object mapping parameter names to the inputs
from sys.argv
"""
parser = get_argparser(commands=[])
argcomplete.autocomplete(parser)
args, _ = parser.parse_known_args(argv)
if args.command:
parser = get_argparser(commands=[args.command])
argcomplete.autocomplete(parser)
return parser.parse_args(argv), parser
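# Illustrative note (not part of the original module): parse_args makes two
# passes. get_argparser(commands=[]) first builds thin stub subparsers so that
# parse_known_args can cheaply identify which subcommand was requested, and a
# full parser is then built for only that subcommand. For example,
#
#     args, parser = parse_args(["status"])
#
# first matches the thin "status" stub and then re-parses against the complete
# parser loaded from paasta_tools.cli.cmds.status.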
def main(argv=None):
"""Perform a paasta call. Read args from sys.argv and pass parsed args onto
appropriate command in paasta_cli/cmds directory.
Ensure we kill any child pids before we quit
"""
logging.basicConfig()
warnings.filterwarnings("ignore", category=DeprecationWarning)
# if we are an external command, we need to exec out early.
# The reason we exec out early is so we don't bother trying to parse
# "foreign" arguments, which would cause a stack trace.
if calling_external_command():
exec_subcommand(sys.argv)
try:
args, parser = parse_args(argv)
if args.command is None:
parser.print_help()
return_code = 0
else:
return_code = args.command(args)
except KeyboardInterrupt:
return_code = 1
sys.exit(return_code)
if __name__ == "__main__":
main()
|
import asyncio
from collections import OrderedDict
import logging
import async_timeout
from pypoint import PointSession
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components.http import HomeAssistantView
from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET
from homeassistant.core import callback
from .const import DOMAIN
AUTH_CALLBACK_PATH = "/api/minut"
AUTH_CALLBACK_NAME = "api:minut"
DATA_FLOW_IMPL = "point_flow_implementation"
_LOGGER = logging.getLogger(__name__)
@callback
def register_flow_implementation(hass, domain, client_id, client_secret):
"""Register a flow implementation.
domain: Domain of the component responsible for the implementation.
client_id: Client id.
client_secret: Client secret.
"""
if DATA_FLOW_IMPL not in hass.data:
hass.data[DATA_FLOW_IMPL] = OrderedDict()
hass.data[DATA_FLOW_IMPL][domain] = {
CONF_CLIENT_ID: client_id,
CONF_CLIENT_SECRET: client_secret,
}
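# Illustrative sketch (not part of the original module): a component would call
# register_flow_implementation once during setup, roughly as
#
#     register_flow_implementation(
#         hass, DOMAIN, config[CONF_CLIENT_ID], config[CONF_CLIENT_SECRET]
#     )
#
# which makes the client credentials available to PointFlowHandler below.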
@config_entries.HANDLERS.register("point")
class PointFlowHandler(config_entries.ConfigFlow):
"""Handle a config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
def __init__(self):
"""Initialize flow."""
self.flow_impl = None
async def async_step_import(self, user_input=None):
"""Handle external yaml configuration."""
if self.hass.config_entries.async_entries(DOMAIN):
return self.async_abort(reason="already_setup")
self.flow_impl = DOMAIN
return await self.async_step_auth()
async def async_step_user(self, user_input=None):
"""Handle a flow start."""
flows = self.hass.data.get(DATA_FLOW_IMPL, {})
if self.hass.config_entries.async_entries(DOMAIN):
return self.async_abort(reason="already_setup")
if not flows:
_LOGGER.debug("no flows")
return self.async_abort(reason="no_flows")
if len(flows) == 1:
self.flow_impl = list(flows)[0]
return await self.async_step_auth()
if user_input is not None:
self.flow_impl = user_input["flow_impl"]
return await self.async_step_auth()
return self.async_show_form(
step_id="user",
data_schema=vol.Schema({vol.Required("flow_impl"): vol.In(list(flows))}),
)
async def async_step_auth(self, user_input=None):
"""Create an entry for auth."""
if self.hass.config_entries.async_entries(DOMAIN):
return self.async_abort(reason="external_setup")
errors = {}
if user_input is not None:
errors["base"] = "follow_link"
try:
with async_timeout.timeout(10):
url = await self._get_authorization_url()
except asyncio.TimeoutError:
return self.async_abort(reason="authorize_url_timeout")
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected error generating auth url")
return self.async_abort(reason="authorize_url_fail")
return self.async_show_form(
step_id="auth",
description_placeholders={"authorization_url": url},
errors=errors,
)
async def _get_authorization_url(self):
"""Create Minut Point session and get authorization url."""
flow = self.hass.data[DATA_FLOW_IMPL][self.flow_impl]
client_id = flow[CONF_CLIENT_ID]
client_secret = flow[CONF_CLIENT_SECRET]
point_session = PointSession(
self.hass.helpers.aiohttp_client.async_get_clientsession(),
client_id,
client_secret,
)
self.hass.http.register_view(MinutAuthCallbackView())
return point_session.get_authorization_url
async def async_step_code(self, code=None):
"""Received code for authentication."""
if self.hass.config_entries.async_entries(DOMAIN):
return self.async_abort(reason="already_setup")
if code is None:
return self.async_abort(reason="no_code")
_LOGGER.debug(
"Should close all flows below %s",
self.hass.config_entries.flow.async_progress(),
)
# Remove notification if no other discovery config entries in progress
return await self._async_create_session(code)
async def _async_create_session(self, code):
"""Create point session and entries."""
flow = self.hass.data[DATA_FLOW_IMPL][DOMAIN]
client_id = flow[CONF_CLIENT_ID]
client_secret = flow[CONF_CLIENT_SECRET]
point_session = PointSession(
self.hass.helpers.aiohttp_client.async_get_clientsession(),
client_id,
client_secret,
)
token = await point_session.get_access_token(code)
_LOGGER.debug("Got new token")
if not point_session.is_authorized:
_LOGGER.error("Authentication Error")
return self.async_abort(reason="auth_error")
_LOGGER.info("Successfully authenticated Point")
user_email = (await point_session.user()).get("email") or ""
return self.async_create_entry(
title=user_email,
data={
"token": token,
"refresh_args": {
CONF_CLIENT_ID: client_id,
CONF_CLIENT_SECRET: client_secret,
},
},
)
class MinutAuthCallbackView(HomeAssistantView):
"""Minut Authorization Callback View."""
requires_auth = False
url = AUTH_CALLBACK_PATH
name = AUTH_CALLBACK_NAME
@staticmethod
async def get(request):
"""Receive authorization code."""
hass = request.app["hass"]
if "code" in request.query:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": "code"}, data=request.query["code"]
)
)
return "OK!"
|
import json
import time
from absl import flags
from perfkitbenchmarker import errors
from perfkitbenchmarker import managed_memory_store
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers import azure
from perfkitbenchmarker.providers.azure import azure_network
FLAGS = flags.FLAGS
# 15min timeout for issuing az redis delete command.
TIMEOUT = 900
EXISTS_RETRY_TIMES = 3
EXISTS_RETRY_POLL = 30
REDIS_VERSION = '3.2'
class AzureRedisCache(managed_memory_store.BaseManagedMemoryStore):
"""Object representing an Azure Redis Cache."""
CLOUD = azure.CLOUD
MEMORY_STORE = managed_memory_store.REDIS
# Azure redis could take up to an hour to create
READY_TIMEOUT = 60 * 60 # 60 minutes
def __init__(self, spec):
super(AzureRedisCache, self).__init__(spec)
self.redis_region = FLAGS.redis_region
self.resource_group = azure_network.GetResourceGroup(self.redis_region)
self.azure_redis_size = FLAGS.azure_redis_size
self.failover_style = FLAGS.redis_failover_style
if self.failover_style == managed_memory_store.Failover.FAILOVER_SAME_REGION:
self.azure_tier = 'Premium'
else:
self.azure_tier = 'Basic'
def GetResourceMetadata(self):
"""Returns a dict containing metadata about the cache.
Returns:
dict mapping string property key to value.
"""
result = {
'cloud_redis_failover_style': self.failover_style,
'cloud_redis_region': self.redis_region,
'cloud_redis_azure_tier': self.azure_tier,
'cloud_redis_azure_redis_size': self.azure_redis_size,
'cloud_redis_version': REDIS_VERSION,
}
return result
@staticmethod
def CheckPrerequisites(benchmark_config):
"""Check benchmark prerequisites on the input flag parameters.
Args:
benchmark_config: Unused.
Raises:
errors.Config.InvalidValue: Input flag parameters are invalid.
"""
if FLAGS.managed_memory_store_version:
raise errors.Config.InvalidValue(
'Custom Redis version not supported on Azure Redis. '
'Redis version is {0}.'.format(REDIS_VERSION))
if FLAGS.redis_failover_style in [
managed_memory_store.Failover.FAILOVER_SAME_ZONE]:
raise errors.Config.InvalidValue(
'Azure redis with failover in the same zone is not supported.')
def _Create(self):
"""Creates the cache."""
cmd = [
azure.AZURE_PATH, 'redis', 'create',
'--resource-group', self.resource_group.name,
'--location', self.redis_region,
'--name', self.name,
'--sku', self.azure_tier,
'--vm-size', self.azure_redis_size,
'--enable-non-ssl-port',
]
vm_util.IssueCommand(cmd, timeout=TIMEOUT)
def _Delete(self):
"""Deletes the cache."""
cmd = [
azure.AZURE_PATH, 'redis', 'delete',
'--resource-group', self.resource_group.name,
'--name', self.name,
'--yes',
]
vm_util.IssueCommand(cmd, timeout=TIMEOUT)
def DescribeCache(self):
"""Calls show on the cache to get information about it.
Returns:
stdout, stderr and retcode.
"""
stdout, stderr, retcode = vm_util.IssueCommand([
azure.AZURE_PATH, 'redis', 'show',
'--resource-group', self.resource_group.name,
'--name', self.name,
], raise_on_failure=False)
return stdout, stderr, retcode
def _Exists(self):
"""Returns True if the cache exists.
Returns:
True if cache exists and false otherwise.
"""
# Retry to ensure there is no transient error in describe cache
for _ in range(EXISTS_RETRY_TIMES):
_, _, retcode = self.DescribeCache()
if retcode == 0:
return True
time.sleep(EXISTS_RETRY_POLL)
return retcode == 0
def _IsReady(self):
"""Returns True if the cache is ready.
Returns:
True if cache is ready and false otherwise.
"""
stdout, _, retcode = self.DescribeCache()
if (retcode == 0 and
json.loads(stdout).get('provisioningState', None) == 'Succeeded'):
return True
return False
def GetMemoryStorePassword(self):
"""See base class."""
if not self._password:
self._PopulateEndpoint()
return self._password
@vm_util.Retry(max_retries=5)
def _PopulateEndpoint(self):
"""Populates endpoint information for the instance.
Raises:
errors.Resource.RetryableGetError:
Failed to retrieve information on cache.
"""
stdout, _, retcode = self.DescribeCache()
if retcode != 0:
      raise errors.Resource.RetryableGetError(
          'Failed to retrieve information on %s.' % self.name)
response = json.loads(stdout)
self._ip = response['hostName']
self._port = response['port']
stdout, _, retcode = vm_util.IssueCommand([
azure.AZURE_PATH, 'redis', 'list-keys',
'--resource-group', self.resource_group.name,
'--name', self.name,
], raise_on_failure=False)
if retcode != 0:
      raise errors.Resource.RetryableGetError(
          'Failed to retrieve information on %s.' % self.name)
response = json.loads(stdout)
self._password = response['primaryKey']
|
from typing import Any, Dict
# pylint: disable=import-error, no-member
import switchbot
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
from homeassistant.const import CONF_MAC, CONF_NAME, CONF_PASSWORD
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.restore_state import RestoreEntity
DEFAULT_NAME = "Switchbot"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_MAC): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PASSWORD): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Perform the setup for Switchbot devices."""
name = config.get(CONF_NAME)
mac_addr = config[CONF_MAC]
password = config.get(CONF_PASSWORD)
add_entities([SwitchBot(mac_addr, name, password)])
class SwitchBot(SwitchEntity, RestoreEntity):
"""Representation of a Switchbot."""
def __init__(self, mac, name, password) -> None:
"""Initialize the Switchbot."""
self._state = None
self._last_run_success = None
self._name = name
self._mac = mac
self._device = switchbot.Switchbot(mac=mac, password=password)
async def async_added_to_hass(self):
"""Run when entity about to be added."""
await super().async_added_to_hass()
state = await self.async_get_last_state()
if not state:
return
self._state = state.state == "on"
def turn_on(self, **kwargs) -> None:
"""Turn device on."""
if self._device.turn_on():
self._state = True
self._last_run_success = True
else:
self._last_run_success = False
def turn_off(self, **kwargs) -> None:
"""Turn device off."""
if self._device.turn_off():
self._state = False
self._last_run_success = True
else:
self._last_run_success = False
@property
def assumed_state(self) -> bool:
"""Return true if unable to access real state of entity."""
return True
@property
def is_on(self) -> bool:
"""Return true if device is on."""
return self._state
@property
def unique_id(self) -> str:
"""Return a unique, Home Assistant friendly identifier for this entity."""
return self._mac.replace(":", "")
@property
def name(self) -> str:
"""Return the name of the switch."""
return self._name
@property
def device_state_attributes(self) -> Dict[str, Any]:
"""Return the state attributes."""
return {"last_run_success": self._last_run_success}
|
from arcam.fmj.client import Client
from arcam.fmj.state import State
import pytest
from homeassistant.components.arcam_fmj.const import DEFAULT_NAME
from homeassistant.components.arcam_fmj.media_player import ArcamFmj
from homeassistant.const import CONF_HOST, CONF_PORT
from tests.async_mock import Mock, patch
from tests.common import MockConfigEntry
MOCK_HOST = "127.0.0.1"
MOCK_PORT = 50000
MOCK_TURN_ON = {
"service": "switch.turn_on",
"data": {"entity_id": "switch.test"},
}
MOCK_ENTITY_ID = "media_player.arcam_fmj_127_0_0_1_zone_1"
MOCK_UUID = "456789abcdef"
MOCK_UDN = f"uuid:01234567-89ab-cdef-0123-{MOCK_UUID}"
MOCK_NAME = f"{DEFAULT_NAME} ({MOCK_HOST})"
MOCK_CONFIG_ENTRY = {CONF_HOST: MOCK_HOST, CONF_PORT: MOCK_PORT}
@pytest.fixture(name="client")
def client_fixture():
"""Get a mocked client."""
client = Mock(Client)
client.host = MOCK_HOST
client.port = MOCK_PORT
return client
@pytest.fixture(name="state_1")
def state_1_fixture(client):
"""Get a mocked state."""
state = Mock(State)
state.client = client
state.zn = 1
state.get_power.return_value = True
state.get_volume.return_value = 0.0
state.get_source_list.return_value = []
state.get_incoming_audio_format.return_value = (0, 0)
state.get_mute.return_value = None
return state
@pytest.fixture(name="state_2")
def state_2_fixture(client):
"""Get a mocked state."""
state = Mock(State)
state.client = client
state.zn = 2
state.get_power.return_value = True
state.get_volume.return_value = 0.0
state.get_source_list.return_value = []
state.get_incoming_audio_format.return_value = (0, 0)
state.get_mute.return_value = None
return state
@pytest.fixture(name="state")
def state_fixture(state_1):
"""Get a mocked state."""
return state_1
@pytest.fixture(name="player")
def player_fixture(hass, state):
"""Get standard player."""
player = ArcamFmj(MOCK_NAME, state, MOCK_UUID)
player.entity_id = MOCK_ENTITY_ID
player.hass = hass
player.async_write_ha_state = Mock()
return player
@pytest.fixture(name="player_setup")
async def player_setup_fixture(hass, state_1, state_2, client):
"""Get standard player."""
config_entry = MockConfigEntry(
domain="arcam_fmj", data=MOCK_CONFIG_ENTRY, title=MOCK_NAME
)
config_entry.add_to_hass(hass)
def state_mock(cli, zone):
if zone == 1:
return state_1
if zone == 2:
return state_2
with patch("homeassistant.components.arcam_fmj.Client", return_value=client), patch(
"homeassistant.components.arcam_fmj.media_player.State", side_effect=state_mock
), patch("homeassistant.components.arcam_fmj._run_client", return_value=None):
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
yield MOCK_ENTITY_ID
|
import pytest
from molecule.command import login
@pytest.fixture
def _instance(config_instance):
config_instance.state.change_state('created', True)
return login.Login(config_instance)
def test_execute(mocker, _instance):
_instance._config.command_args = {'host': 'instance-1'}
m = mocker.patch('molecule.command.login.Login._get_login')
_instance.execute()
m.assert_called_once_with('instance-1')
@pytest.mark.parametrize(
'config_instance', ['command_driver_delegated_managed_section_data'],
indirect=True)
def test_execute_raises_when_not_created(patched_logger_critical, _instance):
_instance._config.state.change_state('created', False)
with pytest.raises(SystemExit) as e:
_instance.execute()
assert 1 == e.value.code
msg = 'Instances not created. Please create instances first.'
patched_logger_critical.assert_called_once_with(msg)
def test_get_hostname_does_not_match(patched_logger_critical, _instance):
_instance._config.command_args = {'host': 'invalid'}
hosts = ['instance-1']
with pytest.raises(SystemExit) as e:
_instance._get_hostname(hosts)
assert 1 == e.value.code
msg = ("There are no hosts that match 'invalid'. You "
'can only login to valid hosts.')
patched_logger_critical.assert_called_once_with(msg)
def test_get_hostname_exact_match_with_one_host(_instance):
_instance._config.command_args = {'host': 'instance-1'}
hosts = ['instance-1']
assert 'instance-1' == _instance._get_hostname(hosts)
def test_get_hostname_partial_match_with_one_host(_instance):
_instance._config.command_args = {'host': 'inst'}
hosts = ['instance-1']
assert 'instance-1' == _instance._get_hostname(hosts)
def test_get_hostname_exact_match_with_multiple_hosts(_instance):
_instance._config.command_args = {'host': 'instance-1'}
hosts = ['instance-1', 'instance-2']
assert 'instance-1' == _instance._get_hostname(hosts)
def test_get_hostname_partial_match_with_multiple_hosts(_instance):
_instance._config.command_args = {'host': 'foo'}
hosts = ['foo', 'fooo']
assert 'foo' == _instance._get_hostname(hosts)
def test_get_hostname_partial_match_with_multiple_hosts_raises(
patched_logger_critical, _instance):
_instance._config.command_args = {'host': 'inst'}
hosts = ['instance-1', 'instance-2']
with pytest.raises(SystemExit) as e:
_instance._get_hostname(hosts)
assert 1 == e.value.code
msg = ("There are 2 hosts that match 'inst'. "
'You can only login to one at a time.\n\n'
'Available hosts:\n'
'instance-1\n'
'instance-2')
patched_logger_critical.assert_called_once_with(msg)
def test_get_hostname_no_host_flag_specified_on_cli(_instance):
_instance._config.command_args = {}
hosts = ['instance-1']
_instance._get_hostname(hosts)
assert 'instance-1' == _instance._get_hostname(hosts)
def test_get_hostname_no_host_flag_specified_on_cli_with_multiple_hosts_raises(
patched_logger_critical, _instance):
_instance._config.command_args = {}
hosts = ['instance-1', 'instance-2']
with pytest.raises(SystemExit) as e:
_instance._get_hostname(hosts)
assert 1 == e.value.code
msg = ('There are 2 running hosts. Please specify '
'which with --host.\n\n'
'Available hosts:\n'
'instance-1\n'
'instance-2')
patched_logger_critical.assert_called_once_with(msg)
|
import cherrypy
from cherrypy.test import helper
class SessionAuthenticateTest(helper.CPWebCase):
@staticmethod
def setup_server():
def check(username, password):
# Dummy check_username_and_password function
if username != 'test' or password != 'password':
return 'Wrong login/password'
def augment_params():
# A simple tool to add some things to request.params
# This is to check to make sure that session_auth can handle
# request params (ticket #780)
cherrypy.request.params['test'] = 'test'
cherrypy.tools.augment_params = cherrypy.Tool(
'before_handler', augment_params, None, priority=30)
class Test:
_cp_config = {
'tools.sessions.on': True,
'tools.session_auth.on': True,
'tools.session_auth.check_username_and_password': check,
'tools.augment_params.on': True,
}
@cherrypy.expose
def index(self, **kwargs):
return 'Hi %s, you are logged in' % cherrypy.request.login
cherrypy.tree.mount(Test())
def testSessionAuthenticate(self):
# request a page and check for login form
self.getPage('/')
self.assertInBody('<form method="post" action="do_login">')
# setup credentials
login_body = 'username=test&password=password&from_page=/'
# attempt a login
self.getPage('/do_login', method='POST', body=login_body)
self.assertStatus((302, 303))
# get the page now that we are logged in
self.getPage('/', self.cookies)
self.assertBody('Hi test, you are logged in')
# do a logout
self.getPage('/do_logout', self.cookies, method='POST')
self.assertStatus((302, 303))
# verify we are logged out
self.getPage('/', self.cookies)
self.assertInBody('<form method="post" action="do_login">')
|
import logging
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_NAME, CONF_PIN
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import Entity
from .const import CONF_DIFFERENTIAL, CONF_PIN_MODE, DOMAIN
from .entity import FirmataPinEntity
from .pin import FirmataAnalogInput, FirmataPinUsedException
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities
) -> None:
"""Set up the Firmata sensors."""
new_entities = []
board = hass.data[DOMAIN][config_entry.entry_id]
for sensor in board.sensors:
pin = sensor[CONF_PIN]
pin_mode = sensor[CONF_PIN_MODE]
differential = sensor[CONF_DIFFERENTIAL]
api = FirmataAnalogInput(board, pin, pin_mode, differential)
try:
api.setup()
except FirmataPinUsedException:
_LOGGER.error(
"Could not setup sensor on pin %s since pin already in use",
sensor[CONF_PIN],
)
continue
name = sensor[CONF_NAME]
sensor_entity = FirmataSensor(api, config_entry, name, pin)
new_entities.append(sensor_entity)
if new_entities:
async_add_entities(new_entities)
class FirmataSensor(FirmataPinEntity, Entity):
"""Representation of a sensor on a Firmata board."""
async def async_added_to_hass(self) -> None:
"""Set up a sensor."""
await self._api.start_pin(self.async_write_ha_state)
async def async_will_remove_from_hass(self) -> None:
"""Stop reporting a sensor."""
await self._api.stop_pin()
@property
def state(self) -> int:
"""Return sensor state."""
return self._api.state
|
import logging
from aiofreepybox.exceptions import AuthorizationError, HttpRequestError
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_HOST, CONF_PORT
from .const import DOMAIN # pylint: disable=unused-import
from .router import get_api
_LOGGER = logging.getLogger(__name__)
class FreeboxFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
def __init__(self):
"""Initialize Freebox config flow."""
self._host = None
self._port = None
def _show_setup_form(self, user_input=None, errors=None):
"""Show the setup form to the user."""
if user_input is None:
user_input = {}
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_HOST, default=user_input.get(CONF_HOST, "")): str,
vol.Required(CONF_PORT, default=user_input.get(CONF_PORT, "")): int,
}
),
errors=errors or {},
)
async def async_step_user(self, user_input=None):
"""Handle a flow initiated by the user."""
errors = {}
if user_input is None:
return self._show_setup_form(user_input, errors)
self._host = user_input[CONF_HOST]
self._port = user_input[CONF_PORT]
# Check if already configured
await self.async_set_unique_id(self._host)
self._abort_if_unique_id_configured()
return await self.async_step_link()
async def async_step_link(self, user_input=None):
"""Attempt to link with the Freebox router.
Given a configured host, will ask the user to press the button
to connect to the router.
"""
if user_input is None:
return self.async_show_form(step_id="link")
errors = {}
fbx = await get_api(self.hass, self._host)
try:
# Open connection and check authentication
await fbx.open(self._host, self._port)
# Check permissions
await fbx.system.get_config()
await fbx.lan.get_hosts_list()
await self.hass.async_block_till_done()
# Close connection
await fbx.close()
return self.async_create_entry(
title=self._host,
data={CONF_HOST: self._host, CONF_PORT: self._port},
)
except AuthorizationError as error:
_LOGGER.error(error)
errors["base"] = "register_failed"
except HttpRequestError:
_LOGGER.error("Error connecting to the Freebox router at %s", self._host)
errors["base"] = "cannot_connect"
except Exception: # pylint: disable=broad-except
_LOGGER.exception(
"Unknown error connecting with Freebox router at %s", self._host
)
errors["base"] = "unknown"
return self.async_show_form(step_id="link", errors=errors)
async def async_step_import(self, user_input=None):
"""Import a config entry."""
return await self.async_step_user(user_input)
async def async_step_discovery(self, discovery_info):
"""Initialize step from discovery."""
return await self.async_step_user(discovery_info)
|
import unittest
from perfkitbenchmarker import stages
class RunStageParserTestCase(unittest.TestCase):
def setUp(self):
self._parser = stages.RunStageParser()
def testEmpty(self):
with self.assertRaises(ValueError):
self._parser.parse('')
def testInvalidItem(self):
with self.assertRaises(ValueError):
self._parser.parse('provision,fake_stage')
def testAllAndIndividualStages(self):
with self.assertRaises(ValueError):
self._parser.parse('provision,all')
def testIncorrectOrder(self):
with self.assertRaises(ValueError):
self._parser.parse('provision,run')
with self.assertRaises(ValueError):
self._parser.parse('teardown,provision')
def testAll(self):
self.assertEqual(self._parser.parse('all'),
['provision', 'prepare', 'run', 'cleanup', 'teardown'])
def testIndividual(self):
self.assertEqual(self._parser.parse('prepare'), ['prepare'])
def testMultiple(self):
self.assertEqual(self._parser.parse('prepare,run'), ['prepare', 'run'])
def testList(self):
self.assertEqual(self._parser.parse(['prepare', 'run']), ['prepare', 'run'])
if __name__ == '__main__':
unittest.main()
|
import json
import pytest_bdd as bdd
bdd.scenarios('misc.feature')
@bdd.then(bdd.parsers.parse('the PDF {filename} should exist in the tmpdir'))
def pdf_exists(quteproc, tmpdir, filename):
path = tmpdir / filename
data = path.read_binary()
assert data.startswith(b'%PDF')
@bdd.when(bdd.parsers.parse('I set up "{lists}" as block lists'))
def set_up_blocking(quteproc, lists, server):
url = 'http://localhost:{}/data/adblock/'.format(server.port)
urls = [url + item.strip() for item in lists.split(',')]
quteproc.set_setting('content.host_blocking.lists', json.dumps(urls))
|
from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN
from homeassistant.const import (
ATTR_ENTITY_ID,
SERVICE_LOCK,
SERVICE_UNLOCK,
STATE_LOCKED,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
STATE_UNLOCKED,
)
from tests.components.august.mocks import (
_create_august_with_devices,
_mock_activities_from_fixture,
_mock_doorsense_enabled_august_lock_detail,
_mock_lock_from_fixture,
)
async def test_lock_device_registry(hass):
"""Test creation of a lock with doorsense and bridge ands up in the registry."""
lock_one = await _mock_doorsense_enabled_august_lock_detail(hass)
await _create_august_with_devices(hass, [lock_one])
device_registry = await hass.helpers.device_registry.async_get_registry()
reg_device = device_registry.async_get_device(
identifiers={("august", "online_with_doorsense")}, connections=set()
)
assert reg_device.model == "AUG-MD01"
assert reg_device.sw_version == "undefined-4.3.0-1.8.14"
assert reg_device.name == "online_with_doorsense Name"
assert reg_device.manufacturer == "August Home Inc."
async def test_lock_changed_by(hass):
"""Test creation of a lock with doorsense and bridge."""
lock_one = await _mock_doorsense_enabled_august_lock_detail(hass)
activities = await _mock_activities_from_fixture(hass, "get_activity.lock.json")
await _create_august_with_devices(hass, [lock_one], activities=activities)
lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name")
assert lock_online_with_doorsense_name.state == STATE_LOCKED
assert (
lock_online_with_doorsense_name.attributes.get("changed_by")
== "Your favorite elven princess"
)
async def test_one_lock_operation(hass):
"""Test creation of a lock with doorsense and bridge."""
lock_one = await _mock_doorsense_enabled_august_lock_detail(hass)
await _create_august_with_devices(hass, [lock_one])
lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name")
assert lock_online_with_doorsense_name.state == STATE_LOCKED
assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92
assert (
lock_online_with_doorsense_name.attributes.get("friendly_name")
== "online_with_doorsense Name"
)
data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"}
assert await hass.services.async_call(
LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True
)
await hass.async_block_till_done()
lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name")
assert lock_online_with_doorsense_name.state == STATE_UNLOCKED
assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92
assert (
lock_online_with_doorsense_name.attributes.get("friendly_name")
== "online_with_doorsense Name"
)
assert await hass.services.async_call(
LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True
)
await hass.async_block_till_done()
lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name")
assert lock_online_with_doorsense_name.state == STATE_LOCKED
# No activity means it will be unavailable until the activity feed has data
entity_registry = await hass.helpers.entity_registry.async_get_registry()
lock_operator_sensor = entity_registry.async_get(
"sensor.online_with_doorsense_name_operator"
)
assert lock_operator_sensor
assert (
hass.states.get("sensor.online_with_doorsense_name_operator").state
== STATE_UNAVAILABLE
)
async def test_one_lock_unknown_state(hass):
"""Test creation of a lock with doorsense and bridge."""
lock_one = await _mock_lock_from_fixture(
hass,
"get_lock.online.unknown_state.json",
)
await _create_august_with_devices(hass, [lock_one])
lock_brokenid_name = hass.states.get("lock.brokenid_name")
assert lock_brokenid_name.state == STATE_UNKNOWN
|
import itertools
import posixpath
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import disk
from perfkitbenchmarker import hpc_util
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.linux_packages import ior
FLAGS = flags.FLAGS
flags.DEFINE_integer(
'ior_num_procs', 256,
'The number of MPI processes to use for IOR.')
flags.DEFINE_string(
'ior_script', 'default_ior_script',
'The IOR script to run. See '
'https://github.com/hpc/ior/blob/master/doc/sphinx/userDoc/skripts.rst '
'for more info.')
flags.DEFINE_integer(
'mdtest_num_procs', 32,
'The number of MPI processes to use for mdtest.')
flags.DEFINE_list(
'mdtest_args', ['-n 1000 -u'],
'Command line arguments to be passed to mdtest. '
'Each set of args in the list will be run separately.')
flags.DEFINE_boolean(
'mdtest_drop_caches', True,
'Whether to drop caches between the create/stat/delete phases. '
'If this is set, mdtest will be run 3 times with the -C, -T, and -r '
'options and the client page caches will be dropped between runs. '
'When False, a Full Sweep (Create, Stat, Delete) is run.')
BENCHMARK_NAME = 'ior'
BENCHMARK_CONFIG = """
ior:
description: Runs IOR and mdtest benchmarks.
flags:
data_disk_type: nfs
data_disk_size: 2048
vm_groups:
default:
vm_spec: *default_dual_core
disk_spec: *default_500_gb
vm_count: null
"""
def GetConfig(user_config):
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def Prepare(benchmark_spec):
"""Install IOR on the vms.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
vms = benchmark_spec.vms
master_vm = vms[0]
vm_util.RunThreaded(lambda vm: vm.Install('ior'), benchmark_spec.vms)
hpc_util.CreateMachineFile(vms)
master_vm.AuthenticateVm()
def Run(benchmark_spec):
"""Run the IOR benchmark on the vms.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample objects.
"""
master_vm = benchmark_spec.vms[0]
results = []
# Run IOR benchmark.
if FLAGS.ior_num_procs and FLAGS.ior_script:
remote_script_path = posixpath.join(master_vm.scratch_disks[0].mount_point,
FLAGS.ior_script)
master_vm.PushDataFile(
FLAGS.ior_script,
remote_script_path,
# SCP directly to SMB returns an error, so first copy to disk.
should_double_copy=(FLAGS.data_disk_type == disk.SMB))
results += ior.RunIOR(master_vm, FLAGS.ior_num_procs, remote_script_path)
# Run mdtest benchmark.
phase_args = ('-C', '-T', '-r') if FLAGS.mdtest_drop_caches else ('-C -T -r',)
mdtest_args = (' '.join(args) for args in
itertools.product(FLAGS.mdtest_args, phase_args))
for args in mdtest_args:
results += ior.RunMdtest(master_vm, FLAGS.mdtest_num_procs, args)
if FLAGS.mdtest_drop_caches:
vm_util.RunThreaded(lambda vm: vm.DropCaches(), benchmark_spec.vms)
return results
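# Illustrative note (not part of the original benchmark): with the default flags
# --mdtest_args='-n 1000 -u' and --mdtest_drop_caches=True, the itertools.product
# expansion in Run() yields three separate mdtest invocations,
#     '-n 1000 -u -C', '-n 1000 -u -T', '-n 1000 -u -r'
# (create, stat, and remove phases), with client page caches dropped in between.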
def Cleanup(unused_benchmark_spec):
"""Cleanup the IOR benchmark.
Args:
unused_benchmark_spec: The benchmark specification. Contains all data that
is required to run the benchmark.
"""
pass
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import time
from perfkitbenchmarker import errors
from perfkitbenchmarker import vm_util
import six
_RESOURCE_REGISTRY = {}
def GetResourceClass(base_class, **kwargs):
"""Returns the subclass with the corresponding attributes.
Args:
base_class: The base class of the resource to return
(e.g. BaseVirtualMachine).
**kwargs: Every attribute/value of the subclass's REQUIRED_ATTRS that were
used to register the subclass.
Raises:
Exception: If no class could be found with matching attributes.
"""
key = [base_class.__name__]
key += sorted(kwargs.items())
if tuple(key) not in _RESOURCE_REGISTRY:
raise errors.Resource.SubclassNotFoundError(
'No %s subclass defined with the attributes: %s' %
(base_class.__name__, kwargs))
return _RESOURCE_REGISTRY.get(tuple(key))
class AutoRegisterResourceMeta(abc.ABCMeta):
"""Metaclass which allows resources to automatically be registered."""
def __init__(cls, name, bases, dct):
if (all(hasattr(cls, attr) for attr in cls.REQUIRED_ATTRS) and
cls.RESOURCE_TYPE):
unset_attrs = [
attr for attr in cls.REQUIRED_ATTRS if getattr(cls, attr) is None]
if unset_attrs:
raise Exception(
'Subclasses of %s must have the following attrs set: %s. For %s '
'the following attrs were not set: %s.' %
(cls.RESOURCE_TYPE, cls.REQUIRED_ATTRS, cls.__name__, unset_attrs))
key = [cls.RESOURCE_TYPE]
key += sorted([(attr, getattr(cls, attr)) for attr in cls.REQUIRED_ATTRS])
_RESOURCE_REGISTRY[tuple(key)] = cls
super(AutoRegisterResourceMeta, cls).__init__(name, bases, dct)
class BaseResource(six.with_metaclass(AutoRegisterResourceMeta, object)):
"""An object representing a cloud resource.
Attributes:
created: True if the resource has been created.
pkb_managed: Whether the resource is managed (created and deleted) by PKB.
"""
# The name of the base class (e.g. BaseVirtualMachine) that will be extended
# with auto-registered subclasses.
RESOURCE_TYPE = None
# A list of attributes that are used to register Resource subclasses
# (e.g. CLOUD).
REQUIRED_ATTRS = ['CLOUD']
# Timeout in seconds for resource to be ready.
READY_TIMEOUT = None
# Time between retries.
POLL_INTERVAL = 5
def __init__(self, user_managed=False):
super(BaseResource, self).__init__()
self.created = user_managed
self.deleted = user_managed
self.user_managed = user_managed
# Creation and deletion time information
# that we may make use of later.
self.create_start_time = None
self.delete_start_time = None
self.create_end_time = None
self.delete_end_time = None
self.resource_ready_time = None
self.metadata = dict()
def GetResourceMetadata(self):
"""Returns a dictionary of metadata about the resource."""
return self.metadata.copy()
@abc.abstractmethod
def _Create(self):
"""Creates the underlying resource."""
raise NotImplementedError()
@abc.abstractmethod
def _Delete(self):
"""Deletes the underlying resource.
Implementations of this method should be idempotent since it may
be called multiple times, even if the resource has already been
deleted.
"""
raise NotImplementedError()
def _Exists(self):
"""Returns true if the underlying resource exists.
Supplying this method is optional. If it is not implemented then the
default is to assume success when _Create and _Delete do not raise
exceptions.
"""
raise NotImplementedError()
def _IsReady(self):
"""Return true if the underlying resource is ready.
Supplying this method is optional. Use it when a resource can exist
without being ready. If the subclass does not implement
it then it just returns true.
Returns:
True if the resource was ready in time, False if the wait timed out.
"""
return True
def _IsDeleting(self):
"""Return true if the underlying resource is getting deleted.
Supplying this method is optional. Potentially use when the resource has an
asynchronous deletion operation to avoid rerunning the deletion command and
track the deletion time correctly. If the subclass does not implement it
then it just returns false.
Returns:
True if the resource was being deleted, False if the resource was in a non
deleting state.
"""
return False
def _PreDelete(self):
"""Method that will be called once before _DeleteResource() is called.
Supplying this method is optional. If it is supplied, it will be called
once, before attempting to delete the resource. It is intended to allow
data about the resource to be collected right before it is deleted.
"""
pass
def _PostCreate(self):
"""Method that will be called once after _CreateResource() is called.
Supplying this method is optional. If it is supplied, it will be called
once, after the resource is confirmed to exist. It is intended to allow
data about the resource to be collected or for the resource to be tagged.
"""
pass
def _CreateDependencies(self):
"""Method that will be called once before _CreateResource() is called.
Supplying this method is optional. It is intended to allow additional
flexibility in creating resource dependencies separately from _Create().
"""
pass
def _DeleteDependencies(self):
"""Method that will be called once after _DeleteResource() is called.
Supplying this method is optional. It is intended to allow additional
flexibility in deleting resource dependencies separately from _Delete().
"""
pass
@vm_util.Retry(retryable_exceptions=(errors.Resource.RetryableCreationError,))
def _CreateResource(self):
"""Reliably creates the underlying resource."""
if self.created:
return
# Overwrite create_start_time each time this is called,
    # with the assumption that multiple calls to Create() imply
# that the resource was not actually being created on the
# backend during previous failed attempts.
self.create_start_time = time.time()
self._Create()
try:
if not self._Exists():
raise errors.Resource.RetryableCreationError(
'Creation of %s failed.' % type(self).__name__)
except NotImplementedError:
pass
self.created = True
self.create_end_time = time.time()
@vm_util.Retry(retryable_exceptions=(errors.Resource.RetryableDeletionError,),
timeout=3600)
def _DeleteResource(self):
"""Reliably deletes the underlying resource."""
# Retryable method which allows waiting for deletion of the resource.
@vm_util.Retry(poll_interval=self.POLL_INTERVAL, fuzz=0, timeout=3600,
retryable_exceptions=(
errors.Resource.RetryableDeletionError,))
def WaitUntilDeleted():
if self._IsDeleting():
raise errors.Resource.RetryableDeletionError('Not yet deleted')
if self.deleted:
return
if not self.delete_start_time:
self.delete_start_time = time.time()
self._Delete()
WaitUntilDeleted()
try:
if self._Exists():
raise errors.Resource.RetryableDeletionError(
'Deletion of %s failed.' % type(self).__name__)
except NotImplementedError:
pass
def Create(self):
"""Creates a resource and its dependencies."""
@vm_util.Retry(poll_interval=self.POLL_INTERVAL, fuzz=0,
timeout=self.READY_TIMEOUT,
retryable_exceptions=(
errors.Resource.RetryableCreationError,))
def WaitUntilReady():
if not self._IsReady():
raise errors.Resource.RetryableCreationError('Not yet ready')
if self.user_managed:
return
self._CreateDependencies()
self._CreateResource()
WaitUntilReady()
if not self.resource_ready_time:
self.resource_ready_time = time.time()
self._PostCreate()
def Delete(self):
"""Deletes a resource and its dependencies."""
if self.user_managed:
return
self._PreDelete()
self._DeleteResource()
self.deleted = True
self.delete_end_time = time.time()
self._DeleteDependencies()
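# Illustrative sketch (not part of the original module; names are hypothetical):
# a concrete resource only needs to set the registration attributes and the
# abstract hooks for AutoRegisterResourceMeta to pick it up.
#
#     class _ExampleCache(BaseResource):
#       RESOURCE_TYPE = 'BaseResource'
#       CLOUD = 'ExampleCloud'
#
#       def _Create(self):
#         pass  # would issue the provider's create command here
#
#       def _Delete(self):
#         pass  # would issue the provider's delete command here
#
#     # GetResourceClass(BaseResource, CLOUD='ExampleCloud') then returns
#     # _ExampleCache, and Create()/Delete() wrap these hooks with retries and
#     # readiness polling.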
|
from ipaddress import ip_address
import logging
from aiohttp.hdrs import X_FORWARDED_FOR, X_FORWARDED_HOST, X_FORWARDED_PROTO
from aiohttp.web import HTTPBadRequest, middleware
from homeassistant.core import callback
_LOGGER = logging.getLogger(__name__)
# mypy: allow-untyped-defs
@callback
def async_setup_forwarded(app, trusted_proxies):
"""Create forwarded middleware for the app.
Process IP addresses, proto and host information in the forwarded for headers.
`X-Forwarded-For: <client>, <proxy1>, <proxy2>`
e.g., `X-Forwarded-For: 203.0.113.195, 70.41.3.18, 150.172.238.178`
We go through the list from the right side, and skip all entries that are in our
trusted proxies list. The first non-trusted IP is used as the client IP. If all
items in the X-Forwarded-For are trusted, including the most left item (client),
the most left item is used. In the latter case, the client connection originated
from an IP that is also listed as a trusted proxy IP or network.
`X-Forwarded-Proto: <client>, <proxy1>, <proxy2>`
e.g., `X-Forwarded-Proto: https, http, http`
OR `X-Forwarded-Proto: https` (one entry, even with multiple proxies)
The X-Forwarded-Proto is determined based on the corresponding entry of the
X-Forwarded-For header that is used/chosen as the client IP. However,
some proxies, for example, Kubernetes NGINX ingress, only retain one element
in the X-Forwarded-Proto header. In that case, we'll just use what we have.
`X-Forwarded-Host: <host>`
e.g., `X-Forwarded-Host: example.com`
If the previous headers are processed successfully, and the X-Forwarded-Host is
present, it will be used.
Additionally:
- If no X-Forwarded-For header is found, the processing of all headers is skipped.
- Log a warning when untrusted connected peer provides X-Forwarded-For headers.
- If multiple instances of X-Forwarded-For, X-Forwarded-Proto or
X-Forwarded-Host are found, an HTTP 400 status code is thrown.
- If malformed or invalid (IP) data in X-Forwarded-For header is found,
an HTTP 400 status code is thrown.
- The connected client peer on the socket of the incoming connection,
must be trusted for any processing to take place.
- If the number of elements in X-Forwarded-Proto does not equal 1 or
is equal to the number of elements in X-Forwarded-For, an HTTP 400
status code is thrown.
- If an empty X-Forwarded-Host is provided, an HTTP 400 status code is thrown.
- If an empty X-Forwarded-Proto is provided, or an empty element in the list,
an HTTP 400 status code is thrown.
"""
@middleware
async def forwarded_middleware(request, handler):
"""Process forwarded data by a reverse proxy."""
overrides = {}
# Handle X-Forwarded-For
forwarded_for_headers = request.headers.getall(X_FORWARDED_FOR, [])
if not forwarded_for_headers:
# No forwarding headers, continue as normal
return await handler(request)
# Ensure the IP of the connected peer is trusted
connected_ip = ip_address(request.transport.get_extra_info("peername")[0])
if not any(connected_ip in trusted_proxy for trusted_proxy in trusted_proxies):
_LOGGER.warning(
"Received X-Forwarded-For header from untrusted proxy %s, headers not processed",
connected_ip,
)
# Not trusted, continue as normal
return await handler(request)
# Multiple X-Forwarded-For headers
if len(forwarded_for_headers) > 1:
_LOGGER.error(
"Too many headers for X-Forwarded-For: %s", forwarded_for_headers
)
raise HTTPBadRequest
# Process X-Forwarded-For from the right side (by reversing the list)
forwarded_for_split = list(reversed(forwarded_for_headers[0].split(",")))
try:
forwarded_for = [ip_address(addr.strip()) for addr in forwarded_for_split]
except ValueError as err:
_LOGGER.error(
"Invalid IP address in X-Forwarded-For: %s", forwarded_for_headers[0]
)
raise HTTPBadRequest from err
# Find the last trusted index in the X-Forwarded-For list
forwarded_for_index = 0
for forwarded_ip in forwarded_for:
if any(forwarded_ip in trusted_proxy for trusted_proxy in trusted_proxies):
forwarded_for_index += 1
continue
overrides["remote"] = str(forwarded_ip)
break
else:
# If all the IP addresses are from trusted networks, take the left-most.
forwarded_for_index = -1
overrides["remote"] = str(forwarded_for[-1])
# Handle X-Forwarded-Proto
forwarded_proto_headers = request.headers.getall(X_FORWARDED_PROTO, [])
if forwarded_proto_headers:
if len(forwarded_proto_headers) > 1:
_LOGGER.error(
"Too many headers for X-Forward-Proto: %s", forwarded_proto_headers
)
raise HTTPBadRequest
forwarded_proto_split = list(
reversed(forwarded_proto_headers[0].split(","))
)
forwarded_proto = [proto.strip() for proto in forwarded_proto_split]
# Catch empty values
if "" in forwarded_proto:
_LOGGER.error(
"Empty item received in X-Forward-Proto header: %s",
forwarded_proto_headers[0],
)
raise HTTPBadRequest
            # The X-Forwarded-Proto contains either one element, or the same
            # number of elements as X-Forwarded-For
if len(forwarded_proto) not in (1, len(forwarded_for)):
_LOGGER.error(
"Incorrect number of elements in X-Forward-Proto. Expected 1 or %d, got %d: %s",
len(forwarded_for),
len(forwarded_proto),
forwarded_proto_headers[0],
)
raise HTTPBadRequest
# Ideally this should take the scheme corresponding to the entry
# in X-Forwarded-For that was chosen, but some proxies only retain
# one element. In that case, use what we have.
overrides["scheme"] = forwarded_proto[-1]
if len(forwarded_proto) != 1:
overrides["scheme"] = forwarded_proto[forwarded_for_index]
# Handle X-Forwarded-Host
forwarded_host_headers = request.headers.getall(X_FORWARDED_HOST, [])
if forwarded_host_headers:
# Multiple X-Forwarded-Host headers
if len(forwarded_host_headers) > 1:
_LOGGER.error(
"Too many headers for X-Forwarded-Host: %s", forwarded_host_headers
)
raise HTTPBadRequest
forwarded_host = forwarded_host_headers[0].strip()
if not forwarded_host:
_LOGGER.error("Empty value received in X-Forward-Host header")
raise HTTPBadRequest
overrides["host"] = forwarded_host
# Done, create a new request based on gathered data.
request = request.clone(**overrides)
return await handler(request)
app.middlewares.append(forwarded_middleware)
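# Illustrative sketch (not part of the original module): the middleware is
# attached once per app together with the networks that are allowed to set
# forwarding headers, for example
#
#     from ipaddress import ip_network
#     async_setup_forwarded(app, [ip_network("127.0.0.1"), ip_network("10.0.0.0/8")])
#
# A request whose socket peer is 10.0.0.2 and that carries
# "X-Forwarded-For: 203.0.113.195, 10.0.0.2" would then be cloned with
# remote="203.0.113.195" before reaching the handler.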
|
import pytest
from plumbum import PuttyMachine, SshMachine
from plumbum._testtools import xfail_on_pypy
@pytest.fixture(params=['default', '322'])
def ssh_port(request):
return request.param
class TestPuttyMachine:
@xfail_on_pypy
def test_putty_command(self, mocker, ssh_port):
local = mocker.patch('plumbum.machines.ssh_machine.local')
init = mocker.spy(SshMachine, '__init__')
mocker.patch('plumbum.machines.ssh_machine.BaseRemoteMachine')
host = mocker.MagicMock()
user = local.env.user
port = keyfile = None
ssh_command = local["plink"]
scp_command = local["pscp"]
ssh_opts = ["-ssh"]
if ssh_port == 'default':
putty_port = None
scp_opts = ()
else:
putty_port = int(ssh_port)
ssh_opts.extend(['-P', ssh_port])
scp_opts = ['-P', ssh_port]
encoding = mocker.MagicMock()
connect_timeout = 20
new_session = True
PuttyMachine(
host,
port=putty_port,
connect_timeout=connect_timeout,
new_session=new_session,
encoding=encoding,
)
init.assert_called_with(
mocker.ANY,
host,
user,
port,
keyfile=keyfile,
ssh_command=ssh_command,
scp_command=scp_command,
ssh_opts=ssh_opts,
scp_opts=scp_opts,
encoding=encoding,
connect_timeout=connect_timeout,
new_session=new_session,
)
def test_putty_str(self, mocker):
local = mocker.patch('plumbum.machines.ssh_machine.local')
mocker.patch('plumbum.machines.ssh_machine.BaseRemoteMachine')
host = mocker.MagicMock()
user = local.env.user
machine = PuttyMachine(host)
assert str(machine) == 'putty-ssh://{0}@{1}'.format(user, host)
|
from zope.index.text.lexicon import Lexicon
from zope.index.text.textindex import TextIndex
from zope.index.text.cosineindex import CosineIndex
from zope.index.text.setops import mass_weightedUnion
from BTrees.Length import Length
import math
import numpy
import logging
logger = logging.getLogger(__name__)
class CanopyIndex(TextIndex): # pragma: no cover
def __init__(self):
lexicon = CanopyLexicon()
self.index = CosineIndex(lexicon)
self.lexicon = lexicon
def initSearch(self):
N = len(self.index._docweight)
threshold = int(max(1000, N * 0.05))
self._wids_dict = {}
bucket = self.index.family.IF.Bucket
for wid, docs in self.index._wordinfo.items():
if len(docs) > threshold:
word = self.lexicon._words[wid]
logger.info('Removing stop word {}'.format(word))
del self.index._wordinfo[wid]
continue
if isinstance(docs, dict):
docs = bucket(docs)
idf = numpy.log1p(N / len(docs))
self.index._wordinfo[wid] = docs
term = self.lexicon._words[wid]
self._wids_dict[term] = (wid, idf)
def apply(self, query_list, threshold, start=0, count=None):
_wids_dict = self._wids_dict
_wordinfo = self.index._wordinfo
l_pow = float.__pow__
L = []
qw = 0
for term in query_list:
wid, weight = _wids_dict.get(term, (None, None))
if wid is None:
continue
docs = _wordinfo[wid]
L.append((docs, weight))
qw += l_pow(weight, 2)
results = mass_weightedUnion(L)
qw = math.sqrt(qw)
results = results.byValue(qw * threshold)
return results
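# Illustrative note (not part of the original module): apply() scores candidates
# with a weighted union over the shared query terms and keeps only documents
# whose score clears threshold * sqrt(sum(idf_i ** 2)), the L2 norm of the query
# itself; e.g. two matched terms with idfs 2.0 and 1.0 and threshold=0.5 give a
# cutoff of roughly 0.5 * sqrt(5) ≈ 1.12.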
class CanopyLexicon(Lexicon): # pragma: no cover
def sourceToWordIds(self, last):
if last is None:
last = []
if not isinstance(self.wordCount, Length):
self.wordCount = Length(self.wordCount())
self.wordCount._p_deactivate()
return list(map(self._getWordIdCreate, last))
|
import unittest
from perfkitbenchmarker import beam_pipeline_options
class BeamArgsOptionsTestCase(unittest.TestCase):
def testNoFlagsPassed(self):
options_list = beam_pipeline_options.GenerateAllPipelineOptions(
None, None, [], [])
self.assertListEqual(options_list, [])
def testAllFlagsPassed(self):
options_list = beam_pipeline_options.GenerateAllPipelineOptions(
"--itargone=anarg,--itargtwo=anotherarg",
"[\"--project=testProj\","
"\"--gcpTempLocation=gs://test-bucket/staging\"]",
[{"postgresUsername": "postgres"}, {"postgresPassword": "mypass"}],
[{"name": "aTestVal", "type": "TestValue", "value": "this_is_a_test"},
{"name": "testier", "type": "TestValue", "value": "another_test"}]
)
self.assertListEqual(options_list,
["\"--itargone=anarg\"",
"\"--itargtwo=anotherarg\"",
"\"--project=testProj\"",
"\"--gcpTempLocation=gs://test-bucket/staging\"",
"\"--aTestVal=this_is_a_test\"",
"\"--testier=another_test\"",
"\"--postgresUsername=postgres\"",
"\"--postgresPassword=mypass\""])
def testItOptionsWithSpaces(self):
options_list = beam_pipeline_options.GenerateAllPipelineOptions(
None,
"[\"--project=testProj\", "
"\"--gcpTempLocation=gs://test-bucket/staging\"]",
[],
[])
self.assertListEqual(options_list,
["\"--project=testProj\"",
"\"--gcpTempLocation=gs://test-bucket/staging\""])
  def testDynamicPipelineOptionsWithFormat(self):
dynamic_options = [
{
"name": "test_value_A",
"type": "TestValue",
"value": "a_value",
"format": "other representation of {{TestValue}}",
},
{
"name": "test_value_B",
"type": "TestValue",
"value": "b_value"
}
]
self.assertListEqual(
beam_pipeline_options.EvaluateDynamicPipelineOptions(dynamic_options),
[
("test_value_A", "other representation of a_value"),
("test_value_B", "b_value"),
]
)
def dynamicPipelineOptions(self):
beam_pipeline_options.EvaluateDynamicPipelineOptions()
if __name__ == '__main__':
unittest.main()
|
from weblate.lang.models import Language, Plural
from weblate.utils.management.base import BaseCommand
class Command(BaseCommand):
help = "Move all content from one language to other"
def add_arguments(self, parser):
parser.add_argument("source", help="Source language code")
parser.add_argument("target", help="Target language code")
def handle(self, *args, **options):
source = Language.objects.get(code=options["source"])
target = Language.objects.get(code=options["target"])
for translation in source.translation_set.iterator():
other = translation.component.translation_set.filter(language=target)
if other.exists():
self.stderr.write(f"Already exists: {translation}")
continue
translation.language = target
translation.save()
source.announcement_set.update(language=target)
for profile in source.profile_set.iterator():
profile.languages.remove(source)
profile.languages.add(target)
for profile in source.secondary_profile_set.iterator():
profile.secondary_languages.remove(source)
profile.secondary_languages.add(target)
source.component_set.update(source_language=target)
for group in source.group_set.iterator():
group.languages.remove(source)
group.languages.add(target)
source.term_set.update(language=target)
for plural in source.plural_set.iterator():
try:
new_plural = target.plural_set.get(formula=plural.formula)
plural.translation_set.update(plural=new_plural)
except Plural.DoesNotExist:
plural.language = target
plural.save()
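# Example invocation sketch (the actual command name comes from this file's
# name under management/commands/, assumed here to be "move_language"):
#   ./manage.py move_language <source-language-code> <target-language-code>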
|
import asyncio
import logging
from typing import Any, Dict, Iterable, Optional
from homeassistant.const import SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON
from homeassistant.core import Context, State
from homeassistant.helpers.typing import HomeAssistantType
from .const import (
ATTR_HUMIDITY,
ATTR_MODE,
DOMAIN,
SERVICE_SET_HUMIDITY,
SERVICE_SET_MODE,
)
_LOGGER = logging.getLogger(__name__)
async def _async_reproduce_states(
hass: HomeAssistantType,
state: State,
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce component states."""
cur_state = hass.states.get(state.entity_id)
if cur_state is None:
_LOGGER.warning("Unable to find entity %s", state.entity_id)
return
async def call_service(service: str, keys: Iterable, data=None):
"""Call service with set of attributes given."""
data = data or {}
data["entity_id"] = state.entity_id
for key in keys:
if key in state.attributes:
data[key] = state.attributes[key]
await hass.services.async_call(
DOMAIN, service, data, blocking=True, context=context
)
if state.state == STATE_OFF:
# Ensure the device is off if it needs to be and exit
if cur_state.state != STATE_OFF:
await call_service(SERVICE_TURN_OFF, [])
return
if state.state != STATE_ON:
# we can't know how to handle this
_LOGGER.warning(
"Invalid state specified for %s: %s", state.entity_id, state.state
)
return
# First of all, turn on if needed, because the device might not
# be able to set mode and humidity while being off
if cur_state.state != STATE_ON:
await call_service(SERVICE_TURN_ON, [])
# refetch the state as turning on might allow us to see some more values
cur_state = hass.states.get(state.entity_id)
# Then set the mode before target humidity, because switching modes
# may invalidate target humidity
if ATTR_MODE in state.attributes and state.attributes[
ATTR_MODE
] != cur_state.attributes.get(ATTR_MODE):
await call_service(SERVICE_SET_MODE, [ATTR_MODE])
# Next, restore target humidity for the current mode
if ATTR_HUMIDITY in state.attributes and state.attributes[
ATTR_HUMIDITY
] != cur_state.attributes.get(ATTR_HUMIDITY):
await call_service(SERVICE_SET_HUMIDITY, [ATTR_HUMIDITY])
async def async_reproduce_states(
hass: HomeAssistantType,
states: Iterable[State],
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce component states."""
await asyncio.gather(
*(
_async_reproduce_states(
hass, state, context=context, reproduce_options=reproduce_options
)
for state in states
)
)
|
import argparse
import re
import chainer
from chainer import Link
import chainer.links.caffe.caffe_function as caffe
"""
Please download the pretrained weights from here.
http://www.robots.ox.ac.uk/%7Evgg/software/very_deep/caffe/VGG_ILSVRC_16_layers.caffemodel
"""
def rename(name):
m = re.match(r'conv(\d+)_(\d+)$', name)
if m:
i, j = map(int, m.groups())
return 'conv{:d}_{:d}/conv'.format(i, j)
return name
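# Illustrative examples of the mapping above:
#   rename('conv3_2') -> 'conv3_2/conv'
#   rename('fc6')     -> 'fc6'  (names without the convN_M pattern pass through)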
class VGGCaffeFunction(caffe.CaffeFunction):
def __init__(self, model_path):
print('loading weights from {:s} ... '.format(model_path))
super(VGGCaffeFunction, self).__init__(model_path)
def __setattr__(self, name, value):
if self.within_init_scope and isinstance(value, Link):
new_name = rename(name)
if new_name == 'conv1_1/conv':
# BGR -> RGB
value.W.array[:, ::-1] = value.W.array
print('{:s} -> {:s} (BGR -> RGB)'.format(name, new_name))
else:
print('{:s} -> {:s}'.format(name, new_name))
else:
new_name = name
super(VGGCaffeFunction, self).__setattr__(new_name, value)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('caffemodel')
parser.add_argument('output')
args = parser.parse_args()
model = VGGCaffeFunction(args.caffemodel)
chainer.serializers.save_npz(args.output, model)
if __name__ == '__main__':
main()
|
from homeassistant.components.rflink import EVENT_BUTTON_PRESSED
from homeassistant.const import (
ATTR_ENTITY_ID,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import CoreState, State, callback
from tests.common import mock_restore_cache
from tests.components.rflink.test_init import mock_rflink
DOMAIN = "switch"
CONFIG = {
"rflink": {
"port": "/dev/ttyABC0",
"ignore_devices": ["ignore_wildcard_*", "ignore_sensor"],
},
DOMAIN: {
"platform": "rflink",
"devices": {"protocol_0_0": {"name": "test", "aliases": ["test_alias_0_0"]}},
},
}
async def test_default_setup(hass, monkeypatch):
"""Test all basic functionality of the rflink switch component."""
# setup mocking rflink module
event_callback, create, protocol, _ = await mock_rflink(
hass, CONFIG, DOMAIN, monkeypatch
)
# make sure arguments are passed
assert create.call_args_list[0][1]["ignore"]
# test default state of switch loaded from config
switch_initial = hass.states.get("switch.test")
assert switch_initial.state == "off"
assert switch_initial.attributes["assumed_state"]
# switch should follow state of the hardware device by interpreting
# incoming events for its name and aliases
# mock incoming command event for this device
event_callback({"id": "protocol_0_0", "command": "on"})
await hass.async_block_till_done()
switch_after_first_command = hass.states.get("switch.test")
assert switch_after_first_command.state == "on"
    # after receiving the first command, the state no longer has to be assumed
assert not switch_after_first_command.attributes.get("assumed_state")
# mock incoming command event for this device
event_callback({"id": "protocol_0_0", "command": "off"})
await hass.async_block_till_done()
assert hass.states.get("switch.test").state == "off"
# test following aliases
# mock incoming command event for this device alias
event_callback({"id": "test_alias_0_0", "command": "on"})
await hass.async_block_till_done()
assert hass.states.get("switch.test").state == "on"
# The switch component does not support adding new devices for incoming
# events because every new unknown device is added as a light by default.
# test changing state from HA propagates to Rflink
hass.async_create_task(
hass.services.async_call(
DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: f"{DOMAIN}.test"}
)
)
await hass.async_block_till_done()
assert hass.states.get(f"{DOMAIN}.test").state == "off"
assert protocol.send_command_ack.call_args_list[0][0][0] == "protocol_0_0"
assert protocol.send_command_ack.call_args_list[0][0][1] == "off"
hass.async_create_task(
hass.services.async_call(
DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: f"{DOMAIN}.test"}
)
)
await hass.async_block_till_done()
assert hass.states.get(f"{DOMAIN}.test").state == "on"
assert protocol.send_command_ack.call_args_list[1][0][1] == "on"
async def test_group_alias(hass, monkeypatch):
"""Group aliases should only respond to group commands (allon/alloff)."""
config = {
"rflink": {"port": "/dev/ttyABC0"},
DOMAIN: {
"platform": "rflink",
"devices": {
"protocol_0_0": {"name": "test", "group_aliases": ["test_group_0_0"]}
},
},
}
# setup mocking rflink module
event_callback, _, _, _ = await mock_rflink(hass, config, DOMAIN, monkeypatch)
assert hass.states.get(f"{DOMAIN}.test").state == "off"
# test sending group command to group alias
event_callback({"id": "test_group_0_0", "command": "allon"})
await hass.async_block_till_done()
assert hass.states.get(f"{DOMAIN}.test").state == "on"
# test sending group command to group alias
event_callback({"id": "test_group_0_0", "command": "off"})
await hass.async_block_till_done()
assert hass.states.get(f"{DOMAIN}.test").state == "on"
async def test_nogroup_alias(hass, monkeypatch):
"""Non group aliases should not respond to group commands."""
config = {
"rflink": {"port": "/dev/ttyABC0"},
DOMAIN: {
"platform": "rflink",
"devices": {
"protocol_0_0": {
"name": "test",
"nogroup_aliases": ["test_nogroup_0_0"],
}
},
},
}
# setup mocking rflink module
event_callback, _, _, _ = await mock_rflink(hass, config, DOMAIN, monkeypatch)
assert hass.states.get(f"{DOMAIN}.test").state == "off"
# test sending group command to nogroup alias
event_callback({"id": "test_nogroup_0_0", "command": "allon"})
await hass.async_block_till_done()
# should not affect state
assert hass.states.get(f"{DOMAIN}.test").state == "off"
# test sending group command to nogroup alias
event_callback({"id": "test_nogroup_0_0", "command": "on"})
await hass.async_block_till_done()
# should affect state
assert hass.states.get(f"{DOMAIN}.test").state == "on"
async def test_nogroup_device_id(hass, monkeypatch):
"""Device id that do not respond to group commands (allon/alloff)."""
config = {
"rflink": {"port": "/dev/ttyABC0"},
DOMAIN: {
"platform": "rflink",
"devices": {"test_nogroup_0_0": {"name": "test", "group": False}},
},
}
# setup mocking rflink module
event_callback, _, _, _ = await mock_rflink(hass, config, DOMAIN, monkeypatch)
assert hass.states.get(f"{DOMAIN}.test").state == "off"
# test sending group command to nogroup
event_callback({"id": "test_nogroup_0_0", "command": "allon"})
await hass.async_block_till_done()
# should not affect state
assert hass.states.get(f"{DOMAIN}.test").state == "off"
# test sending group command to nogroup
event_callback({"id": "test_nogroup_0_0", "command": "on"})
await hass.async_block_till_done()
# should affect state
assert hass.states.get(f"{DOMAIN}.test").state == "on"
async def test_device_defaults(hass, monkeypatch):
"""Event should fire if device_defaults config says so."""
config = {
"rflink": {"port": "/dev/ttyABC0"},
DOMAIN: {
"platform": "rflink",
"device_defaults": {"fire_event": True},
"devices": {
"protocol_0_0": {"name": "test", "aliases": ["test_alias_0_0"]}
},
},
}
# setup mocking rflink module
event_callback, _, _, _ = await mock_rflink(hass, config, DOMAIN, monkeypatch)
calls = []
@callback
def listener(event):
calls.append(event)
hass.bus.async_listen_once(EVENT_BUTTON_PRESSED, listener)
# test event for new unconfigured sensor
event_callback({"id": "protocol_0_0", "command": "off"})
await hass.async_block_till_done()
await hass.async_block_till_done()
assert calls[0].data == {"state": "off", "entity_id": f"{DOMAIN}.test"}
async def test_not_firing_default(hass, monkeypatch):
"""By default no bus events should be fired."""
config = {
"rflink": {"port": "/dev/ttyABC0"},
DOMAIN: {
"platform": "rflink",
"devices": {
"protocol_0_0": {"name": "test", "aliases": ["test_alias_0_0"]}
},
},
}
# setup mocking rflink module
event_callback, _, _, _ = await mock_rflink(hass, config, DOMAIN, monkeypatch)
calls = []
@callback
def listener(event):
calls.append(event)
hass.bus.async_listen_once(EVENT_BUTTON_PRESSED, listener)
# test event for new unconfigured sensor
event_callback({"id": "protocol_0_0", "command": "off"})
await hass.async_block_till_done()
assert not calls, "an event has been fired"
async def test_restore_state(hass, monkeypatch):
"""Ensure states are restored on startup."""
config = {
"rflink": {"port": "/dev/ttyABC0"},
DOMAIN: {
"platform": "rflink",
"devices": {
"test": {"name": "s1", "aliases": ["test_alias_0_0"]},
"switch_test": {"name": "s2"},
"switch_s3": {"name": "s3"},
},
},
}
mock_restore_cache(
hass, (State(f"{DOMAIN}.s1", STATE_ON), State(f"{DOMAIN}.s2", STATE_OFF))
)
hass.state = CoreState.starting
# setup mocking rflink module
_, _, _, _ = await mock_rflink(hass, config, DOMAIN, monkeypatch)
state = hass.states.get(f"{DOMAIN}.s1")
assert state
assert state.state == STATE_ON
state = hass.states.get(f"{DOMAIN}.s2")
assert state
assert state.state == STATE_OFF
    # a switch without a cached state must get default values
state = hass.states.get(f"{DOMAIN}.s3")
assert state
assert state.state == STATE_OFF
assert state.attributes["assumed_state"]
|
import asyncio
import logging
from hatasmota.const import (
CONF_MAC,
CONF_MANUFACTURER,
CONF_MODEL,
CONF_NAME,
CONF_SW_VERSION,
)
from hatasmota.discovery import clear_discovery_topic
from hatasmota.mqtt import TasmotaMQTTClient
import voluptuous as vol
from homeassistant.components import mqtt, websocket_api
from homeassistant.components.mqtt.subscription import (
async_subscribe_topics,
async_unsubscribe_topics,
)
from homeassistant.core import callback
from homeassistant.helpers.device_registry import (
CONNECTION_NETWORK_MAC,
EVENT_DEVICE_REGISTRY_UPDATED,
async_entries_for_config_entry,
)
from homeassistant.helpers.typing import HomeAssistantType
from . import device_automation, discovery
from .const import (
CONF_DISCOVERY_PREFIX,
DATA_REMOVE_DISCOVER_COMPONENT,
DATA_UNSUB,
DOMAIN,
PLATFORMS,
)
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass: HomeAssistantType, config: dict):
"""Set up the Tasmota component."""
return True
async def async_setup_entry(hass, entry):
"""Set up Tasmota from a config entry."""
websocket_api.async_register_command(hass, websocket_remove_device)
hass.data[DATA_UNSUB] = []
def _publish(*args, **kwds):
mqtt.async_publish(hass, *args, **kwds)
async def _subscribe_topics(sub_state, topics):
# Optionally mark message handlers as callback
for topic in topics.values():
if "msg_callback" in topic and "event_loop_safe" in topic:
topic["msg_callback"] = callback(topic["msg_callback"])
return await async_subscribe_topics(hass, sub_state, topics)
async def _unsubscribe_topics(sub_state):
return await async_unsubscribe_topics(hass, sub_state)
tasmota_mqtt = TasmotaMQTTClient(_publish, _subscribe_topics, _unsubscribe_topics)
device_registry = await hass.helpers.device_registry.async_get_registry()
def async_discover_device(config, mac):
"""Discover and add a Tasmota device."""
async_setup_device(hass, mac, config, entry, tasmota_mqtt, device_registry)
async def async_device_removed(event):
"""Handle the removal of a device."""
device_registry = await hass.helpers.device_registry.async_get_registry()
if event.data["action"] != "remove":
return
device = device_registry.deleted_devices[event.data["device_id"]]
if entry.entry_id not in device.config_entries:
return
macs = [c[1] for c in device.connections if c[0] == CONNECTION_NETWORK_MAC]
for mac in macs:
clear_discovery_topic(mac, entry.data[CONF_DISCOVERY_PREFIX], tasmota_mqtt)
hass.data[DATA_UNSUB].append(
hass.bus.async_listen(EVENT_DEVICE_REGISTRY_UPDATED, async_device_removed)
)
async def start_platforms():
await device_automation.async_setup_entry(hass, entry)
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_setup(entry, component)
for component in PLATFORMS
]
)
discovery_prefix = entry.data[CONF_DISCOVERY_PREFIX]
await discovery.async_start(
hass, discovery_prefix, entry, tasmota_mqtt, async_discover_device
)
hass.async_create_task(start_platforms())
return True
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
# cleanup platforms
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if not unload_ok:
return False
# disable discovery
await discovery.async_stop(hass)
# cleanup subscriptions
for unsub in hass.data[DATA_UNSUB]:
unsub()
hass.data.pop(DATA_REMOVE_DISCOVER_COMPONENT.format("device_automation"))()
for component in PLATFORMS:
hass.data.pop(DATA_REMOVE_DISCOVER_COMPONENT.format(component))()
    # detach device triggers
device_registry = await hass.helpers.device_registry.async_get_registry()
devices = async_entries_for_config_entry(device_registry, entry.entry_id)
for device in devices:
await device_automation.async_remove_automations(hass, device.id)
return True
def _remove_device(hass, config_entry, mac, tasmota_mqtt, device_registry):
"""Remove device from device registry."""
device = device_registry.async_get_device(set(), {(CONNECTION_NETWORK_MAC, mac)})
if device is None:
return
_LOGGER.debug("Removing tasmota device %s", mac)
device_registry.async_remove_device(device.id)
clear_discovery_topic(mac, config_entry.data[CONF_DISCOVERY_PREFIX], tasmota_mqtt)
def _update_device(hass, config_entry, config, device_registry):
"""Add or update device registry."""
config_entry_id = config_entry.entry_id
device_info = {
"connections": {(CONNECTION_NETWORK_MAC, config[CONF_MAC])},
"manufacturer": config[CONF_MANUFACTURER],
"model": config[CONF_MODEL],
"name": config[CONF_NAME],
"sw_version": config[CONF_SW_VERSION],
"config_entry_id": config_entry_id,
}
_LOGGER.debug("Adding or updating tasmota device %s", config[CONF_MAC])
device_registry.async_get_or_create(**device_info)
def async_setup_device(hass, mac, config, config_entry, tasmota_mqtt, device_registry):
"""Set up the Tasmota device."""
if not config:
_remove_device(hass, config_entry, mac, tasmota_mqtt, device_registry)
else:
_update_device(hass, config_entry, config, device_registry)
@websocket_api.websocket_command(
{vol.Required("type"): "tasmota/device/remove", vol.Required("device_id"): str}
)
@websocket_api.async_response
async def websocket_remove_device(hass, connection, msg):
"""Delete device."""
device_id = msg["device_id"]
dev_registry = await hass.helpers.device_registry.async_get_registry()
device = dev_registry.async_get(device_id)
if not device:
connection.send_error(
msg["id"], websocket_api.const.ERR_NOT_FOUND, "Device not found"
)
return
for config_entry in device.config_entries:
config_entry = hass.config_entries.async_get_entry(config_entry)
# Only delete the device if it belongs to a Tasmota device entry
if config_entry.domain == DOMAIN:
dev_registry.async_remove_device(device_id)
connection.send_message(websocket_api.result_message(msg["id"]))
return
connection.send_error(
msg["id"], websocket_api.const.ERR_NOT_FOUND, "Non Tasmota device"
)
|
import asyncio
import copy
import hmac
import json
import logging
from aiohttp.hdrs import AUTHORIZATION
from aiohttp.web import Request, Response
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components.binary_sensor import DEVICE_CLASSES_SCHEMA
from homeassistant.components.http import HomeAssistantView
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_ACCESS_TOKEN,
CONF_BINARY_SENSORS,
CONF_DEVICES,
CONF_HOST,
CONF_ID,
CONF_NAME,
CONF_PIN,
CONF_PORT,
CONF_SENSORS,
CONF_SWITCHES,
CONF_TYPE,
CONF_ZONE,
HTTP_BAD_REQUEST,
HTTP_NOT_FOUND,
HTTP_UNAUTHORIZED,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from .config_flow import ( # Loading the config flow file will register the flow
CONF_DEFAULT_OPTIONS,
CONF_IO,
CONF_IO_BIN,
CONF_IO_DIG,
CONF_IO_SWI,
OPTIONS_SCHEMA,
)
from .const import (
CONF_ACTIVATION,
CONF_API_HOST,
CONF_BLINK,
CONF_DISCOVERY,
CONF_INVERSE,
CONF_MOMENTARY,
CONF_PAUSE,
CONF_POLL_INTERVAL,
CONF_REPEAT,
DOMAIN,
PIN_TO_ZONE,
STATE_HIGH,
STATE_LOW,
UNDO_UPDATE_LISTENER,
UPDATE_ENDPOINT,
ZONE_TO_PIN,
ZONES,
)
from .handlers import HANDLERS
from .panel import AlarmPanel
_LOGGER = logging.getLogger(__name__)
def ensure_pin(value):
"""Check if valid pin and coerce to string."""
if value is None:
raise vol.Invalid("pin value is None")
if PIN_TO_ZONE.get(str(value)) is None:
raise vol.Invalid("pin not valid")
return str(value)
def ensure_zone(value):
"""Check if valid zone and coerce to string."""
if value is None:
raise vol.Invalid("zone value is None")
    if str(value) not in ZONES:
raise vol.Invalid("zone not valid")
return str(value)
def import_device_validator(config):
"""Validate zones and reformat for import."""
config = copy.deepcopy(config)
io_cfgs = {}
# Replace pins with zones
for conf_platform, conf_io in (
(CONF_BINARY_SENSORS, CONF_IO_BIN),
(CONF_SENSORS, CONF_IO_DIG),
(CONF_SWITCHES, CONF_IO_SWI),
):
for zone in config.get(conf_platform, []):
if zone.get(CONF_PIN):
zone[CONF_ZONE] = PIN_TO_ZONE[zone[CONF_PIN]]
del zone[CONF_PIN]
io_cfgs[zone[CONF_ZONE]] = conf_io
# Migrate config_entry data into default_options structure
config[CONF_IO] = io_cfgs
config[CONF_DEFAULT_OPTIONS] = OPTIONS_SCHEMA(config)
# clean up fields migrated to options
config.pop(CONF_BINARY_SENSORS, None)
config.pop(CONF_SENSORS, None)
config.pop(CONF_SWITCHES, None)
config.pop(CONF_BLINK, None)
config.pop(CONF_DISCOVERY, None)
config.pop(CONF_API_HOST, None)
config.pop(CONF_IO, None)
return config
def import_validator(config):
"""Reformat for import."""
config = copy.deepcopy(config)
# push api_host into device configs
for device in config.get(CONF_DEVICES, []):
device[CONF_API_HOST] = config.get(CONF_API_HOST, "")
return config
# configuration.yaml schemas (legacy)
BINARY_SENSOR_SCHEMA_YAML = vol.All(
vol.Schema(
{
vol.Exclusive(CONF_ZONE, "s_io"): ensure_zone,
vol.Exclusive(CONF_PIN, "s_io"): ensure_pin,
vol.Required(CONF_TYPE): DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_INVERSE, default=False): cv.boolean,
}
),
cv.has_at_least_one_key(CONF_PIN, CONF_ZONE),
)
SENSOR_SCHEMA_YAML = vol.All(
vol.Schema(
{
vol.Exclusive(CONF_ZONE, "s_io"): ensure_zone,
vol.Exclusive(CONF_PIN, "s_io"): ensure_pin,
vol.Required(CONF_TYPE): vol.All(vol.Lower, vol.In(["dht", "ds18b20"])),
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_POLL_INTERVAL, default=3): vol.All(
vol.Coerce(int), vol.Range(min=1)
),
}
),
cv.has_at_least_one_key(CONF_PIN, CONF_ZONE),
)
SWITCH_SCHEMA_YAML = vol.All(
vol.Schema(
{
vol.Exclusive(CONF_ZONE, "s_io"): ensure_zone,
vol.Exclusive(CONF_PIN, "s_io"): ensure_pin,
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_ACTIVATION, default=STATE_HIGH): vol.All(
vol.Lower, vol.Any(STATE_HIGH, STATE_LOW)
),
vol.Optional(CONF_MOMENTARY): vol.All(vol.Coerce(int), vol.Range(min=10)),
vol.Optional(CONF_PAUSE): vol.All(vol.Coerce(int), vol.Range(min=10)),
vol.Optional(CONF_REPEAT): vol.All(vol.Coerce(int), vol.Range(min=-1)),
}
),
cv.has_at_least_one_key(CONF_PIN, CONF_ZONE),
)
DEVICE_SCHEMA_YAML = vol.All(
vol.Schema(
{
vol.Required(CONF_ID): cv.matches_regex("[0-9a-f]{12}"),
vol.Optional(CONF_BINARY_SENSORS): vol.All(
cv.ensure_list, [BINARY_SENSOR_SCHEMA_YAML]
),
vol.Optional(CONF_SENSORS): vol.All(cv.ensure_list, [SENSOR_SCHEMA_YAML]),
vol.Optional(CONF_SWITCHES): vol.All(cv.ensure_list, [SWITCH_SCHEMA_YAML]),
vol.Inclusive(CONF_HOST, "host_info"): cv.string,
vol.Inclusive(CONF_PORT, "host_info"): cv.port,
vol.Optional(CONF_BLINK, default=True): cv.boolean,
vol.Optional(CONF_API_HOST, default=""): vol.Any("", cv.url),
vol.Optional(CONF_DISCOVERY, default=True): cv.boolean,
}
),
import_device_validator,
)
# pylint: disable=no-value-for-parameter
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.All(
import_validator,
vol.Schema(
{
vol.Required(CONF_ACCESS_TOKEN): cv.string,
vol.Optional(CONF_API_HOST): vol.Url(),
vol.Optional(CONF_DEVICES): vol.All(
cv.ensure_list, [DEVICE_SCHEMA_YAML]
),
}
),
)
},
extra=vol.ALLOW_EXTRA,
)
YAML_CONFIGS = "yaml_configs"
PLATFORMS = ["binary_sensor", "sensor", "switch"]
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the Konnected platform."""
cfg = config.get(DOMAIN)
if cfg is None:
cfg = {}
if DOMAIN not in hass.data:
hass.data[DOMAIN] = {
CONF_ACCESS_TOKEN: cfg.get(CONF_ACCESS_TOKEN),
CONF_API_HOST: cfg.get(CONF_API_HOST),
CONF_DEVICES: {},
}
hass.http.register_view(KonnectedView)
# Check if they have yaml configured devices
if CONF_DEVICES not in cfg:
return True
for device in cfg.get(CONF_DEVICES, []):
        # Attempt to import the config. Use
        # hass.async_create_task to avoid a deadlock.
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=device
)
)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up panel from a config entry."""
client = AlarmPanel(hass, entry)
# creates a panel data store in hass.data[DOMAIN][CONF_DEVICES]
await client.async_save_data()
# if the cfg entry was created we know we could connect to the panel at some point
# async_connect will handle retries until it establishes a connection
await client.async_connect()
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
# config entry specific data to enable unload
hass.data[DOMAIN][entry.entry_id] = {
UNDO_UPDATE_LISTENER: entry.add_update_listener(async_entry_updated)
}
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
hass.data[DOMAIN][entry.entry_id][UNDO_UPDATE_LISTENER]()
if unload_ok:
hass.data[DOMAIN][CONF_DEVICES].pop(entry.data[CONF_ID])
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
async def async_entry_updated(hass: HomeAssistant, entry: ConfigEntry):
"""Reload the config entry when options change."""
await hass.config_entries.async_reload(entry.entry_id)
class KonnectedView(HomeAssistantView):
"""View creates an endpoint to receive push updates from the device."""
url = UPDATE_ENDPOINT
name = "api:konnected"
requires_auth = False # Uses access token from configuration
def __init__(self):
"""Initialize the view."""
@staticmethod
def binary_value(state, activation):
"""Return binary value for GPIO based on state and activation."""
if activation == STATE_HIGH:
return 1 if state == STATE_ON else 0
return 0 if state == STATE_ON else 1
async def update_sensor(self, request: Request, device_id) -> Response:
"""Process a put or post."""
hass = request.app["hass"]
data = hass.data[DOMAIN]
auth = request.headers.get(AUTHORIZATION)
tokens = []
if hass.data[DOMAIN].get(CONF_ACCESS_TOKEN):
tokens.extend([hass.data[DOMAIN][CONF_ACCESS_TOKEN]])
tokens.extend(
[
entry.data[CONF_ACCESS_TOKEN]
for entry in hass.config_entries.async_entries(DOMAIN)
if entry.data.get(CONF_ACCESS_TOKEN)
]
)
if auth is None or not next(
(True for token in tokens if hmac.compare_digest(f"Bearer {token}", auth)),
False,
):
return self.json_message("unauthorized", status_code=HTTP_UNAUTHORIZED)
try: # Konnected 2.2.0 and above supports JSON payloads
payload = await request.json()
        except json.decoder.JSONDecodeError:
            _LOGGER.error(
                "Your Konnected device software may be out of "
                "date. Visit https://help.konnected.io for "
                "updating instructions"
            )
            return self.json_message(
                "payload not in JSON format", status_code=HTTP_BAD_REQUEST
            )
device = data[CONF_DEVICES].get(device_id)
if device is None:
return self.json_message(
"unregistered device", status_code=HTTP_BAD_REQUEST
)
panel = device.get("panel")
if panel is not None:
# connect if we haven't already
hass.async_create_task(panel.async_connect())
try:
zone_num = str(payload.get(CONF_ZONE) or PIN_TO_ZONE[payload[CONF_PIN]])
payload[CONF_ZONE] = zone_num
zone_data = device[CONF_BINARY_SENSORS].get(zone_num) or next(
(s for s in device[CONF_SENSORS] if s[CONF_ZONE] == zone_num), None
)
except KeyError:
zone_data = None
if zone_data is None:
return self.json_message(
"unregistered sensor/actuator", status_code=HTTP_BAD_REQUEST
)
zone_data["device_id"] = device_id
for attr in ["state", "temp", "humi", "addr"]:
value = payload.get(attr)
handler = HANDLERS.get(attr)
if value is not None and handler:
hass.async_create_task(handler(hass, zone_data, payload))
return self.json_message("ok")
async def get(self, request: Request, device_id) -> Response:
"""Return the current binary state of a switch."""
hass = request.app["hass"]
data = hass.data[DOMAIN]
device = data[CONF_DEVICES].get(device_id)
if not device:
return self.json_message(
f"Device {device_id} not configured", status_code=HTTP_NOT_FOUND
)
panel = device.get("panel")
if panel is not None:
# connect if we haven't already
hass.async_create_task(panel.async_connect())
# Our data model is based on zone ids but we convert from/to pin ids
# based on whether they are specified in the request
try:
zone_num = str(
request.query.get(CONF_ZONE) or PIN_TO_ZONE[request.query[CONF_PIN]]
)
zone = next(
switch
for switch in device[CONF_SWITCHES]
if switch[CONF_ZONE] == zone_num
)
except StopIteration:
zone = None
except KeyError:
zone = None
zone_num = None
if not zone:
target = request.query.get(
CONF_ZONE, request.query.get(CONF_PIN, "unknown")
)
return self.json_message(
f"Switch on zone or pin {target} not configured",
status_code=HTTP_NOT_FOUND,
)
resp = {}
if request.query.get(CONF_ZONE):
resp[CONF_ZONE] = zone_num
else:
resp[CONF_PIN] = ZONE_TO_PIN[zone_num]
# Make sure entity is setup
zone_entity_id = zone.get(ATTR_ENTITY_ID)
if zone_entity_id:
resp["state"] = self.binary_value(
hass.states.get(zone_entity_id).state, zone[CONF_ACTIVATION]
)
return self.json(resp)
_LOGGER.warning("Konnected entity not yet setup, returning default")
resp["state"] = self.binary_value(STATE_OFF, zone[CONF_ACTIVATION])
return self.json(resp)
async def put(self, request: Request, device_id) -> Response:
"""Receive a sensor update via PUT request and async set state."""
return await self.update_sensor(request, device_id)
async def post(self, request: Request, device_id) -> Response:
"""Receive a sensor update via POST request and async set state."""
return await self.update_sensor(request, device_id)
|
import socket
import diamond.collector
import os.path
import urlparse
import time
class OpenVPNCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(OpenVPNCollector, self).get_default_config_help()
config_help.update({
'instances': 'List of instances to collect stats from',
'timeout': 'network timeout'
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(OpenVPNCollector, self).get_default_config()
config.update({
'path': 'openvpn',
'instances': 'file:///var/log/openvpn/status.log',
'timeout': '10',
})
return config
def parse_url(self, uri):
"""
Convert urlparse from a python 2.4 layout to a python 2.7 layout
"""
parsed = urlparse.urlparse(uri)
if 'scheme' not in parsed:
class Object(object):
pass
newparsed = Object()
newparsed.scheme = parsed[0]
newparsed.netloc = parsed[1]
newparsed.path = parsed[2]
newparsed.params = parsed[3]
newparsed.query = parsed[4]
newparsed.fragment = parsed[5]
newparsed.username = ''
newparsed.password = ''
newparsed.hostname = ''
newparsed.port = ''
parsed = newparsed
return parsed
def collect(self):
if isinstance(self.config['instances'], basestring):
instances = [self.config['instances']]
else:
instances = self.config['instances']
for uri in instances:
parsed = self.parse_url(uri)
collect = getattr(self, 'collect_%s' % (parsed.scheme,), None)
if collect:
collect(uri)
else:
self.log.error('OpenVPN no handler for %s', uri)
def collect_file(self, uri):
parsed = self.parse_url(uri)
filename = parsed.path
if '?' in filename:
filename, name = filename.split('?')
else:
name = os.path.splitext(os.path.basename(filename))[0]
if not os.access(filename, os.R_OK):
self.log.error('OpenVPN collect failed: unable to read "%s"',
filename)
return
else:
self.log.info('OpenVPN parsing "%s" file: %s', name, filename)
fd = open(filename, 'r')
lines = fd.readlines()
fd.close()
self.parse(name, lines)
def collect_tcp(self, uri):
parsed = self.parse_url(uri)
try:
host, port = parsed.netloc.split(':')
port = int(port)
except ValueError:
self.log.error('OpenVPN expected host:port in URI, got "%s"',
parsed.netloc)
return
if '?' in parsed.path:
name = parsed.path[1:]
else:
name = host.replace('.', '_')
self.log.info('OpenVPN parsing "%s" tcp: %s:%d', name, host, port)
try:
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.settimeout(int(self.config['timeout']))
server.connect((host, port))
fd = server.makefile('rb')
line = fd.readline()
if not line.startswith('>INFO:OpenVPN'):
self.log.debug('OpenVPN received: %s', line.rstrip())
self.log.error('OpenVPN protocol error')
server.close()
return
server.send('status\r\n')
lines = []
while True:
line = fd.readline()
lines.append(line)
if line.strip() == 'END':
break
# Hand over data to the parser
self.parse(name, lines)
# Bye
server.close()
except socket.error as e:
self.log.error('OpenVPN management connection error: %s', e)
return
def parse(self, name, lines):
for line in lines:
self.log.debug('OpenVPN: %s', line.rstrip())
time.sleep(0.5)
number_connected_clients = 0
section = ''
heading = []
for line in lines:
if line.strip() == 'END':
break
elif line.lower().startswith('openvpn statistics'):
section = 'statistics'
elif line.lower().startswith('openvpn client list'):
section = 'clients'
elif line.lower().startswith('routing table'):
# ignored
section = ''
elif line.lower().startswith('global stats'):
section = 'global'
elif ',' in line:
key, value = line.split(',', 1)
if key.lower() == 'updated':
continue
if section == 'statistics':
# All values here are numeric
self.publish_number('.'.join([
name,
'global',
key, ]), value)
elif section == 'clients':
# Clients come with a heading
if not heading:
heading = line.strip().split(',')
else:
info = {}
number_connected_clients += 1
for k, v in zip(heading, line.strip().split(',')):
info[k.lower()] = v
self.publish_number('.'.join([
name,
section,
info['common name'].replace('.', '_'),
'bytes_rx']), info['bytes received'])
self.publish_number('.'.join([
name,
section,
info['common name'].replace('.', '_'),
'bytes_tx']), info['bytes sent'])
elif section == 'global':
# All values here are numeric
self.publish_number('.'.join([
name,
section,
key, ]), value)
elif line.startswith('END'):
break
self.publish('%s.clients.connected' % name, number_connected_clients)
def publish_number(self, key, value):
key = key.replace('/', '-').replace(' ', '_').lower()
try:
value = long(value)
except ValueError:
self.log.error('OpenVPN expected a number for "%s", got "%s"',
key, value)
return
else:
self.publish(key, value)
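# Example configuration sketch (an assumed snippet under the [collectors]
# section of diamond.conf, matching the defaults above; instances accepts a
# comma-separated list of file:// or tcp:// URIs):
#   [[OpenVPNCollector]]
#   enabled = True
#   instances = file:///var/log/openvpn/status.log
#   timeout = 10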
|
import asyncio
import json
import logging
import aiohttp
from aiohttp.hdrs import CONTENT_TYPE
import async_timeout
import voluptuous as vol
from homeassistant.const import ATTR_NAME, CONF_API_KEY, CONF_TIMEOUT, CONTENT_TYPE_JSON
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import slugify
_LOGGER = logging.getLogger(__name__)
ATTR_CAMERA_ENTITY = "camera_entity"
ATTR_GROUP = "group"
ATTR_PERSON = "person"
CONF_AZURE_REGION = "azure_region"
DATA_MICROSOFT_FACE = "microsoft_face"
DEFAULT_TIMEOUT = 10
DOMAIN = "microsoft_face"
FACE_API_URL = "api.cognitive.microsoft.com/face/v1.0/{0}"
SERVICE_CREATE_GROUP = "create_group"
SERVICE_CREATE_PERSON = "create_person"
SERVICE_DELETE_GROUP = "delete_group"
SERVICE_DELETE_PERSON = "delete_person"
SERVICE_FACE_PERSON = "face_person"
SERVICE_TRAIN_GROUP = "train_group"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Optional(CONF_AZURE_REGION, default="westus"): cv.string,
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
}
)
},
extra=vol.ALLOW_EXTRA,
)
SCHEMA_GROUP_SERVICE = vol.Schema({vol.Required(ATTR_NAME): cv.string})
SCHEMA_PERSON_SERVICE = SCHEMA_GROUP_SERVICE.extend(
{vol.Required(ATTR_GROUP): cv.slugify}
)
SCHEMA_FACE_SERVICE = vol.Schema(
{
vol.Required(ATTR_PERSON): cv.string,
vol.Required(ATTR_GROUP): cv.slugify,
vol.Required(ATTR_CAMERA_ENTITY): cv.entity_id,
}
)
SCHEMA_TRAIN_SERVICE = vol.Schema({vol.Required(ATTR_GROUP): cv.slugify})
async def async_setup(hass, config):
"""Set up Microsoft Face."""
entities = {}
face = MicrosoftFace(
hass,
config[DOMAIN].get(CONF_AZURE_REGION),
config[DOMAIN].get(CONF_API_KEY),
config[DOMAIN].get(CONF_TIMEOUT),
entities,
)
try:
        # read existing groups/persons from the cloud and create entities
await face.update_store()
except HomeAssistantError as err:
_LOGGER.error("Can't load data from face api: %s", err)
return False
hass.data[DATA_MICROSOFT_FACE] = face
async def async_create_group(service):
"""Create a new person group."""
name = service.data[ATTR_NAME]
g_id = slugify(name)
try:
await face.call_api("put", f"persongroups/{g_id}", {"name": name})
face.store[g_id] = {}
entities[g_id] = MicrosoftFaceGroupEntity(hass, face, g_id, name)
entities[g_id].async_write_ha_state()
except HomeAssistantError as err:
_LOGGER.error("Can't create group '%s' with error: %s", g_id, err)
hass.services.async_register(
DOMAIN, SERVICE_CREATE_GROUP, async_create_group, schema=SCHEMA_GROUP_SERVICE
)
async def async_delete_group(service):
"""Delete a person group."""
g_id = slugify(service.data[ATTR_NAME])
try:
await face.call_api("delete", f"persongroups/{g_id}")
face.store.pop(g_id)
entity = entities.pop(g_id)
hass.states.async_remove(entity.entity_id, service.context)
except HomeAssistantError as err:
_LOGGER.error("Can't delete group '%s' with error: %s", g_id, err)
hass.services.async_register(
DOMAIN, SERVICE_DELETE_GROUP, async_delete_group, schema=SCHEMA_GROUP_SERVICE
)
async def async_train_group(service):
"""Train a person group."""
g_id = service.data[ATTR_GROUP]
try:
await face.call_api("post", f"persongroups/{g_id}/train")
except HomeAssistantError as err:
_LOGGER.error("Can't train group '%s' with error: %s", g_id, err)
hass.services.async_register(
DOMAIN, SERVICE_TRAIN_GROUP, async_train_group, schema=SCHEMA_TRAIN_SERVICE
)
async def async_create_person(service):
"""Create a person in a group."""
name = service.data[ATTR_NAME]
g_id = service.data[ATTR_GROUP]
try:
user_data = await face.call_api(
"post", f"persongroups/{g_id}/persons", {"name": name}
)
face.store[g_id][name] = user_data["personId"]
entities[g_id].async_write_ha_state()
except HomeAssistantError as err:
_LOGGER.error("Can't create person '%s' with error: %s", name, err)
hass.services.async_register(
DOMAIN, SERVICE_CREATE_PERSON, async_create_person, schema=SCHEMA_PERSON_SERVICE
)
async def async_delete_person(service):
"""Delete a person in a group."""
name = service.data[ATTR_NAME]
g_id = service.data[ATTR_GROUP]
p_id = face.store[g_id].get(name)
try:
await face.call_api("delete", f"persongroups/{g_id}/persons/{p_id}")
face.store[g_id].pop(name)
entities[g_id].async_write_ha_state()
except HomeAssistantError as err:
_LOGGER.error("Can't delete person '%s' with error: %s", p_id, err)
hass.services.async_register(
DOMAIN, SERVICE_DELETE_PERSON, async_delete_person, schema=SCHEMA_PERSON_SERVICE
)
async def async_face_person(service):
"""Add a new face picture to a person."""
g_id = service.data[ATTR_GROUP]
p_id = face.store[g_id].get(service.data[ATTR_PERSON])
camera_entity = service.data[ATTR_CAMERA_ENTITY]
camera = hass.components.camera
try:
image = await camera.async_get_image(hass, camera_entity)
await face.call_api(
"post",
f"persongroups/{g_id}/persons/{p_id}/persistedFaces",
image.content,
binary=True,
)
except HomeAssistantError as err:
_LOGGER.error(
"Can't add an image of a person '%s' with error: %s", p_id, err
)
hass.services.async_register(
DOMAIN, SERVICE_FACE_PERSON, async_face_person, schema=SCHEMA_FACE_SERVICE
)
return True
class MicrosoftFaceGroupEntity(Entity):
"""Person-Group state/data Entity."""
def __init__(self, hass, api, g_id, name):
"""Initialize person/group entity."""
self.hass = hass
self._api = api
self._id = g_id
self._name = name
@property
def name(self):
"""Return the name of the entity."""
return self._name
@property
def entity_id(self):
"""Return entity id."""
return f"{DOMAIN}.{self._id}"
@property
def state(self):
"""Return the state of the entity."""
return len(self._api.store[self._id])
@property
def should_poll(self):
"""Return True if entity has to be polled for state."""
return False
@property
def device_state_attributes(self):
"""Return device specific state attributes."""
attr = {}
for name, p_id in self._api.store[self._id].items():
attr[name] = p_id
return attr
class MicrosoftFace:
"""Microsoft Face api for Home Assistant."""
def __init__(self, hass, server_loc, api_key, timeout, entities):
"""Initialize Microsoft Face api."""
self.hass = hass
self.websession = async_get_clientsession(hass)
self.timeout = timeout
self._api_key = api_key
self._server_url = f"https://{server_loc}.{FACE_API_URL}"
self._store = {}
self._entities = entities
@property
def store(self):
"""Store group/person data and IDs."""
return self._store
async def update_store(self):
"""Load all group/person data into local store."""
groups = await self.call_api("get", "persongroups")
tasks = []
for group in groups:
g_id = group["personGroupId"]
self._store[g_id] = {}
self._entities[g_id] = MicrosoftFaceGroupEntity(
self.hass, self, g_id, group["name"]
)
persons = await self.call_api("get", f"persongroups/{g_id}/persons")
for person in persons:
self._store[g_id][person["name"]] = person["personId"]
tasks.append(self._entities[g_id].async_update_ha_state())
if tasks:
await asyncio.wait(tasks)
async def call_api(self, method, function, data=None, binary=False, params=None):
"""Make an api call."""
headers = {"Ocp-Apim-Subscription-Key": self._api_key}
url = self._server_url.format(function)
payload = None
if binary:
headers[CONTENT_TYPE] = "application/octet-stream"
payload = data
else:
headers[CONTENT_TYPE] = CONTENT_TYPE_JSON
if data is not None:
payload = json.dumps(data).encode()
else:
payload = None
try:
with async_timeout.timeout(self.timeout):
response = await getattr(self.websession, method)(
url, data=payload, headers=headers, params=params
)
answer = await response.json()
_LOGGER.debug("Read from microsoft face api: %s", answer)
if response.status < 300:
return answer
_LOGGER.warning(
"Error %d microsoft face api %s", response.status, response.url
)
raise HomeAssistantError(answer["error"]["message"])
except aiohttp.ClientError:
_LOGGER.warning("Can't connect to microsoft face api")
except asyncio.TimeoutError:
_LOGGER.warning("Timeout from microsoft face api %s", response.url)
raise HomeAssistantError("Network error on microsoft face api.")
|
from datetime import timedelta
import pytest
from homeassistant.components import rfxtrx
from homeassistant.components.rfxtrx import DOMAIN
from homeassistant.util.dt import utcnow
from tests.async_mock import patch
from tests.common import MockConfigEntry, async_fire_time_changed
from tests.components.light.conftest import mock_light_profiles # noqa
def create_rfx_test_cfg(device="abcd", automatic_add=False, devices=None):
"""Create rfxtrx config entry data."""
return {
"device": device,
"host": None,
"port": None,
"automatic_add": automatic_add,
"debug": False,
"devices": devices,
}
@pytest.fixture(autouse=True, name="rfxtrx")
async def rfxtrx_fixture(hass):
"""Fixture that cleans up threads from integration."""
with patch("RFXtrx.Connect") as connect, patch("RFXtrx.DummyTransport2"):
rfx = connect.return_value
async def _signal_event(packet_id):
event = rfxtrx.get_rfx_object(packet_id)
await hass.async_add_executor_job(
rfx.event_callback,
event,
)
await hass.async_block_till_done()
await hass.async_block_till_done()
return event
rfx.signal = _signal_event
yield rfx
@pytest.fixture(name="rfxtrx_automatic")
async def rfxtrx_automatic_fixture(hass, rfxtrx):
"""Fixture that starts up with automatic additions."""
entry_data = create_rfx_test_cfg(automatic_add=True, devices={})
mock_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data)
mock_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
await hass.async_start()
yield rfxtrx
@pytest.fixture
async def timestep(hass):
"""Step system time forward."""
with patch("homeassistant.core.dt_util.utcnow") as mock_utcnow:
mock_utcnow.return_value = utcnow()
async def delay(seconds):
"""Trigger delay in system."""
mock_utcnow.return_value += timedelta(seconds=seconds)
async_fire_time_changed(hass, mock_utcnow.return_value)
await hass.async_block_till_done()
yield delay
|
import tensornetwork as tn
import pytest
import numpy as np
from tensornetwork.backends.abstract_backend import AbstractBackend
import tensornetwork.linalg
import tensornetwork.linalg.node_linalg
def test_replicate_nodes(backend):
a = tn.Node(np.random.rand(10, 10), backend=backend)
b = tn.Node(np.random.rand(10, 10), backend=backend)
c = tn.Node(np.random.rand(10, 10), backend=backend)
tn.connect(a[1], b[0])
tn.connect(b[1], c[0])
[a_copy, b_copy] = tn.replicate_nodes([a, b])
assert b_copy in tn.reachable([a_copy])
assert not set([a_copy, b_copy]).issubset(tn.reachable([c]))
assert len(b_copy.get_all_dangling()) == 1
def test_split_node_full_svd_names(backend):
a = tn.Node(np.random.rand(10, 10), backend=backend)
e1 = a[0]
e2 = a[1]
left, s, right, _, = tn.split_node_full_svd(
a, [e1], [e2],
left_name='left',
middle_name='center',
right_name='right',
left_edge_name='left_edge',
right_edge_name='right_edge')
assert left.name == 'left'
assert s.name == 'center'
assert right.name == 'right'
assert left.edges[-1].name == 'left_edge'
assert s[0].name == 'left_edge'
assert s[1].name == 'right_edge'
assert right.edges[0].name == 'right_edge'
def test_split_node_relative_tolerance(backend):
absolute = tn.Node(np.diag([2.0, 1.0, 0.2, 0.1]), backend=backend)
relative = tn.Node(np.diag([2.0, 1.0, 0.2, 0.1]), backend=backend)
max_truncation_err = 0.2
_, _, trunc_sv_absolute, = tn.split_node(
node=absolute,
left_edges=[absolute[0]],
right_edges=[absolute[1]],
max_truncation_err=max_truncation_err,
relative=False)
_, _, trunc_sv_relative, = tn.split_node(
node=relative,
left_edges=[relative[0]],
right_edges=[relative[1]],
max_truncation_err=max_truncation_err,
relative=True)
np.testing.assert_almost_equal(trunc_sv_absolute, [0.1])
np.testing.assert_almost_equal(trunc_sv_relative, [0.2, 0.1])
def test_split_node_full_svd_relative_tolerance(backend):
absolute = tn.Node(np.diag([2.0, 1.0, 0.2, 0.1]), backend=backend)
relative = tn.Node(np.diag([2.0, 1.0, 0.2, 0.1]), backend=backend)
max_truncation_err = 0.2
_, _, _, trunc_sv_absolute, = tn.split_node_full_svd(
node=absolute,
left_edges=[absolute[0]],
right_edges=[absolute[1]],
max_truncation_err=max_truncation_err,
relative=False)
_, _, _, trunc_sv_relative, = tn.split_node_full_svd(
node=relative,
left_edges=[relative[0]],
right_edges=[relative[1]],
max_truncation_err=max_truncation_err,
relative=True)
np.testing.assert_almost_equal(trunc_sv_absolute, [0.1])
np.testing.assert_almost_equal(trunc_sv_relative, [0.2, 0.1])
def test_split_node_rq_names(backend):
a = tn.Node(np.zeros((2, 3, 4, 5, 6)), backend=backend)
left_edges = []
for i in range(3):
left_edges.append(a[i])
right_edges = []
for i in range(3, 5):
right_edges.append(a[i])
left, right = tn.split_node_rq(
a,
left_edges,
right_edges,
left_name='left',
right_name='right',
edge_name='edge')
assert left.name == 'left'
assert right.name == 'right'
assert left.edges[-1].name == 'edge'
assert right.edges[0].name == 'edge'
def test_split_node_qr_names(backend):
a = tn.Node(np.zeros((2, 3, 4, 5, 6)), backend=backend)
left_edges = []
for i in range(3):
left_edges.append(a[i])
right_edges = []
for i in range(3, 5):
right_edges.append(a[i])
left, right = tn.split_node_qr(
a,
left_edges,
right_edges,
left_name='left',
right_name='right',
edge_name='edge')
assert left.name == 'left'
assert right.name == 'right'
assert left.edges[-1].name == 'edge'
assert right.edges[0].name == 'edge'
def test_split_node_names(backend):
a = tn.Node(np.zeros((2, 3, 4, 5, 6)), backend=backend)
left_edges = []
for i in range(3):
left_edges.append(a[i])
right_edges = []
for i in range(3, 5):
right_edges.append(a[i])
left, right, _ = tn.split_node(
a,
left_edges,
right_edges,
left_name='left',
right_name='right',
edge_name='edge')
assert left.name == 'left'
assert right.name == 'right'
assert left.edges[-1].name == 'edge'
assert right.edges[0].name == 'edge'
def test_split_node_rq_unitarity_complex(backend):
if backend == "pytorch":
pytest.skip("Complex numbers currently not supported in PyTorch")
if backend == "jax":
pytest.skip("Complex QR crashes jax")
a = tn.Node(np.random.rand(3, 3) + 1j * np.random.rand(3, 3), backend=backend)
r, q = tn.split_node_rq(a, [a[0]], [a[1]])
r[1] | q[0]
qbar = tn.linalg.node_linalg.conj(q)
q[1] ^ qbar[1]
u1 = q @ qbar
qbar[0] ^ q[0]
u2 = qbar @ q
np.testing.assert_almost_equal(u1.tensor, np.eye(3))
np.testing.assert_almost_equal(u2.tensor, np.eye(3))
def test_split_node_rq_unitarity_float(backend):
a = tn.Node(np.random.rand(3, 3), backend=backend)
r, q = tn.split_node_rq(a, [a[0]], [a[1]])
r[1] | q[0]
qbar = tn.linalg.node_linalg.conj(q)
q[1] ^ qbar[1]
u1 = q @ qbar
qbar[0] ^ q[0]
u2 = qbar @ q
np.testing.assert_almost_equal(u1.tensor, np.eye(3))
np.testing.assert_almost_equal(u2.tensor, np.eye(3))
def test_split_node_rq(backend):
a = tn.Node(np.random.rand(2, 3, 4, 5, 6), backend=backend)
left_edges = []
for i in range(3):
left_edges.append(a[i])
right_edges = []
for i in range(3, 5):
right_edges.append(a[i])
left, right = tn.split_node_rq(a, left_edges, right_edges)
tn.check_correct([left, right])
np.testing.assert_allclose(a.tensor, tn.contract(left[3]).tensor)
def test_split_node_qr_unitarity_complex(backend):
if backend == "pytorch":
pytest.skip("Complex numbers currently not supported in PyTorch")
if backend == "jax":
pytest.skip("Complex QR crashes jax")
a = tn.Node(np.random.rand(3, 3) + 1j * np.random.rand(3, 3), backend=backend)
q, r = tn.split_node_qr(a, [a[0]], [a[1]])
q[1] | r[0]
qbar = tn.linalg.node_linalg.conj(q)
q[1] ^ qbar[1]
u1 = q @ qbar
qbar[0] ^ q[0]
u2 = qbar @ q
np.testing.assert_almost_equal(u1.tensor, np.eye(3))
np.testing.assert_almost_equal(u2.tensor, np.eye(3))
def test_split_node_qr_unitarity_float(backend):
a = tn.Node(np.random.rand(3, 3), backend=backend)
q, r = tn.split_node_qr(a, [a[0]], [a[1]])
q[1] | r[0]
qbar = tn.linalg.node_linalg.conj(q)
q[1] ^ qbar[1]
u1 = q @ qbar
qbar[0] ^ q[0]
u2 = qbar @ q
np.testing.assert_almost_equal(u1.tensor, np.eye(3))
np.testing.assert_almost_equal(u2.tensor, np.eye(3))
def test_split_node_qr(backend):
a = tn.Node(np.random.rand(2, 3, 4, 5, 6), backend=backend)
left_edges = []
for i in range(3):
left_edges.append(a[i])
right_edges = []
for i in range(3, 5):
right_edges.append(a[i])
left, right = tn.split_node_qr(a, left_edges, right_edges)
tn.check_correct([left, right])
np.testing.assert_allclose(a.tensor, tn.contract(left[3]).tensor)
def test_reachable(backend):
nodes = [tn.Node(np.random.rand(2, 2, 2), backend=backend) for _ in range(10)]
_ = [nodes[n][0] ^ nodes[n + 1][1] for n in range(len(nodes) - 1)]
assert set(nodes) == tn.reachable(nodes[0])
def test_reachable_2(backend):
a = tn.Node(np.zeros((3, 5)), backend=backend)
b = tn.Node(np.zeros((3, 4, 5)), backend=backend)
e1 = tn.connect(a[0], b[0])
e2 = tn.connect(a[1], b[2])
nodes = [a, b]
edges = [e1, e2]
assert set(nodes) == tn.reachable(edges[0])
assert set(nodes) == tn.reachable(edges)
def test_reachable_disconnected_1(backend):
nodes = [tn.Node(np.random.rand(2, 2, 2), backend=backend) for _ in range(4)]
nodes[0][1] ^ nodes[1][0]
nodes[2][1] ^ nodes[3][0]
assert set(tn.reachable([nodes[0], nodes[2]])) == set(nodes)
assert set(tn.reachable([nodes[0]])) == {nodes[0], nodes[1]}
assert set(tn.reachable([nodes[1]])) == {nodes[0], nodes[1]}
assert set(tn.reachable([nodes[0], nodes[1]])) == {nodes[0], nodes[1]}
assert set(tn.reachable([nodes[2]])) == {nodes[2], nodes[3]}
assert set(tn.reachable([nodes[3]])) == {nodes[2], nodes[3]}
assert set(tn.reachable([nodes[2], nodes[3]])) == {nodes[2], nodes[3]}
assert set(tn.reachable([nodes[0], nodes[1], nodes[2]])) == set(nodes)
assert set(tn.reachable([nodes[0], nodes[1], nodes[3]])) == set(nodes)
assert set(tn.reachable([nodes[0], nodes[2], nodes[3]])) == set(nodes)
assert set(tn.reachable([nodes[1], nodes[2], nodes[3]])) == set(nodes)
def test_reachable_disconnected_2(backend):
nodes = [tn.Node(np.random.rand(2, 2, 2), backend=backend) for _ in range(4)]
nodes[1][1] ^ nodes[2][0] # connect 2nd and third node
assert set(tn.reachable([nodes[0],
nodes[1]])) == {nodes[0], nodes[1], nodes[2]}
nodes[2][1] ^ nodes[3][0] # connect third and fourth node
assert set(tn.reachable([nodes[0], nodes[1]])) == set(nodes)
def test_subgraph_sanity(backend):
a = tn.Node(np.eye(2), backend=backend)
b = tn.Node(np.eye(2), backend=backend)
a[0] ^ b[0]
edges = tn.get_subgraph_dangling({a})
assert edges == {a[0], a[1]}
def test_subgraph_disconnected_nodes(backend):
a = tn.Node(np.eye(2), backend=backend)
b = tn.Node(np.eye(2), backend=backend)
c = tn.Node(np.eye(2), backend=backend)
a[0] ^ b[0]
b[1] ^ c[1]
edges = tn.get_subgraph_dangling({a, c})
assert edges == {a[0], a[1], c[0], c[1]}
def test_full_graph_subgraph_dangling(backend):
a = tn.Node(np.eye(2), backend=backend)
b = tn.Node(np.eye(2), backend=backend)
c = tn.Node(np.eye(2), backend=backend)
a[0] ^ b[0]
b[1] ^ c[1]
edges = tn.get_subgraph_dangling({a, b, c})
assert edges == {a[1], c[0]}
def test_reduced_density(backend):
a = tn.Node(np.random.rand(3, 3, 3), name="A", backend=backend)
b = tn.Node(np.random.rand(3, 3, 3), name="B", backend=backend)
c = tn.Node(np.random.rand(3, 3, 3), name="C", backend=backend)
edges = tn.get_all_edges({a, b, c})
node_dict, edge_dict = tn.reduced_density([a[0], b[1], c[2]])
assert not a[0].is_dangling()
assert not b[1].is_dangling()
assert not c[2].is_dangling()
assert a[1].is_dangling() & a[2].is_dangling()
assert b[0].is_dangling() & b[2].is_dangling()
assert c[0].is_dangling() & c[1].is_dangling()
for node in {a, b, c}:
assert node_dict[node].name == node.name
for edge in edges:
assert edge_dict[edge].name == edge.name
def test_reduced_density_nondangling(backend):
a = tn.Node(np.random.rand(3, 3, 3), name="A", backend=backend)
b = tn.Node(np.random.rand(3, 3, 3), name="B", backend=backend)
c = tn.Node(np.random.rand(3, 3, 3), name="C", backend=backend)
a[0] ^ b[1]
b[2] ^ c[1]
err_msg = "traced_out_edges must only include dangling edges!"
with pytest.raises(ValueError, match=err_msg):
tn.reduced_density([a[0], b[1], c[1]])
def test_reduced_density_contraction(backend):
if backend == "pytorch":
pytest.skip("pytorch doesn't support complex numbers")
a = tn.Node(
np.array([[0.0, 1.0j], [-1.0j, 0.0]], dtype=np.complex64),
backend=backend)
tn.reduced_density([a[0]])
result = tn.contractors.greedy(tn.reachable(a), ignore_edge_order=True)
np.testing.assert_allclose(result.tensor, np.eye(2))
def test_switch_backend(backend):
a = tn.Node(np.random.rand(3, 3, 3), name="A", backend="numpy")
b = tn.Node(np.random.rand(3, 3, 3), name="B", backend="numpy")
c = tn.Node(np.random.rand(3, 3, 3), name="C", backend="numpy")
nodes = [a, b, c]
tn.switch_backend(nodes, backend)
assert nodes[0].backend.name == backend
def test_split_node_of_node_without_backend_raises_error():
node = np.random.rand(3, 3, 3)
with pytest.raises(AttributeError):
tn.split_node(node, left_edges=[], right_edges=[])
def test_split_node_qr_of_node_without_backend_raises_error():
node = np.random.rand(3, 3, 3)
with pytest.raises(AttributeError):
tn.split_node_qr(node, left_edges=[], right_edges=[])
def test_split_node_rq_of_node_without_backend_raises_error():
node = np.random.rand(3, 3, 3)
with pytest.raises(AttributeError):
tn.split_node_rq(node, left_edges=[], right_edges=[])
def test_split_node_full_svd_of_node_without_backend_raises_error():
node = np.random.rand(3, 3, 3)
with pytest.raises(AttributeError):
tn.split_node_full_svd(node, left_edges=[], right_edges=[])
def test_reachable_raises_value_error():
with pytest.raises(ValueError):
tn.reachable({})
def test_check_correct_raises_value_error_1(backend):
a = tn.Node(np.random.rand(3, 3, 3), backend=backend)
b = tn.Node(np.random.rand(3, 3, 3), backend=backend)
edge = a.edges[0]
edge.node1 = b
edge.node2 = b
with pytest.raises(ValueError):
tn.check_correct({a, b})
def test_check_correct_raises_value_error_2(backend):
a = tn.Node(np.random.rand(3, 3, 3), backend=backend)
b = tn.Node(np.random.rand(3, 3, 3), backend=backend)
edge = a.edges[0]
edge.axis1 = -1
with pytest.raises(ValueError):
tn.check_correct({a, b})
def test_get_all_nodes(backend):
a = tn.Node(np.random.rand(3, 3, 3), backend=backend)
b = tn.Node(np.random.rand(3, 3, 3), backend=backend)
edge = tn.connect(a[0], b[0])
assert tn.get_all_nodes({edge}) == {a, b}
def test_contract_trace_edges(backend):
a = tn.Node(np.random.rand(3, 3, 3), backend=backend)
with pytest.raises(ValueError):
tn.contract_trace_edges(a)
def test_switch_backend_raises_error(backend):
a = tn.Node(np.random.rand(3, 3, 3))
a.backend = AbstractBackend()
with pytest.raises(NotImplementedError):
tn.switch_backend({a}, backend)
def test_split_node_orig_shape(backend):
n1 = tn.Node(np.random.rand(3, 4, 5), backend=backend)
tn.split_node(n1, [n1[0], n1[2]], [n1[1]])
np.testing.assert_allclose(n1.shape, (3, 4, 5))
def test_split_node_full_svd_orig_shape(backend):
n1 = tn.Node(np.random.rand(3, 4, 5), backend=backend)
tn.split_node_full_svd(n1, [n1[0], n1[2]], [n1[1]])
np.testing.assert_allclose(n1.shape, (3, 4, 5))
def test_split_node_rq_orig_shape(backend):
n1 = tn.Node(np.random.rand(3, 4, 5), backend=backend)
tn.split_node_rq(n1, [n1[0], n1[2]], [n1[1]])
np.testing.assert_allclose(n1.shape, (3, 4, 5))
def test_split_node_qr_orig_shape(backend):
n1 = tn.Node(np.random.rand(3, 4, 5), backend=backend)
tn.split_node_qr(n1, [n1[0], n1[2]], [n1[1]])
np.testing.assert_allclose(n1.shape, (3, 4, 5))
def test_get_neighbors(backend):
with tn.DefaultBackend(backend):
a = tn.Node(np.ones((2, 2)))
b = tn.Node(np.ones((2, 2, 2, 2)))
c = tn.Node(np.ones((2, 2, 2)))
d = tn.Node(np.ones((2, 2)))
b[0] ^ a[1]
b[3] ^ c[2]
a[0] ^ d[1]
b[1] ^ b[2]
result = tn.get_neighbors(b)
assert result == [a, c]
def test_get_neighbors_no_duplicates(backend):
with tn.DefaultBackend(backend):
a = tn.Node(np.ones((2, 2, 2)))
b = tn.Node(np.ones((2, 2, 2, 2, 2)))
c = tn.Node(np.ones((2, 2, 2)))
d = tn.Node(np.ones((2, 2)))
b[0] ^ a[0]
b[1] ^ a[1]
b[2] ^ c[0]
a[2] ^ d[1]
b[3] ^ b[4]
result = tn.get_neighbors(b)
assert result == [a, c]
|
import urwid
class QuestionnaireItem(urwid.WidgetWrap):
def __init__(self):
self.options = []
unsure = urwid.RadioButton(self.options, u"Unsure")
yes = urwid.RadioButton(self.options, u"Yes")
no = urwid.RadioButton(self.options, u"No")
display_widget = urwid.GridFlow([unsure, yes, no], 15, 3, 1, 'left')
urwid.WidgetWrap.__init__(self, display_widget)
def get_state(self):
for o in self.options:
if o.get_state() is True:
return o.get_label()
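# Hedged usage sketch (not part of the original snippet): QuestionnaireItem is a flow
# widget, so it can be displayed by wrapping it in a Filler and running a MainLoop.
# The key binding below is an illustrative assumption.
#
#     def exit_on_q(key):
#         if key in ('q', 'Q'):
#             raise urwid.ExitMainLoop()
#
#     item = QuestionnaireItem()
#     urwid.MainLoop(urwid.Filler(item), unhandled_input=exit_on_q).run()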
|
from ... import event
from . import Widget
class IFrame(Widget):
""" An iframe element, i.e. a container to show web-content.
Note that some websites do not allow themselves to be rendered in
a cross-source iframe.
The ``node`` of this widget is a
`<iframe> <https://developer.mozilla.org/docs/Web/HTML/Element/iframe>`_.
"""
DEFAULT_MIN_SIZE = 10, 10
CSS = """
.flx-IFrame {
border: none;
}
"""
url = event.StringProp('', settable=True, doc="""
The url to show. 'http://' is automatically prepended if the url
does not have '://' in it.
""")
def _create_dom(self):
global document
return document.createElement('iframe')
@event.reaction('size')
def __on_size(self, *events):
self.node.width = self.size[0]
@event.reaction('url')
def _update_url(self, *events):
url = self.url
if url and '://' not in url:
url = 'http://' + url
self.node.src = url
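# Hedged usage sketch (the instantiation style is an assumption, not from this module):
# because _update_url prepends 'http://' when the url has no '://', something like
#   IFrame(url='example.com')
# ends up with node.src set to 'http://example.com', while a full
# 'https://example.com' URL is passed through unchanged.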
|
from collections import defaultdict
import six
from chainercv.chainer_experimental.datasets.sliceable.sliceable_dataset \
import _as_tuple
from chainercv.chainer_experimental.datasets.sliceable import SliceableDataset
class TupleDataset(SliceableDataset):
"""A sliceable version of :class:`chainer.datasets.TupleDataset`.
Here is an example.
>>> # omit keys
>>> dataset = TupleDataset([0, 1, 2], [0, 1, 4])
>>> dataset.keys # (None, None)
>>> dataset.slice[:, 0][:] # [0, 1, 2]
>>>
>>> dataset_more = TupleDataset(dataset, [0, 1, 8])
>>> dataset_more.keys # (None, None, None)
>>> dataset_more.slice[:, [1, 2]][:] # [(0, 0), (1, 1), (4, 8)]
>>>
>>> # specify the name of a key
>>> named_dataset = TupleDataset(('feat0', [0, 1, 2]), [0, 1, 4])
>>> named_dataset.keys # ('feat0', None)
>>> # slice takes both key and index (or their mixture)
>>> named_dataset.slice[:, ['feat0', 1]][:] # [(0, 0), (1, 1), (2, 4)]
Args:
datasets: The underlying datasets.
The following datasets are acceptable.
* An inheritance of \
:class:`~chainer.datasets.sliceable.SliceableDataset`.
* A tuple of a name and a data array. \
The data array should be list or :class:`numpy.ndarray`.
* A data array. In this case, the name of key is :obj:`None`.
"""
def __init__(self, *datasets):
if len(datasets) == 0:
raise ValueError('At least one dataset is required')
self._len = None
self._keys = []
self._datasets = []
for dataset in datasets:
if isinstance(dataset, SliceableDataset):
self._datasets.append(dataset)
for key_index, key in enumerate(_as_tuple(dataset.keys)):
self._keys.append(
(key, len(self._datasets) - 1, key_index))
else:
if isinstance(dataset, tuple):
key, dataset = dataset
else:
key = None
self._datasets.append(dataset)
self._keys.append((key, len(self._datasets) - 1, None))
if self._len is None:
self._len = len(dataset)
if not len(dataset) == self._len:
raise ValueError(
'All datasets should have the same length')
def __len__(self):
return self._len
@property
def keys(self):
return tuple(key for key, _, _ in self._keys)
def get_example_by_keys(self, index, key_indices):
datasets_key_indices = defaultdict(set)
for key_index in key_indices:
_, dataset_index, key_index = self._keys[key_index]
if key_index is None:
datasets_key_indices[dataset_index] = None
else:
datasets_key_indices[dataset_index].add(key_index)
values = {}
for dataset_index, dataset_key_indices in \
six.iteritems(datasets_key_indices):
dataset = self._datasets[dataset_index]
if dataset_key_indices is None:
values[(dataset_index, None)] = dataset[index]
else:
dataset_key_indices = tuple(dataset_key_indices)
values.update(six.moves.zip(
((dataset_index, key_index)
for key_index in dataset_key_indices),
dataset.get_example_by_keys(index, dataset_key_indices)))
return tuple(
values[self._keys[key_index][1:]] for key_index in key_indices)
|
import asyncio
from unittest.mock import Mock, patch
import pytest
from homeassistant import data_entry_flow
from homeassistant.components.logi_circle import config_flow
from homeassistant.components.logi_circle.config_flow import (
DOMAIN,
AuthorizationFailed,
LogiCircleAuthCallbackView,
)
from homeassistant.setup import async_setup_component
from tests.async_mock import AsyncMock
from tests.common import mock_coro
class MockRequest:
"""Mock request passed to HomeAssistantView."""
def __init__(self, hass, query):
"""Init request object."""
self.app = {"hass": hass}
self.query = query
def init_config_flow(hass):
"""Init a configuration flow."""
config_flow.register_flow_implementation(
hass,
DOMAIN,
client_id="id",
client_secret="secret",
api_key="123",
redirect_uri="http://example.com",
sensors=None,
)
flow = config_flow.LogiCircleFlowHandler()
flow._get_authorization_url = Mock( # pylint: disable=protected-access
return_value="http://example.com"
)
flow.hass = hass
return flow
@pytest.fixture
def mock_logi_circle():
"""Mock logi_circle."""
with patch(
"homeassistant.components.logi_circle.config_flow.LogiCircle"
) as logi_circle:
LogiCircle = logi_circle()
LogiCircle.authorize = AsyncMock(return_value=True)
LogiCircle.close = AsyncMock(return_value=True)
LogiCircle.account = mock_coro(return_value={"accountId": "testId"})
LogiCircle.authorize_url = "http://authorize.url"
yield LogiCircle
async def test_step_import(
hass, mock_logi_circle # pylint: disable=redefined-outer-name
):
"""Test that we trigger import when configuring with client."""
flow = init_config_flow(hass)
result = await flow.async_step_import()
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "auth"
async def test_full_flow_implementation(
hass, mock_logi_circle # pylint: disable=redefined-outer-name
):
"""Test registering an implementation and finishing flow works."""
config_flow.register_flow_implementation(
hass,
"test-other",
client_id=None,
client_secret=None,
api_key=None,
redirect_uri=None,
sensors=None,
)
flow = init_config_flow(hass)
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
result = await flow.async_step_user({"flow_impl": "test-other"})
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "auth"
assert result["description_placeholders"] == {
"authorization_url": "http://example.com"
}
result = await flow.async_step_code("123ABC")
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "Logi Circle ({})".format("testId")
async def test_we_reprompt_user_to_follow_link(hass):
"""Test we prompt user to follow link if previously prompted."""
flow = init_config_flow(hass)
result = await flow.async_step_auth("dummy")
assert result["errors"]["base"] == "follow_link"
async def test_abort_if_no_implementation_registered(hass):
"""Test we abort if no implementation is registered."""
flow = config_flow.LogiCircleFlowHandler()
flow.hass = hass
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "missing_configuration"
async def test_abort_if_already_setup(hass):
"""Test we abort if Logi Circle is already setup."""
flow = init_config_flow(hass)
with patch.object(hass.config_entries, "async_entries", return_value=[{}]):
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
with patch.object(hass.config_entries, "async_entries", return_value=[{}]):
result = await flow.async_step_import()
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
with patch.object(hass.config_entries, "async_entries", return_value=[{}]):
result = await flow.async_step_code()
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
with patch.object(hass.config_entries, "async_entries", return_value=[{}]):
result = await flow.async_step_auth()
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "external_setup"
@pytest.mark.parametrize(
"side_effect,error",
[
(asyncio.TimeoutError, "authorize_url_timeout"),
(AuthorizationFailed, "invalid_auth"),
],
)
async def test_abort_if_authorize_fails(
hass, mock_logi_circle, side_effect, error
): # pylint: disable=redefined-outer-name
"""Test we abort if authorizing fails."""
flow = init_config_flow(hass)
mock_logi_circle.authorize.side_effect = side_effect
result = await flow.async_step_code("123ABC")
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "external_error"
result = await flow.async_step_auth()
assert result["errors"]["base"] == error
async def test_not_pick_implementation_if_only_one(hass):
"""Test we bypass picking implementation if we have one flow_imp."""
flow = init_config_flow(hass)
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "auth"
async def test_gen_auth_url(
hass, mock_logi_circle
): # pylint: disable=redefined-outer-name
"""Test generating authorize URL from Logi Circle API."""
config_flow.register_flow_implementation(
hass,
"test-auth-url",
client_id="id",
client_secret="secret",
api_key="123",
redirect_uri="http://example.com",
sensors=None,
)
flow = config_flow.LogiCircleFlowHandler()
flow.hass = hass
flow.flow_impl = "test-auth-url"
await async_setup_component(hass, "http", {})
result = flow._get_authorization_url() # pylint: disable=protected-access
assert result == "http://authorize.url"
async def test_callback_view_rejects_missing_code(hass):
"""Test the auth callback view rejects requests with no code."""
view = LogiCircleAuthCallbackView()
resp = await view.get(MockRequest(hass, {}))
assert resp.status == 400
async def test_callback_view_accepts_code(
hass, mock_logi_circle
): # pylint: disable=redefined-outer-name
"""Test the auth callback view handles requests with auth code."""
init_config_flow(hass)
view = LogiCircleAuthCallbackView()
resp = await view.get(MockRequest(hass, {"code": "456"}))
assert resp.status == 200
await hass.async_block_till_done()
mock_logi_circle.authorize.assert_called_with("456")
|
import unittest
import logging
from gensim.sklearn_api import D2VTransformer
from gensim.test.utils import common_texts
class IteratorForIterable:
"""Iterator capable of folding into list."""
def __init__(self, iterable):
self._data = iterable
self._index = 0
def __next__(self):
if len(self._data) > self._index:
result = self._data[self._index]
self._index += 1
return result
raise StopIteration
class IterableWithoutZeroElement:
"""
Iterable, emulating pandas.Series behaviour without 0-th element.
Equivalent to calling `series.index += 1`.
"""
def __init__(self, data):
self.data = data
def __getitem__(self, key):
if key == 0:
raise KeyError("Emulation of absence of item with key 0.")
return self.data[key]
def __iter__(self):
return IteratorForIterable(self.data)
class TestD2VTransformer(unittest.TestCase):
def test_works_with_iterable_not_having_element_with_zero_index(self):
a = IterableWithoutZeroElement(common_texts)
transformer = D2VTransformer(min_count=1, vector_size=5)
transformer.fit(a)
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.DEBUG)
unittest.main()
|
from aiohomekit.model.characteristics import CharacteristicsTypes
from aiohomekit.model.services import ServicesTypes
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_GAS,
DEVICE_CLASS_MOISTURE,
DEVICE_CLASS_MOTION,
DEVICE_CLASS_OCCUPANCY,
DEVICE_CLASS_OPENING,
DEVICE_CLASS_SMOKE,
)
from tests.components.homekit_controller.common import setup_test_component
MOTION_DETECTED = ("motion", "motion-detected")
CONTACT_STATE = ("contact", "contact-state")
SMOKE_DETECTED = ("smoke", "smoke-detected")
CARBON_MONOXIDE_DETECTED = ("carbon-monoxide", "carbon-monoxide.detected")
OCCUPANCY_DETECTED = ("occupancy", "occupancy-detected")
LEAK_DETECTED = ("leak", "leak-detected")
def create_motion_sensor_service(accessory):
"""Define motion characteristics as per page 225 of HAP spec."""
service = accessory.add_service(ServicesTypes.MOTION_SENSOR)
cur_state = service.add_char(CharacteristicsTypes.MOTION_DETECTED)
cur_state.value = 0
async def test_motion_sensor_read_state(hass, utcnow):
"""Test that we can read the state of a HomeKit motion sensor accessory."""
helper = await setup_test_component(hass, create_motion_sensor_service)
helper.characteristics[MOTION_DETECTED].value = False
state = await helper.poll_and_get_state()
assert state.state == "off"
helper.characteristics[MOTION_DETECTED].value = True
state = await helper.poll_and_get_state()
assert state.state == "on"
assert state.attributes["device_class"] == DEVICE_CLASS_MOTION
def create_contact_sensor_service(accessory):
"""Define contact characteristics."""
service = accessory.add_service(ServicesTypes.CONTACT_SENSOR)
cur_state = service.add_char(CharacteristicsTypes.CONTACT_STATE)
cur_state.value = 0
async def test_contact_sensor_read_state(hass, utcnow):
"""Test that we can read the state of a HomeKit contact accessory."""
helper = await setup_test_component(hass, create_contact_sensor_service)
helper.characteristics[CONTACT_STATE].value = 0
state = await helper.poll_and_get_state()
assert state.state == "off"
helper.characteristics[CONTACT_STATE].value = 1
state = await helper.poll_and_get_state()
assert state.state == "on"
assert state.attributes["device_class"] == DEVICE_CLASS_OPENING
def create_smoke_sensor_service(accessory):
"""Define smoke sensor characteristics."""
service = accessory.add_service(ServicesTypes.SMOKE_SENSOR)
cur_state = service.add_char(CharacteristicsTypes.SMOKE_DETECTED)
cur_state.value = 0
async def test_smoke_sensor_read_state(hass, utcnow):
"""Test that we can read the state of a HomeKit contact accessory."""
helper = await setup_test_component(hass, create_smoke_sensor_service)
helper.characteristics[SMOKE_DETECTED].value = 0
state = await helper.poll_and_get_state()
assert state.state == "off"
helper.characteristics[SMOKE_DETECTED].value = 1
state = await helper.poll_and_get_state()
assert state.state == "on"
assert state.attributes["device_class"] == DEVICE_CLASS_SMOKE
def create_carbon_monoxide_sensor_service(accessory):
"""Define carbon monoxide sensor characteristics."""
service = accessory.add_service(ServicesTypes.CARBON_MONOXIDE_SENSOR)
cur_state = service.add_char(CharacteristicsTypes.CARBON_MONOXIDE_DETECTED)
cur_state.value = 0
async def test_carbon_monoxide_sensor_read_state(hass, utcnow):
"""Test that we can read the state of a HomeKit contact accessory."""
helper = await setup_test_component(hass, create_carbon_monoxide_sensor_service)
helper.characteristics[CARBON_MONOXIDE_DETECTED].value = 0
state = await helper.poll_and_get_state()
assert state.state == "off"
helper.characteristics[CARBON_MONOXIDE_DETECTED].value = 1
state = await helper.poll_and_get_state()
assert state.state == "on"
assert state.attributes["device_class"] == DEVICE_CLASS_GAS
def create_occupancy_sensor_service(accessory):
"""Define occupancy characteristics."""
service = accessory.add_service(ServicesTypes.OCCUPANCY_SENSOR)
cur_state = service.add_char(CharacteristicsTypes.OCCUPANCY_DETECTED)
cur_state.value = 0
async def test_occupancy_sensor_read_state(hass, utcnow):
"""Test that we can read the state of a HomeKit occupancy sensor accessory."""
helper = await setup_test_component(hass, create_occupancy_sensor_service)
helper.characteristics[OCCUPANCY_DETECTED].value = False
state = await helper.poll_and_get_state()
assert state.state == "off"
helper.characteristics[OCCUPANCY_DETECTED].value = True
state = await helper.poll_and_get_state()
assert state.state == "on"
assert state.attributes["device_class"] == DEVICE_CLASS_OCCUPANCY
def create_leak_sensor_service(accessory):
"""Define leak characteristics."""
service = accessory.add_service(ServicesTypes.LEAK_SENSOR)
cur_state = service.add_char(CharacteristicsTypes.LEAK_DETECTED)
cur_state.value = 0
async def test_leak_sensor_read_state(hass, utcnow):
"""Test that we can read the state of a HomeKit leak sensor accessory."""
helper = await setup_test_component(hass, create_leak_sensor_service)
helper.characteristics[LEAK_DETECTED].value = 0
state = await helper.poll_and_get_state()
assert state.state == "off"
helper.characteristics[LEAK_DETECTED].value = 1
state = await helper.poll_and_get_state()
assert state.state == "on"
assert state.attributes["device_class"] == DEVICE_CLASS_MOISTURE
|
import json
from datetime import timedelta
import arrow
import boto3
from moto import mock_sns, mock_sqs, mock_ses
from lemur.certificates.schemas import certificate_notification_output_schema
from lemur.plugins.lemur_aws.sns import format_message
from lemur.plugins.lemur_aws.sns import publish
from lemur.tests.factories import NotificationFactory, CertificateFactory
from lemur.tests.test_messaging import verify_sender_email
@mock_sns()
def test_format(certificate, endpoint):
data = [certificate_notification_output_schema.dump(certificate).data]
for certificate in data:
expected_message = {
"notification_type": "expiration",
"certificate_name": certificate["name"],
"expires": arrow.get(certificate["validityEnd"]).format("YYYY-MM-DDTHH:mm:ss"),
"endpoints_detected": 0,
"owner": certificate["owner"],
"details": "https://lemur.example.com/#/certificates/{name}".format(name=certificate["name"])
}
assert expected_message == json.loads(format_message(certificate, "expiration"))
@mock_sns()
@mock_sqs()
def create_and_subscribe_to_topic():
sns_client = boto3.client("sns", region_name="us-east-1")
topic_arn = sns_client.create_topic(Name='lemursnstest')["TopicArn"]
sqs_client = boto3.client("sqs", region_name="us-east-1")
queue = sqs_client.create_queue(QueueName="lemursnstestqueue")
queue_url = queue["QueueUrl"]
queue_arn = sqs_client.get_queue_attributes(QueueUrl=queue_url)["Attributes"]["QueueArn"]
sns_client.subscribe(TopicArn=topic_arn, Protocol="sqs", Endpoint=queue_arn)
return [topic_arn, sqs_client, queue_url]
@mock_sns()
@mock_sqs()
def test_publish(certificate, endpoint):
data = [certificate_notification_output_schema.dump(certificate).data]
topic_arn, sqs_client, queue_url = create_and_subscribe_to_topic()
message_ids = publish(topic_arn, data, "expiration", region_name="us-east-1")
assert len(message_ids) == len(data)
received_messages = sqs_client.receive_message(QueueUrl=queue_url)["Messages"]
for certificate in data:
expected_message_id = message_ids[certificate["name"]]
actual_message = next(
(m for m in received_messages if json.loads(m["Body"])["MessageId"] == expected_message_id), None)
actual_json = json.loads(actual_message["Body"])
assert actual_json["Message"] == format_message(certificate, "expiration")
assert actual_json["Subject"] == "Lemur: Expiration Notification"
def get_options():
return [
{"name": "interval", "value": 10},
{"name": "unit", "value": "days"},
{"name": "region", "value": "us-east-1"},
{"name": "accountNumber", "value": "123456789012"},
{"name": "topicName", "value": "lemursnstest"},
]
@mock_sns()
@mock_sqs()
@mock_ses() # because email notifications are also sent
def test_send_expiration_notification():
from lemur.notifications.messaging import send_expiration_notifications
verify_sender_email() # emails are sent to owner and security; SNS only used for configured notification
topic_arn, sqs_client, queue_url = create_and_subscribe_to_topic()
notification = NotificationFactory(plugin_name="aws-sns")
notification.options = get_options()
now = arrow.utcnow()
in_ten_days = now + timedelta(days=10, hours=1) # a bit more than 10 days since we'll check in the future
certificate = CertificateFactory()
certificate.not_after = in_ten_days
certificate.notifications.append(notification)
assert send_expiration_notifications([]) == (3, 0) # owner, SNS, and security
received_messages = sqs_client.receive_message(QueueUrl=queue_url)["Messages"]
assert len(received_messages) == 1
expected_message = format_message(certificate_notification_output_schema.dump(certificate).data, "expiration")
actual_message = json.loads(received_messages[0]["Body"])["Message"]
assert actual_message == expected_message
# Currently disabled as the SNS plugin doesn't support this type of notification
# def test_send_rotation_notification(endpoint, source_plugin):
# from lemur.notifications.messaging import send_rotation_notification
# from lemur.deployment.service import rotate_certificate
#
# notification = NotificationFactory(plugin_name="aws-sns")
# notification.options = get_options()
#
# new_certificate = CertificateFactory()
# rotate_certificate(endpoint, new_certificate)
# assert endpoint.certificate == new_certificate
#
# assert send_rotation_notification(new_certificate)
# Currently disabled as the SNS plugin doesn't support this type of notification
# def test_send_pending_failure_notification(user, pending_certificate, async_issuer_plugin):
# from lemur.notifications.messaging import send_pending_failure_notification
#
# assert send_pending_failure_notification(pending_certificate)
|
import importlib
import platform
import re
import warnings
from contextlib import contextmanager
from distutils import version
from unittest import mock # noqa: F401
import numpy as np
import pytest
from numpy.testing import assert_array_equal # noqa: F401
from pandas.testing import assert_frame_equal # noqa: F401
import xarray.testing
from xarray.core import utils
from xarray.core.duck_array_ops import allclose_or_equiv # noqa: F401
from xarray.core.indexing import ExplicitlyIndexed
from xarray.core.options import set_options
from xarray.testing import ( # noqa: F401
assert_chunks_equal,
assert_duckarray_allclose,
assert_duckarray_equal,
)
# import mpl and change the backend before other mpl imports
try:
import matplotlib as mpl
# Order of imports is important here.
# Using a different backend makes Travis CI work
mpl.use("Agg")
except ImportError:
pass
arm_xfail = pytest.mark.xfail(
platform.machine() == "aarch64" or "arm" in platform.machine(),
reason="expected failure on ARM",
)
def _importorskip(modname, minversion=None):
try:
mod = importlib.import_module(modname)
has = True
if minversion is not None:
if LooseVersion(mod.__version__) < LooseVersion(minversion):
raise ImportError("Minimum version not satisfied")
except ImportError:
has = False
func = pytest.mark.skipif(not has, reason=f"requires {modname}")
return has, func
def LooseVersion(vstring):
# Our development version is something like '0.10.9+aac7bfc'
# This function just ignores the git commit id.
vstring = vstring.split("+")[0]
return version.LooseVersion(vstring)
has_matplotlib, requires_matplotlib = _importorskip("matplotlib")
has_scipy, requires_scipy = _importorskip("scipy")
has_pydap, requires_pydap = _importorskip("pydap.client")
has_netCDF4, requires_netCDF4 = _importorskip("netCDF4")
has_h5netcdf, requires_h5netcdf = _importorskip("h5netcdf")
has_pynio, requires_pynio = _importorskip("Nio")
has_pseudonetcdf, requires_pseudonetcdf = _importorskip("PseudoNetCDF")
has_cftime, requires_cftime = _importorskip("cftime")
has_cftime_1_1_0, requires_cftime_1_1_0 = _importorskip("cftime", minversion="1.1.0.0")
has_dask, requires_dask = _importorskip("dask")
has_bottleneck, requires_bottleneck = _importorskip("bottleneck")
has_nc_time_axis, requires_nc_time_axis = _importorskip("nc_time_axis")
has_rasterio, requires_rasterio = _importorskip("rasterio")
has_zarr, requires_zarr = _importorskip("zarr")
has_iris, requires_iris = _importorskip("iris")
has_cfgrib, requires_cfgrib = _importorskip("cfgrib")
has_numbagg, requires_numbagg = _importorskip("numbagg")
has_seaborn, requires_seaborn = _importorskip("seaborn")
has_sparse, requires_sparse = _importorskip("sparse")
has_cartopy, requires_cartopy = _importorskip("cartopy")
# Need Pint 0.15 for __dask_tokenize__ tests for Quantity wrapped Dask Arrays
has_pint_0_15, requires_pint_0_15 = _importorskip("pint", minversion="0.15")
# some special cases
has_scipy_or_netCDF4 = has_scipy or has_netCDF4
requires_scipy_or_netCDF4 = pytest.mark.skipif(
not has_scipy_or_netCDF4, reason="requires scipy or netCDF4"
)
# change some global options for tests
set_options(warn_for_unclosed_files=True)
if has_dask:
import dask
dask.config.set(scheduler="single-threaded")
class CountingScheduler:
"""Simple dask scheduler counting the number of computes.
Reference: https://stackoverflow.com/questions/53289286/"""
def __init__(self, max_computes=0):
self.total_computes = 0
self.max_computes = max_computes
def __call__(self, dsk, keys, **kwargs):
self.total_computes += 1
if self.total_computes > self.max_computes:
raise RuntimeError(
"Too many computes. Total: %d > max: %d."
% (self.total_computes, self.max_computes)
)
return dask.get(dsk, keys, **kwargs)
@contextmanager
def dummy_context():
yield None
def raise_if_dask_computes(max_computes=0):
# return a dummy context manager so that this can be used for non-dask objects
if not has_dask:
return dummy_context()
scheduler = CountingScheduler(max_computes)
return dask.config.set(scheduler=scheduler)
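# Hedged usage sketch (not part of the original module): raise_if_dask_computes wraps a
# block of code and fails if dask evaluates the graph more often than max_computes.
# The array construction below is illustrative only; the helper is never called here.
def _example_raise_if_dask_computes():
    if has_dask:
        import dask.array as da
        data = da.from_array(np.arange(8), chunks=4)
    else:
        data = np.arange(8)
    with raise_if_dask_computes(max_computes=0):
        lazy = data + 1  # building the graph must not trigger a compute
    return lazy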
flaky = pytest.mark.flaky
network = pytest.mark.network
@contextmanager
def raises_regex(error, pattern):
__tracebackhide__ = True
with pytest.raises(error) as excinfo:
yield
message = str(excinfo.value)
if not re.search(pattern, message):
raise AssertionError(
f"exception {excinfo.value!r} did not match pattern {pattern!r}"
)
class UnexpectedDataAccess(Exception):
pass
class InaccessibleArray(utils.NDArrayMixin, ExplicitlyIndexed):
def __init__(self, array):
self.array = array
def __getitem__(self, key):
raise UnexpectedDataAccess("Tried accessing data")
class ReturnItem:
def __getitem__(self, key):
return key
class IndexerMaker:
def __init__(self, indexer_cls):
self._indexer_cls = indexer_cls
def __getitem__(self, key):
if not isinstance(key, tuple):
key = (key,)
return self._indexer_cls(key)
def source_ndarray(array):
"""Given an ndarray, return the base object which holds its memory, or the
object itself.
"""
with warnings.catch_warnings():
warnings.filterwarnings("ignore", "DatetimeIndex.base")
warnings.filterwarnings("ignore", "TimedeltaIndex.base")
base = getattr(array, "base", np.asarray(array).base)
if base is None:
base = array
return base
# Internal versions of xarray's test functions that validate additional
# invariants
def assert_equal(a, b):
__tracebackhide__ = True
xarray.testing.assert_equal(a, b)
xarray.testing._assert_internal_invariants(a)
xarray.testing._assert_internal_invariants(b)
def assert_identical(a, b):
__tracebackhide__ = True
xarray.testing.assert_identical(a, b)
xarray.testing._assert_internal_invariants(a)
xarray.testing._assert_internal_invariants(b)
def assert_allclose(a, b, **kwargs):
__tracebackhide__ = True
xarray.testing.assert_allclose(a, b, **kwargs)
xarray.testing._assert_internal_invariants(a)
xarray.testing._assert_internal_invariants(b)
|
import logging
from collections import defaultdict
from gensim import utils
from gensim.corpora import Dictionary
from gensim.corpora import IndexedCorpus
from gensim.matutils import MmReader
from gensim.matutils import MmWriter
logger = logging.getLogger(__name__)
class UciReader(MmReader):
"""Reader of UCI format for :class:`gensim.corpora.ucicorpus.UciCorpus`."""
def __init__(self, input):
"""
Parameters
----------
input : str
Path to file in UCI format.
"""
logger.info('Initializing corpus reader from %s', input)
self.input = input
with utils.open(self.input, 'rb') as fin:
self.num_docs = self.num_terms = self.num_nnz = 0
try:
self.num_docs = int(next(fin).strip())
self.num_terms = int(next(fin).strip())
self.num_nnz = int(next(fin).strip())
except StopIteration:
pass
logger.info(
"accepted corpus with %i documents, %i features, %i non-zero entries",
self.num_docs, self.num_terms, self.num_nnz
)
def skip_headers(self, input_file):
"""Skip headers in `input_file`.
Parameters
----------
input_file : file
File object.
"""
for lineno, _ in enumerate(input_file):
if lineno == 2:
break
class UciWriter(MmWriter):
"""Writer of UCI format for :class:`gensim.corpora.ucicorpus.UciCorpus`.
Notes
-----
This corpus format is identical to the `Matrix Market format <http://math.nist.gov/MatrixMarket/formats.html>`_,
except for different file headers. There is no format line, and the first three lines of the file
contain `num_docs`, `num_terms`, and `num_nnz`, one value per line.
"""
MAX_HEADER_LENGTH = 20 # reserve 20 bytes per header value
FAKE_HEADER = utils.to_utf8(' ' * MAX_HEADER_LENGTH + '\n')
def write_headers(self):
"""Write blank header lines. Will be updated later, once corpus stats are known."""
for _ in range(3):
self.fout.write(self.FAKE_HEADER)
self.last_docno = -1
self.headers_written = True
def update_headers(self, num_docs, num_terms, num_nnz):
"""Update headers with actual values."""
offset = 0
values = [utils.to_utf8(str(n)) for n in [num_docs, num_terms, num_nnz]]
for value in values:
if len(value) > len(self.FAKE_HEADER):
raise ValueError('Invalid header: value too large!')
self.fout.seek(offset)
self.fout.write(value)
offset += len(self.FAKE_HEADER)
@staticmethod
def write_corpus(fname, corpus, progress_cnt=1000, index=False):
"""Write corpus in file.
Parameters
----------
fname : str
Path to output file.
corpus: iterable of list of (int, int)
Corpus in BoW format.
progress_cnt : int, optional
Progress counter, write log message each `progress_cnt` documents.
index : bool, optional
If True - return offsets, otherwise - nothing.
Returns
-------
list of int
Sequence of offsets to documents (in bytes), only if index=True.
"""
writer = UciWriter(fname)
writer.write_headers()
num_terms, num_nnz = 0, 0
docno, poslast = -1, -1
offsets = []
for docno, bow in enumerate(corpus):
if docno % progress_cnt == 0:
logger.info("PROGRESS: saving document #%i", docno)
if index:
posnow = writer.fout.tell()
if posnow == poslast:
offsets[-1] = -1
offsets.append(posnow)
poslast = posnow
vector = [(x, int(y)) for (x, y) in bow if int(y) != 0] # integer count, not floating weights
max_id, veclen = writer.write_vector(docno, vector)
num_terms = max(num_terms, 1 + max_id)
num_nnz += veclen
num_docs = docno + 1
if num_docs * num_terms != 0:
logger.info(
"saved %ix%i matrix, density=%.3f%% (%i/%i)",
num_docs, num_terms, 100.0 * num_nnz / (num_docs * num_terms),
num_nnz, num_docs * num_terms
)
# now write proper headers, by seeking and overwriting the spaces written earlier
writer.update_headers(num_docs, num_terms, num_nnz)
writer.close()
if index:
return offsets
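# Illustrative sketch of the on-disk layout produced above (an assumption added for
# clarity, not taken from any particular corpus): three header lines holding num_docs,
# num_terms and num_nnz, followed by one "docno termid count" triple per entry, e.g.
#
#   2
#   3
#   4
#   1 1 2
#   1 3 1
#   2 2 1
#   2 3 3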
class UciCorpus(UciReader, IndexedCorpus):
"""Corpus in the UCI bag-of-words format."""
def __init__(self, fname, fname_vocab=None):
"""
Parameters
----------
fname : str
Path to corpus in UCI format.
fname_vocab : str, optional
Path to vocab.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.corpora import UciCorpus
>>> from gensim.test.utils import datapath
>>>
>>> corpus = UciCorpus(datapath('testcorpus.uci'))
>>> for document in corpus:
... pass
"""
IndexedCorpus.__init__(self, fname)
UciReader.__init__(self, fname)
if fname_vocab is None:
fname_vocab = utils.smart_extension(fname, '.vocab')
self.fname = fname
with utils.open(fname_vocab, 'rb') as fin:
words = [word.strip() for word in fin]
self.id2word = dict(enumerate(words))
self.transposed = True
def __iter__(self):
"""Iterate over the corpus.
Yields
------
list of (int, int)
Document in BoW format.
"""
for docId, doc in super(UciCorpus, self).__iter__():
yield doc # get rid of docId, return the sparse vector only
def create_dictionary(self):
"""Generate :class:`gensim.corpora.dictionary.Dictionary` directly from the corpus and vocabulary data.
Returns
-------
:class:`gensim.corpora.dictionary.Dictionary`
Dictionary, based on corpus.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.corpora.ucicorpus import UciCorpus
>>> from gensim.test.utils import datapath
>>> ucc = UciCorpus(datapath('testcorpus.uci'))
>>> dictionary = ucc.create_dictionary()
"""
dictionary = Dictionary()
# replace dfs with defaultdict to avoid downstream KeyErrors
# uci vocabularies may contain terms that are not used in the document data
dictionary.dfs = defaultdict(int)
dictionary.id2token = self.id2word
dictionary.token2id = utils.revdict(self.id2word)
dictionary.num_docs = self.num_docs
dictionary.num_nnz = self.num_nnz
for docno, doc in enumerate(self):
if docno % 10000 == 0:
logger.info('PROGRESS: processing document %i of %i', docno, self.num_docs)
for word, count in doc:
dictionary.dfs[word] += 1
dictionary.num_pos += count
return dictionary
@staticmethod
def save_corpus(fname, corpus, id2word=None, progress_cnt=10000, metadata=False):
"""Save a corpus in the UCI Bag-of-Words format.
Warnings
--------
This function is automatically called by :meth:`gensim.corpora.ucicorpus.UciCorpus.serialize`,
don't call it directly, call :meth:`gensim.corpora.ucicorpus.UciCorpus.serialize` instead.
Parameters
----------
fname : str
Path to output file.
corpus: iterable of iterable of (int, int)
Corpus in BoW format.
id2word : {dict of (int, str), :class:`gensim.corpora.dictionary.Dictionary`}, optional
Mapping between words and their ids. If None - will be inferred from `corpus`.
progress_cnt : int, optional
Progress counter, write log message each `progress_cnt` documents.
metadata : bool, optional
THIS PARAMETER WILL BE IGNORED.
Notes
-----
There are actually two files saved: `fname` and `fname.vocab`, where `fname.vocab` is the vocabulary file.
"""
if id2word is None:
logger.info("no word id mapping provided; initializing from corpus")
id2word = utils.dict_from_corpus(corpus)
num_terms = len(id2word)
elif id2word:
num_terms = 1 + max(id2word)
else:
num_terms = 0
# write out vocabulary
fname_vocab = utils.smart_extension(fname, '.vocab')
logger.info("saving vocabulary of %i words to %s", num_terms, fname_vocab)
with utils.open(fname_vocab, 'wb') as fout:
for featureid in range(num_terms):
fout.write(utils.to_utf8("%s\n" % id2word.get(featureid, '---')))
logger.info("storing corpus in UCI Bag-of-Words format: %s", fname)
return UciWriter.write_corpus(fname, corpus, index=True, progress_cnt=progress_cnt)
|
from pyhap.const import (
CATEGORY_HUMIDIFIER,
HAP_REPR_AID,
HAP_REPR_CHARS,
HAP_REPR_IID,
HAP_REPR_VALUE,
)
from homeassistant.components.homekit.const import (
ATTR_VALUE,
CONF_LINKED_HUMIDITY_SENSOR,
PROP_MAX_VALUE,
PROP_MIN_STEP,
PROP_MIN_VALUE,
PROP_VALID_VALUES,
)
from homeassistant.components.homekit.type_humidifiers import HumidifierDehumidifier
from homeassistant.components.humidifier.const import (
ATTR_HUMIDITY,
ATTR_MAX_HUMIDITY,
ATTR_MIN_HUMIDITY,
DEFAULT_MAX_HUMIDITY,
DEFAULT_MIN_HUMIDITY,
DEVICE_CLASS_DEHUMIDIFIER,
DEVICE_CLASS_HUMIDIFIER,
DOMAIN,
SERVICE_SET_HUMIDITY,
)
from homeassistant.const import (
ATTR_DEVICE_CLASS,
ATTR_ENTITY_ID,
ATTR_UNIT_OF_MEASUREMENT,
DEVICE_CLASS_HUMIDITY,
PERCENTAGE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
STATE_UNAVAILABLE,
)
from tests.common import async_mock_service
async def test_humidifier(hass, hk_driver, events):
"""Test if humidifier accessory and HA are updated accordingly."""
entity_id = "humidifier.test"
hass.states.async_set(entity_id, STATE_OFF)
await hass.async_block_till_done()
acc = HumidifierDehumidifier(
hass, hk_driver, "HumidifierDehumidifier", entity_id, 1, None
)
hk_driver.add_accessory(acc)
await acc.run_handler()
await hass.async_block_till_done()
assert acc.aid == 1
assert acc.category == CATEGORY_HUMIDIFIER
assert acc.char_current_humidifier_dehumidifier.value == 0
assert acc.char_target_humidifier_dehumidifier.value == 1
assert acc.char_current_humidity.value == 0
assert acc.char_target_humidity.value == 45.0
assert acc.char_active.value == 0
assert acc.char_target_humidity.properties[PROP_MAX_VALUE] == DEFAULT_MAX_HUMIDITY
assert acc.char_target_humidity.properties[PROP_MIN_VALUE] == DEFAULT_MIN_HUMIDITY
assert acc.char_target_humidity.properties[PROP_MIN_STEP] == 1.0
assert acc.char_target_humidifier_dehumidifier.properties[PROP_VALID_VALUES] == {
"Humidifier": 1
}
hass.states.async_set(
entity_id,
STATE_ON,
{ATTR_HUMIDITY: 47},
)
await hass.async_block_till_done()
assert acc.char_target_humidity.value == 47.0
assert acc.char_current_humidifier_dehumidifier.value == 2
assert acc.char_target_humidifier_dehumidifier.value == 1
assert acc.char_active.value == 1
hass.states.async_set(
entity_id,
STATE_OFF,
{ATTR_HUMIDITY: 42, ATTR_DEVICE_CLASS: DEVICE_CLASS_HUMIDIFIER},
)
await hass.async_block_till_done()
assert acc.char_target_humidity.value == 42.0
assert acc.char_current_humidifier_dehumidifier.value == 0
assert acc.char_target_humidifier_dehumidifier.value == 1
assert acc.char_active.value == 0
# Set from HomeKit
call_set_humidity = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY)
char_target_humidity_iid = acc.char_target_humidity.to_HAP()[HAP_REPR_IID]
hk_driver.set_characteristics(
{
HAP_REPR_CHARS: [
{
HAP_REPR_AID: acc.aid,
HAP_REPR_IID: char_target_humidity_iid,
HAP_REPR_VALUE: 39.0,
},
]
},
"mock_addr",
)
await hass.async_block_till_done()
assert len(call_set_humidity) == 1
assert call_set_humidity[0].data[ATTR_ENTITY_ID] == entity_id
assert call_set_humidity[0].data[ATTR_HUMIDITY] == 39.0
assert acc.char_target_humidity.value == 39.0
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] == "RelativeHumidityHumidifierThreshold to 39.0%"
async def test_dehumidifier(hass, hk_driver, events):
"""Test if dehumidifier accessory and HA are updated accordingly."""
entity_id = "humidifier.test"
hass.states.async_set(
entity_id, STATE_OFF, {ATTR_DEVICE_CLASS: DEVICE_CLASS_DEHUMIDIFIER}
)
await hass.async_block_till_done()
acc = HumidifierDehumidifier(
hass, hk_driver, "HumidifierDehumidifier", entity_id, 1, None
)
hk_driver.add_accessory(acc)
await acc.run_handler()
await hass.async_block_till_done()
assert acc.aid == 1
assert acc.category == CATEGORY_HUMIDIFIER
assert acc.char_current_humidifier_dehumidifier.value == 0
assert acc.char_target_humidifier_dehumidifier.value == 2
assert acc.char_current_humidity.value == 0
assert acc.char_target_humidity.value == 45.0
assert acc.char_active.value == 0
assert acc.char_target_humidity.properties[PROP_MAX_VALUE] == DEFAULT_MAX_HUMIDITY
assert acc.char_target_humidity.properties[PROP_MIN_VALUE] == DEFAULT_MIN_HUMIDITY
assert acc.char_target_humidity.properties[PROP_MIN_STEP] == 1.0
assert acc.char_target_humidifier_dehumidifier.properties[PROP_VALID_VALUES] == {
"Dehumidifier": 2
}
hass.states.async_set(
entity_id,
STATE_ON,
{ATTR_HUMIDITY: 30},
)
await hass.async_block_till_done()
assert acc.char_target_humidity.value == 30.0
assert acc.char_current_humidifier_dehumidifier.value == 3
assert acc.char_target_humidifier_dehumidifier.value == 2
assert acc.char_active.value == 1
hass.states.async_set(
entity_id,
STATE_OFF,
{ATTR_HUMIDITY: 42},
)
await hass.async_block_till_done()
assert acc.char_target_humidity.value == 42.0
assert acc.char_current_humidifier_dehumidifier.value == 0
assert acc.char_target_humidifier_dehumidifier.value == 2
assert acc.char_active.value == 0
# Set from HomeKit
call_set_humidity = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY)
char_target_humidity_iid = acc.char_target_humidity.to_HAP()[HAP_REPR_IID]
hk_driver.set_characteristics(
{
HAP_REPR_CHARS: [
{
HAP_REPR_AID: acc.aid,
HAP_REPR_IID: char_target_humidity_iid,
HAP_REPR_VALUE: 39.0,
},
]
},
"mock_addr",
)
await hass.async_block_till_done()
assert len(call_set_humidity) == 1
assert call_set_humidity[0].data[ATTR_ENTITY_ID] == entity_id
assert call_set_humidity[0].data[ATTR_HUMIDITY] == 39.0
assert acc.char_target_humidity.value == 39.0
assert len(events) == 1
assert (
events[-1].data[ATTR_VALUE] == "RelativeHumidityDehumidifierThreshold to 39.0%"
)
async def test_hygrostat_power_state(hass, hk_driver, events):
"""Test if accessory and HA are updated accordingly."""
entity_id = "humidifier.test"
hass.states.async_set(
entity_id,
STATE_ON,
{ATTR_HUMIDITY: 43},
)
await hass.async_block_till_done()
acc = HumidifierDehumidifier(
hass, hk_driver, "HumidifierDehumidifier", entity_id, 1, None
)
hk_driver.add_accessory(acc)
await acc.run_handler()
await hass.async_block_till_done()
assert acc.char_current_humidifier_dehumidifier.value == 2
assert acc.char_target_humidifier_dehumidifier.value == 1
assert acc.char_active.value == 1
hass.states.async_set(
entity_id,
STATE_OFF,
{ATTR_HUMIDITY: 43},
)
await hass.async_block_till_done()
assert acc.char_current_humidifier_dehumidifier.value == 0
assert acc.char_target_humidifier_dehumidifier.value == 1
assert acc.char_active.value == 0
# Set from HomeKit
call_turn_on = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)
char_active_iid = acc.char_active.to_HAP()[HAP_REPR_IID]
hk_driver.set_characteristics(
{
HAP_REPR_CHARS: [
{
HAP_REPR_AID: acc.aid,
HAP_REPR_IID: char_active_iid,
HAP_REPR_VALUE: 1,
},
]
},
"mock_addr",
)
await hass.async_block_till_done()
assert len(call_turn_on) == 1
assert call_turn_on[0].data[ATTR_ENTITY_ID] == entity_id
assert acc.char_active.value == 1
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] == "Active to 1"
call_turn_off = async_mock_service(hass, DOMAIN, SERVICE_TURN_OFF)
hk_driver.set_characteristics(
{
HAP_REPR_CHARS: [
{
HAP_REPR_AID: acc.aid,
HAP_REPR_IID: char_active_iid,
HAP_REPR_VALUE: 0,
},
]
},
"mock_addr",
)
await hass.async_block_till_done()
assert len(call_turn_off) == 1
assert call_turn_off[0].data[ATTR_ENTITY_ID] == entity_id
assert acc.char_active.value == 0
assert len(events) == 2
assert events[-1].data[ATTR_VALUE] == "Active to 0"
async def test_hygrostat_get_humidity_range(hass, hk_driver):
"""Test if humidity range is evaluated correctly."""
entity_id = "humidifier.test"
hass.states.async_set(
entity_id, STATE_OFF, {ATTR_MIN_HUMIDITY: 40, ATTR_MAX_HUMIDITY: 45}
)
await hass.async_block_till_done()
acc = HumidifierDehumidifier(
hass, hk_driver, "HumidifierDehumidifier", entity_id, 1, None
)
hk_driver.add_accessory(acc)
await acc.run_handler()
await hass.async_block_till_done()
assert acc.char_target_humidity.properties[PROP_MAX_VALUE] == 45
assert acc.char_target_humidity.properties[PROP_MIN_VALUE] == 40
async def test_humidifier_with_linked_humidity_sensor(hass, hk_driver):
"""Test a humidifier with a linked humidity sensor can update."""
humidity_sensor_entity_id = "sensor.bedroom_humidity"
hass.states.async_set(
humidity_sensor_entity_id,
"42.0",
{
ATTR_DEVICE_CLASS: DEVICE_CLASS_HUMIDITY,
ATTR_UNIT_OF_MEASUREMENT: PERCENTAGE,
},
)
await hass.async_block_till_done()
entity_id = "humidifier.test"
hass.states.async_set(entity_id, STATE_OFF)
await hass.async_block_till_done()
acc = HumidifierDehumidifier(
hass,
hk_driver,
"HumidifierDehumidifier",
entity_id,
1,
{CONF_LINKED_HUMIDITY_SENSOR: humidity_sensor_entity_id},
)
hk_driver.add_accessory(acc)
await acc.run_handler()
await hass.async_block_till_done()
assert acc.char_current_humidity.value == 42.0
hass.states.async_set(
humidity_sensor_entity_id,
"43.0",
{
ATTR_DEVICE_CLASS: DEVICE_CLASS_HUMIDITY,
ATTR_UNIT_OF_MEASUREMENT: PERCENTAGE,
},
)
await hass.async_block_till_done()
assert acc.char_current_humidity.value == 43.0
hass.states.async_set(
humidity_sensor_entity_id,
STATE_UNAVAILABLE,
{
ATTR_DEVICE_CLASS: DEVICE_CLASS_HUMIDITY,
ATTR_UNIT_OF_MEASUREMENT: PERCENTAGE,
},
)
await hass.async_block_till_done()
assert acc.char_current_humidity.value == 43.0
hass.states.async_remove(humidity_sensor_entity_id)
await hass.async_block_till_done()
assert acc.char_current_humidity.value == 43.0
async def test_humidifier_with_a_missing_linked_humidity_sensor(hass, hk_driver):
"""Test a humidifier with a configured linked motion sensor that is missing."""
humidity_sensor_entity_id = "sensor.bedroom_humidity"
entity_id = "humidifier.test"
hass.states.async_set(entity_id, STATE_OFF)
await hass.async_block_till_done()
acc = HumidifierDehumidifier(
hass,
hk_driver,
"HumidifierDehumidifier",
entity_id,
1,
{CONF_LINKED_HUMIDITY_SENSOR: humidity_sensor_entity_id},
)
hk_driver.add_accessory(acc)
await acc.run_handler()
await hass.async_block_till_done()
assert acc.char_current_humidity.value == 0
async def test_humidifier_as_dehumidifier(hass, hk_driver, events, caplog):
"""Test an invalid char_target_humidifier_dehumidifier from HomeKit."""
entity_id = "humidifier.test"
hass.states.async_set(entity_id, STATE_OFF)
await hass.async_block_till_done()
acc = HumidifierDehumidifier(
hass, hk_driver, "HumidifierDehumidifier", entity_id, 1, None
)
hk_driver.add_accessory(acc)
await acc.run_handler()
await hass.async_block_till_done()
assert acc.char_target_humidifier_dehumidifier.value == 1
# Set from HomeKit
char_target_humidifier_dehumidifier_iid = (
acc.char_target_humidifier_dehumidifier.to_HAP()[HAP_REPR_IID]
)
hk_driver.set_characteristics(
{
HAP_REPR_CHARS: [
{
HAP_REPR_AID: acc.aid,
HAP_REPR_IID: char_target_humidifier_dehumidifier_iid,
HAP_REPR_VALUE: 0,
},
]
},
"mock_addr",
)
await hass.async_block_till_done()
assert "TargetHumidifierDehumidifierState is not supported" in caplog.text
assert len(events) == 0
|
import aiohttp
import pytest
from sharkiqpy import AylaApi, SharkIqAuthError
from homeassistant import config_entries, setup
from homeassistant.components.sharkiq.const import DOMAIN
from homeassistant.core import HomeAssistant
from .const import CONFIG, TEST_PASSWORD, TEST_USERNAME, UNIQUE_ID
from tests.async_mock import patch
from tests.common import MockConfigEntry
async def test_form(hass):
"""Test we get the form."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
with patch("sharkiqpy.AylaApi.async_sign_in", return_value=True), patch(
"homeassistant.components.sharkiq.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.sharkiq.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
CONFIG,
)
assert result2["type"] == "create_entry"
assert result2["title"] == f"{TEST_USERNAME:s}"
assert result2["data"] == {
"username": TEST_USERNAME,
"password": TEST_PASSWORD,
}
await hass.async_block_till_done()
mock_setup.assert_called_once()
mock_setup_entry.assert_called_once()
@pytest.mark.parametrize(
"exc,base_error",
[
(SharkIqAuthError, "invalid_auth"),
(aiohttp.ClientError, "cannot_connect"),
(TypeError, "unknown"),
],
)
async def test_form_error(hass: HomeAssistant, exc: Exception, base_error: str):
"""Test form errors."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch.object(AylaApi, "async_sign_in", side_effect=exc):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
CONFIG,
)
assert result2["type"] == "form"
assert result2["errors"].get("base") == base_error
async def test_reauth_success(hass: HomeAssistant):
"""Test reauth flow."""
with patch("sharkiqpy.AylaApi.async_sign_in", return_value=True):
mock_config = MockConfigEntry(domain=DOMAIN, unique_id=UNIQUE_ID, data=CONFIG)
mock_config.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "reauth", "unique_id": UNIQUE_ID}, data=CONFIG
)
assert result["type"] == "abort"
assert result["reason"] == "reauth_successful"
@pytest.mark.parametrize(
"side_effect,result_type,msg_field,msg",
[
(SharkIqAuthError, "form", "errors", "invalid_auth"),
(aiohttp.ClientError, "abort", "reason", "cannot_connect"),
(TypeError, "abort", "reason", "unknown"),
],
)
async def test_reauth(
hass: HomeAssistant,
side_effect: Exception,
result_type: str,
msg_field: str,
msg: str,
):
"""Test reauth failures."""
with patch("sharkiqpy.AylaApi.async_sign_in", side_effect=side_effect):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": "reauth", "unique_id": UNIQUE_ID},
data=CONFIG,
)
msg_value = result[msg_field]
if msg_field == "errors":
msg_value = msg_value.get("base")
assert result["type"] == result_type
assert msg_value == msg
|
from django.utils.translation import gettext_lazy as _
from weblate.checks.base import TargetCheck
class PluralsCheck(TargetCheck):
"""Check for incomplete plural forms."""
check_id = "plurals"
name = _("Missing plurals")
description = _("Some plural forms are not translated")
def check_target_unit(self, sources, targets, unit):
# Is this plural?
if len(sources) == 1:
return False
# Is at least something translated?
if targets == len(targets) * [""]:
return False
# Check for empty translation
return "" in targets
def check_single(self, source, target, unit):
"""We don't check target strings here."""
return False
class SamePluralsCheck(TargetCheck):
"""Check for same plural forms."""
check_id = "same-plurals"
name = _("Same plurals")
description = _("Some plural forms are translated in the same way")
def check_target_unit(self, sources, targets, unit):
# Is this plural?
if len(sources) == 1 or len(targets) == 1:
return False
if targets[0] == "":
return False
return len(set(targets)) == 1
def check_single(self, source, target, unit):
"""We don't check target strings here."""
return False
class ConsistencyCheck(TargetCheck):
"""Check for inconsistent translations."""
check_id = "inconsistent"
name = _("Inconsistent")
description = _(
"This string has more than one translation in this project "
"or is not translated in some components."
)
ignore_untranslated = False
propagates = True
def check_target_unit(self, sources, targets, unit):
if not unit.translation.component.allow_translation_propagation:
return False
for other in unit.same_source_units:
if unit.target == other.target:
continue
if unit.translated or other.translated:
return True
return False
def check_single(self, source, target, unit):
"""We don't check target strings here."""
return False
class TranslatedCheck(TargetCheck):
"""Check for inconsistent translations."""
check_id = "translated"
name = _("Has been translated")
description = _("This string has been translated in the past")
ignore_untranslated = False
def get_description(self, check_obj):
unit = check_obj.unit
target = self.check_target_unit(unit.source, unit.target, unit)
if not target:
return super().get_description(check_obj)
return _('Last translation was "%s".') % target
def check_target_unit(self, sources, targets, unit):
if unit.translated:
return False
from weblate.trans.models import Change
states = {Change.ACTION_SOURCE_CHANGE}
states.update(Change.ACTIONS_CONTENT)
changes = unit.change_set.filter(action__in=states).order()
for action, target in changes.values_list("action", "target"):
if action in Change.ACTIONS_CONTENT and target:
return target
if action == Change.ACTION_SOURCE_CHANGE:
break
return False
def check_single(self, source, target, unit):
"""We don't check target strings here."""
return False
def get_fixup(self, unit):
target = self.check_target_unit(unit.source, unit.target, unit)
if not target:
return None
return [(".*", target, "u")]
|
from __future__ import print_function
import posixpath
import re
from absl import flags
from perfkitbenchmarker import linux_packages
FLAGS = flags.FLAGS
flags.DEFINE_string('openmpi_version', '3.1.2',
'OpenMPI version to install, such as 3.1.2 and 4.0.2. '
'Set to empty to skip the installation of OpenMPI.')
flags.DEFINE_bool('openmpi_enable_shared', False,
'Whether openmpi should build shared libraries '
'in addition to static ones.')
flags.DEFINE_bool('openmpi_with_cuda_support', False,
'Compile with CUDA support')
flags.DEFINE_string('openmpi_configs', None,
'command line options to be provided to ./configure for '
'OpenMPI compilation')
MPI_URL_BASE = 'https://download.open-mpi.org/release/open-mpi'
REMOVE_MPI_CMD = 'autoremove -y libopenmpi-dev openmpi-bin openmpi-common'
class MpirunParseOutputError(Exception):
pass
def GetMpiVersion(vm):
"""Get the MPI version on the vm, based on mpirun.
Args:
vm: the virtual machine to query
Returns:
A string containing the active MPI version,
None if mpirun could not be found
"""
stdout, _ = vm.RemoteCommand('mpirun --version',
ignore_failure=True,
suppress_warning=True)
if bool(stdout.rstrip()):
regex = r'MPI\) (\S+)'
match = re.search(regex, stdout)
try:
return str(match.group(1))
except AttributeError:
raise MpirunParseOutputError('Unable to parse mpirun version output')
else:
return None
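# Illustrative note (the sample output below is an assumption, not captured from a VM):
# `mpirun --version` typically begins with a line such as "mpirun (Open MPI) 3.1.2",
# so the regex above extracts the token following "MPI) ", e.g.
#
#   >>> re.search(r'MPI\) (\S+)', 'mpirun (Open MPI) 3.1.2').group(1)
#   '3.1.2'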
def _Install(vm):
"""Installs the OpenMPI package on the VM."""
version_to_install = FLAGS.openmpi_version
if not version_to_install:
return
current_version = GetMpiVersion(vm)
if current_version == version_to_install:
return
first_dot_pos = version_to_install.find('.')
second_dot_pos = version_to_install.find('.', first_dot_pos + 1)
major_version = version_to_install[0:second_dot_pos]
mpi_tar = ('openmpi-{version}.tar.gz'.format(version=version_to_install))
mpi_url = ('{mpi_url_base}/v{major_version}/{mpi_tar}'.format(
mpi_url_base=MPI_URL_BASE, major_version=major_version, mpi_tar=mpi_tar))
install_dir = posixpath.join(
linux_packages.INSTALL_DIR,
'openmpi-{version}'.format(version=version_to_install))
vm.Install('build_tools')
vm.Install('wget')
vm.RemoteCommand('wget %s -P %s' % (mpi_url, install_dir))
vm.RemoteCommand('cd %s && tar xvfz %s' % (install_dir, mpi_tar))
make_jobs = vm.NumCpusForBenchmark()
config_options = []
config_options.append('--enable-static')
config_options.append('--prefix=/usr')
config_options.append('--enable-shared' if FLAGS.openmpi_enable_shared
else '--disable-shared')
if FLAGS.openmpi_with_cuda_support:
config_options.append('--with-cuda=/usr/local/cuda-{version}/'
.format(version=FLAGS.cuda_toolkit_version))
config_options.append('--with-cuda-libdir=/usr/local/cuda-{version}/lib64/'
.format(version=FLAGS.cuda_toolkit_version))
if FLAGS.openmpi_configs:
config_options.append(FLAGS.openmpi_configs)
config_cmd = './configure {}'.format(' '.join(config_options))
vm.RobustRemoteCommand(
'cd %s/openmpi-%s && %s && make -j %s && sudo make install' %
(install_dir, version_to_install, config_cmd, make_jobs))
def GetMpiDir():
"""Returns the installation dirtory of OpenMPI."""
mpi_dir = posixpath.join(
linux_packages.INSTALL_DIR,
'openmpi-{version}'.format(version=FLAGS.openmpi_version))
return mpi_dir
def YumInstall(vm):
"""Installs the OpenMPI package on the VM."""
if not FLAGS.openmpi_version:
return
vm.RobustRemoteCommand(
'sudo yum {}'.format(REMOVE_MPI_CMD), ignore_failure=True)
_Install(vm)
def AptInstall(vm):
"""Installs the OpenMPI package on the VM."""
if not FLAGS.openmpi_version:
return
vm.RobustRemoteCommand(
'sudo apt-get {}'.format(REMOVE_MPI_CMD), ignore_failure=True)
_Install(vm)
def _Uninstall(vm):
"""Uninstalls the OpenMPI package on the VM."""
vm.RemoteCommand('cd {0} && sudo make uninstall'.format(GetMpiDir()))
def YumUninstall(vm):
"""Uninstalls the OpenMPI package on the VM."""
_Uninstall(vm)
def AptUninstall(vm):
"""Uninstalls the OpenMPI package on the VM."""
_Uninstall(vm)
|
import six
import logging
import numpy as np
from pandas import DataFrame, MultiIndex, Series, DatetimeIndex, Index
import pandas as pd
# Used in global scope, do not remove.
from .._config import FAST_CHECK_DF_SERIALIZABLE
from .._util import NP_OBJECT_DTYPE
from ..exceptions import ArcticException
try: # 0.21+ Compatibility
from pandas._libs.tslib import Timestamp
from pandas._libs.tslibs.timezones import get_timezone
except ImportError:
try: # 0.20.x Compatibility
from pandas._libs.tslib import Timestamp, get_timezone
except ImportError: # <= 0.19 Compatibility
from pandas.tslib import Timestamp, get_timezone
log = logging.getLogger(__name__)
PD_VER = pd.__version__
DTN64_DTYPE = 'datetime64[ns]'
def set_fast_check_df_serializable(config):
global FAST_CHECK_DF_SERIALIZABLE
FAST_CHECK_DF_SERIALIZABLE = bool(config)
def _to_primitive(arr, string_max_len=None, forced_dtype=None):
if arr.dtype.hasobject:
if len(arr) > 0 and isinstance(arr[0], Timestamp):
return np.array([t.value for t in arr], dtype=DTN64_DTYPE)
if forced_dtype is not None:
casted_arr = arr.astype(dtype=forced_dtype, copy=False)
elif string_max_len is not None:
casted_arr = np.array(arr.astype('U{:d}'.format(string_max_len)))
else:
casted_arr = np.array(list(arr))
        # Only keep the cast if no values were silently converted (e.g. np.NaN to 'nan')
if np.array_equal(arr, casted_arr):
return casted_arr
return arr
def _multi_index_to_records(index, empty_index):
# array of tuples to numpy cols. copy copy copy
if not empty_index:
ix_vals = list(map(np.array, [index.get_level_values(i) for i in range(index.nlevels)]))
else:
        # empty multi index has no size, create empty arrays for recarray.
ix_vals = [np.array([]) for n in index.names]
index_names = list(index.names)
count = 0
for i, n in enumerate(index_names):
if n is None:
index_names[i] = 'level_%d' % count
count += 1
log.info("Level in MultiIndex has no name, defaulting to %s" % index_names[i])
index_tz = [get_timezone(i.tz) if isinstance(i, DatetimeIndex) else None for i in index.levels]
return ix_vals, index_names, index_tz
class PandasSerializer(object):
def _index_to_records(self, df):
metadata = {}
index = df.index
index_tz = None
if isinstance(index, MultiIndex):
ix_vals, index_names, index_tz = _multi_index_to_records(index, len(df) == 0)
else:
ix_vals = [index.values]
index_names = list(index.names)
if index_names[0] is None:
index_names = ['index']
log.info("Index has no name, defaulting to 'index'")
if isinstance(index, DatetimeIndex) and index.tz is not None:
index_tz = get_timezone(index.tz)
if index_tz is not None:
metadata['index_tz'] = index_tz
metadata['index'] = index_names
return index_names, ix_vals, metadata
def _index_from_records(self, recarr):
index = recarr.dtype.metadata['index']
if len(index) == 1:
rtn = Index(np.copy(recarr[str(index[0])]), name=index[0])
if isinstance(rtn, DatetimeIndex) and 'index_tz' in recarr.dtype.metadata:
rtn = rtn.tz_localize('UTC').tz_convert(recarr.dtype.metadata['index_tz'])
else:
level_arrays = []
index_tz = recarr.dtype.metadata.get('index_tz', [])
for level_no, index_name in enumerate(index):
# build each index level separately to ensure we end up with the right index dtype
level = Index(np.copy(recarr[str(index_name)]))
if level_no < len(index_tz):
tz = index_tz[level_no]
if tz is not None:
if not isinstance(level, DatetimeIndex) and len(level) == 0:
# index type information got lost during save as the index was empty, cast back
level = DatetimeIndex([], tz=tz)
else:
level = level.tz_localize('UTC').tz_convert(tz)
level_arrays.append(level)
rtn = MultiIndex.from_arrays(level_arrays, names=index)
return rtn
def _to_records(self, df, string_max_len=None, forced_dtype=None):
"""
Similar to DataFrame.to_records()
Differences:
Attempt type conversion for pandas columns stored as objects (e.g. strings),
as we can only store primitives in the ndarray.
Use dtype metadata to store column and index names.
string_max_len: integer - enforces a string size on the dtype, if any
strings exist in the record
"""
index_names, ix_vals, metadata = self._index_to_records(df)
columns, column_vals, multi_column = self._column_data(df)
if "" in columns:
raise ArcticException("Cannot use empty string as a column name.")
if multi_column is not None:
metadata['multi_column'] = multi_column
metadata['columns'] = columns
names = index_names + columns
arrays = []
for arr, name in zip(ix_vals + column_vals, index_names + columns):
arrays.append(_to_primitive(arr, string_max_len,
forced_dtype=None if forced_dtype is None else forced_dtype[name]))
if forced_dtype is None:
dtype = np.dtype([(str(x), v.dtype) if len(v.shape) == 1 else (str(x), v.dtype, v.shape[1])
for x, v in zip(names, arrays)],
metadata=metadata)
else:
dtype = forced_dtype
# The argument names is ignored when dtype is passed
rtn = np.rec.fromarrays(arrays, dtype=dtype, names=names)
# For some reason the dtype metadata is lost in the line above
# and setting rtn.dtype to dtype does not preserve the metadata
# see https://github.com/numpy/numpy/issues/6771
return (rtn, dtype)
def fast_check_serializable(self, df):
"""
        Efficiently convert the frame's object-columns/object-index/multi-index/multi-column to
        records, by creating a recarray only for the object fields instead of for the whole dataframe.
        If we have no object dtypes, we can safely convert only the first few rows to a recarray to test serializability.
        Previously we'd serialize the full dataframe twice when it included object fields or a multi-index/columns.
Parameters
----------
df: `pandas.DataFrame` or `pandas.Series`
Returns
-------
        `tuple[numpy.core.records.recarray, dict[str, numpy.dtype]]`
        If any object dtypes are detected in columns or index, the dict maps field-name -> dtype;
        otherwise the dict is empty.
"""
i_dtype, f_dtypes = df.index.dtype, df.dtypes
index_has_object = df.index.dtype is NP_OBJECT_DTYPE
fields_with_object = [f for f in df.columns if f_dtypes[f] is NP_OBJECT_DTYPE]
if df.empty or (not index_has_object and not fields_with_object):
arr, _ = self._to_records(df.iloc[:10]) # only first few rows for performance
return arr, {}
# If only the Index has Objects, choose a small slice (two columns if possible,
# to avoid switching from a DataFrame to a Series)
df_objects_only = df[fields_with_object if fields_with_object else df.columns[:2]]
# Let any exceptions bubble up from here
arr, dtype = self._to_records(df_objects_only)
return arr, {f: dtype[f] for f in dtype.names}
def can_convert_to_records_without_objects(self, df, symbol):
# We can't easily distinguish string columns from objects
try:
# TODO: we can add here instead a check based on df size and enable fast-check if sz > threshold value
if FAST_CHECK_DF_SERIALIZABLE:
arr, _ = self.fast_check_serializable(df)
else:
arr, _ = self._to_records(df)
except Exception as e:
# This exception will also occur when we try to write the object so we fall-back to saving using Pickle
log.warning('Pandas dataframe %s caused exception "%s" when attempting to convert to records. '
'Saving as Blob.' % (symbol, repr(e)))
return False
else:
if arr.dtype.hasobject:
log.warning('Pandas dataframe %s contains Objects, saving as Blob' % symbol)
# Fall-back to saving using Pickle
return False
elif any([len(x[0].shape) for x in arr.dtype.fields.values()]):
log.warning('Pandas dataframe %s contains >1 dimensional arrays, saving as Blob' % symbol)
return False
else:
return True
def serialize(self, item, string_max_len=None, forced_dtype=None):
raise NotImplementedError
def deserialize(self, item, force_bytes_to_unicode=False):
raise NotImplementedError
class SeriesSerializer(PandasSerializer):
TYPE = 'series'
def _column_data(self, s):
if s.name is None:
log.info("Series has no name, defaulting to 'values'")
columns = [s.name if s.name else 'values']
column_vals = [s.values]
return columns, column_vals, None
def deserialize(self, item, force_bytes_to_unicode=False):
index = self._index_from_records(item)
name = item.dtype.names[-1]
data = item[name]
if force_bytes_to_unicode:
if six.PY2 and isinstance(name, (bytes, str)):
name = name.decode('utf-8')
if len(data) and isinstance(data[0], bytes):
data = data.astype('unicode')
if isinstance(index, MultiIndex):
unicode_indexes = []
# MultiIndex requires a conversion at each level.
for level in range(len(index.levels)):
_index = index.get_level_values(level)
if isinstance(_index[0], bytes):
_index = _index.astype('unicode')
unicode_indexes.append(_index)
index = unicode_indexes
else:
if len(index) and type(index[0]) == bytes:
index = index.astype('unicode')
if PD_VER < '0.23.0':
return Series.from_array(data, index=index, name=name)
else:
return Series(data, index=index, name=name)
def serialize(self, item, string_max_len=None, forced_dtype=None):
return self._to_records(item, string_max_len, forced_dtype)
class DataFrameSerializer(PandasSerializer):
TYPE = 'df'
def _column_data(self, df):
columns = list(map(str, df.columns))
if columns != list(df.columns):
log.info("Dataframe column names converted to strings")
column_vals = [df[c].values for c in df.columns]
if isinstance(df.columns, MultiIndex):
ix_vals, ix_names, _ = _multi_index_to_records(df.columns, False)
vals = [list(val) for val in ix_vals]
str_vals = [list(map(str, val)) for val in ix_vals]
if vals != str_vals:
log.info("Dataframe column names converted to strings")
return columns, column_vals, {"names": list(ix_names), "values": str_vals}
else:
return columns, column_vals, None
def deserialize(self, item, force_bytes_to_unicode=False):
index = self._index_from_records(item)
column_fields = [x for x in item.dtype.names if x not in item.dtype.metadata['index']]
multi_column = item.dtype.metadata.get('multi_column')
if len(item) == 0:
rdata = item[column_fields] if len(column_fields) > 0 else None
if multi_column is not None:
columns = MultiIndex.from_arrays(multi_column["values"], names=multi_column["names"])
return DataFrame(rdata, index=index, columns=columns)
else:
return DataFrame(rdata, index=index)
columns = item.dtype.metadata['columns']
df = DataFrame(data=item[column_fields], index=index, columns=columns)
if multi_column is not None:
df.columns = MultiIndex.from_arrays(multi_column["values"], names=multi_column["names"])
if force_bytes_to_unicode:
# This is needed due to 'str' type in py2 when read back in py3 is 'bytes' which breaks the workflow
# of people migrating to py3. # https://github.com/manahl/arctic/issues/598
            # This should not be used in a normal flow; instead, write unicode strings
            # if you want to work with str in py3.
for c in df.select_dtypes(object):
                # The conversion does not use astype (unlike the index conversion) because pandas has a bug
                # where it tries to convert the data columns to a unicode string, and the object in this case
                # would be bytes, e.g. b'abc', which is converted to u"b'abc'", i.e. it includes the b character
                # as well! This generally happens when there is a str conversion without specifying the encoding,
                # e.g. str(b'abc') -> "b'abc'"; the fix is to tell it the encoding to use: str(b'abc', 'utf-8') -> "abc"
if type(df[c].iloc[0]) == bytes:
df[c] = df[c].str.decode('utf-8')
if isinstance(df.index, MultiIndex):
unicode_indexes = []
# MultiIndex requires a conversion at each level.
for level in range(len(df.index.levels)):
_index = df.index.get_level_values(level)
if isinstance(_index[0], bytes):
_index = _index.astype('unicode')
unicode_indexes.append(_index)
df.index = unicode_indexes
else:
if type(df.index[0]) == bytes:
df.index = df.index.astype('unicode')
if not df.columns.empty and type(df.columns[0]) == bytes:
df.columns = df.columns.astype('unicode')
return df
def serialize(self, item, string_max_len=None, forced_dtype=None):
return self._to_records(item, string_max_len, forced_dtype)
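# Minimal usage sketch (illustrative only, not part of the library API): the
# example frame below is made up; it just shows that serialize() returns a
# structured recarray plus a dtype whose metadata carries index/column names.
if __name__ == '__main__':
    _example_df = DataFrame({'price': [1.0, 2.5], 'qty': [3, 4]})
    _recarr, _dtype = DataFrameSerializer().serialize(_example_df)
    print(_recarr.dtype.names)          # ('index', 'price', 'qty')
    print(_dtype.metadata['columns'])   # ['price', 'qty']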
|
import argparse
class RegisterLeafClasses(type):
def __init__(cls, name, bases, nmspc):
super(RegisterLeafClasses, cls).__init__(name, bases, nmspc)
if not hasattr(cls, 'registry'):
cls.registry = set()
cls.registry.add(cls)
cls.registry -= set(bases) # Remove base classes
# metamethods, called on class objects:
def __iter__(cls):
return iter(cls.registry)
def __str__(cls):
if cls in cls.registry:
return cls.__name__
return cls.__name__ + " leaf classes: " + ", ".join([sc.__name__ for sc in cls])
class Plugin(object, metaclass=RegisterLeafClasses):
"""Plugin base class. May be subject to changes."""
MIN_VERSION = None
MAX_VERSION = None
@classmethod
def check_version(cls, version: str) -> bool:
from distutils.version import StrictVersion
if cls.MIN_VERSION:
if StrictVersion(cls.MIN_VERSION) > StrictVersion(version):
return False
if cls.MAX_VERSION:
return StrictVersion(cls.MAX_VERSION) >= StrictVersion(version)
return True
@classmethod
def __str__(cls):
return cls.__name__
@classmethod
def attach(cls, subparsers: argparse.ArgumentParser, log: list, **kwargs):
pass
@staticmethod
def action(args: argparse.Namespace):
pass
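# Illustrative sketch (class and argument names are made up): defining a
# subclass is enough to register it as a leaf class, so the registry can be
# iterated to wire up argparse subcommands via attach()/action(). Guarded so
# importing this module stays side-effect free.
if __name__ == '__main__':
    class ExamplePlugin(Plugin):
        MIN_VERSION = '1.0'

        @classmethod
        def attach(cls, subparsers, log, **kwargs):
            subparsers.add_parser('example')

    for plugin in Plugin:  # iterates the leaf-class registry
        print(plugin, plugin.check_version('1.2'))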
|
import pytest
from homeassistant.components.device_tracker.const import DOMAIN, SOURCE_TYPE_BLUETOOTH
from homeassistant.const import CONF_PLATFORM, STATE_HOME, STATE_NOT_HOME
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
from tests.common import async_fire_mqtt_message
@pytest.fixture(autouse=True)
def setup_comp(hass, mqtt_mock):
"""Set up mqtt component."""
pass
async def test_ensure_device_tracker_platform_validation(hass):
"""Test if platform validation was done."""
async def mock_setup_scanner(hass, config, see, discovery_info=None):
"""Check that Qos was added by validation."""
assert "qos" in config
with patch(
"homeassistant.components.mqtt.device_tracker.async_setup_scanner",
autospec=True,
side_effect=mock_setup_scanner,
) as mock_sp:
dev_id = "paulus"
topic = "/location/paulus"
assert await async_setup_component(
hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "mqtt", "devices": {dev_id: topic}}}
)
assert mock_sp.call_count == 1
async def test_new_message(hass, mock_device_tracker_conf):
"""Test new message."""
dev_id = "paulus"
entity_id = f"{DOMAIN}.{dev_id}"
topic = "/location/paulus"
location = "work"
hass.config.components = {"mqtt", "zone"}
assert await async_setup_component(
hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "mqtt", "devices": {dev_id: topic}}}
)
async_fire_mqtt_message(hass, topic, location)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == location
async def test_single_level_wildcard_topic(hass, mock_device_tracker_conf):
"""Test single level wildcard topic."""
dev_id = "paulus"
entity_id = f"{DOMAIN}.{dev_id}"
subscription = "/location/+/paulus"
topic = "/location/room/paulus"
location = "work"
hass.config.components = {"mqtt", "zone"}
assert await async_setup_component(
hass,
DOMAIN,
{DOMAIN: {CONF_PLATFORM: "mqtt", "devices": {dev_id: subscription}}},
)
async_fire_mqtt_message(hass, topic, location)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == location
async def test_multi_level_wildcard_topic(hass, mock_device_tracker_conf):
"""Test multi level wildcard topic."""
dev_id = "paulus"
entity_id = f"{DOMAIN}.{dev_id}"
subscription = "/location/#"
topic = "/location/room/paulus"
location = "work"
hass.config.components = {"mqtt", "zone"}
assert await async_setup_component(
hass,
DOMAIN,
{DOMAIN: {CONF_PLATFORM: "mqtt", "devices": {dev_id: subscription}}},
)
async_fire_mqtt_message(hass, topic, location)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == location
async def test_single_level_wildcard_topic_not_matching(hass, mock_device_tracker_conf):
"""Test not matching single level wildcard topic."""
dev_id = "paulus"
entity_id = f"{DOMAIN}.{dev_id}"
subscription = "/location/+/paulus"
topic = "/location/paulus"
location = "work"
hass.config.components = {"mqtt", "zone"}
assert await async_setup_component(
hass,
DOMAIN,
{DOMAIN: {CONF_PLATFORM: "mqtt", "devices": {dev_id: subscription}}},
)
async_fire_mqtt_message(hass, topic, location)
await hass.async_block_till_done()
assert hass.states.get(entity_id) is None
async def test_multi_level_wildcard_topic_not_matching(hass, mock_device_tracker_conf):
"""Test not matching multi level wildcard topic."""
dev_id = "paulus"
entity_id = f"{DOMAIN}.{dev_id}"
subscription = "/location/#"
topic = "/somewhere/room/paulus"
location = "work"
hass.config.components = {"mqtt", "zone"}
assert await async_setup_component(
hass,
DOMAIN,
{DOMAIN: {CONF_PLATFORM: "mqtt", "devices": {dev_id: subscription}}},
)
async_fire_mqtt_message(hass, topic, location)
await hass.async_block_till_done()
assert hass.states.get(entity_id) is None
async def test_matching_custom_payload_for_home_and_not_home(
hass, mock_device_tracker_conf
):
"""Test custom payload_home sets state to home and custom payload_not_home sets state to not_home."""
dev_id = "paulus"
entity_id = f"{DOMAIN}.{dev_id}"
topic = "/location/paulus"
payload_home = "present"
payload_not_home = "not present"
hass.config.components = {"mqtt", "zone"}
assert await async_setup_component(
hass,
DOMAIN,
{
DOMAIN: {
CONF_PLATFORM: "mqtt",
"devices": {dev_id: topic},
"payload_home": payload_home,
"payload_not_home": payload_not_home,
}
},
)
async_fire_mqtt_message(hass, topic, payload_home)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == STATE_HOME
async_fire_mqtt_message(hass, topic, payload_not_home)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == STATE_NOT_HOME
async def test_not_matching_custom_payload_for_home_and_not_home(
hass, mock_device_tracker_conf
):
"""Test not matching payload does not set state to home or not_home."""
dev_id = "paulus"
entity_id = f"{DOMAIN}.{dev_id}"
topic = "/location/paulus"
payload_home = "present"
payload_not_home = "not present"
payload_not_matching = "test"
hass.config.components = {"mqtt", "zone"}
assert await async_setup_component(
hass,
DOMAIN,
{
DOMAIN: {
CONF_PLATFORM: "mqtt",
"devices": {dev_id: topic},
"payload_home": payload_home,
"payload_not_home": payload_not_home,
}
},
)
async_fire_mqtt_message(hass, topic, payload_not_matching)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state != STATE_HOME
assert hass.states.get(entity_id).state != STATE_NOT_HOME
async def test_matching_source_type(hass, mock_device_tracker_conf):
"""Test setting source type."""
dev_id = "paulus"
entity_id = f"{DOMAIN}.{dev_id}"
topic = "/location/paulus"
source_type = SOURCE_TYPE_BLUETOOTH
location = "work"
hass.config.components = {"mqtt", "zone"}
assert await async_setup_component(
hass,
DOMAIN,
{
DOMAIN: {
CONF_PLATFORM: "mqtt",
"devices": {dev_id: topic},
"source_type": source_type,
}
},
)
async_fire_mqtt_message(hass, topic, location)
await hass.async_block_till_done()
assert hass.states.get(entity_id).attributes["source_type"] == SOURCE_TYPE_BLUETOOTH
|
import time
import jsonschema
try:
import simplejson as json
except ImportError:
import json
try:
from http import HTTPStatus
except ImportError:
import httplib as HTTPStatus
from flask import Flask
from flask import jsonify
from flask import request
from flasgger import Swagger
def drop_id_validate(data, schema):
"""
Custom validation function which drops parameter '_id' if present
in data
"""
jsonschema.validate(data, schema)
if data.get('_id') is not None:
del data['_id']
def timestamping_validate(data, schema):
"""
Custom validation function which inserts a timestamp for when the
validation occurred
"""
jsonschema.validate(data, schema)
data['timestamp'] = str(time.time())
def special_validate(data, schema):
"""
    Custom validation function which inserts a special flag depending
on the cat's name
"""
jsonschema.validate(data, schema)
data['special'] = str(data['name'] == 'Garfield').lower()
def regular_validate(data, schema):
"""
Regular validation function
"""
jsonschema.validate(data, schema)
app = Flask(__name__)
swag = Swagger(app, validation_function=drop_id_validate)
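# The validation_function passed to Swagger() above is the default applied by
# every @swag.validate() call; the routes below override it per endpoint via
# the validation_function keyword argument.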
@app.route('/cat', methods=['POST'])
@swag.validate('Cat')
def create_cat():
"""
Cat creation endpoint which drops '_id' parameters when present in
request data
---
tags:
- cat
summary: Creates a new cat
consumes:
- application/json
produces:
- application/json
parameters:
- in: body
name: body
description:
Cat object that needs to be persisted to the database
required: true
schema:
id: Cat
required:
- name
- address
properties:
name:
description: Cat's name
type: string
example: Sylvester
address:
description: Cat's house address
type: string
example: 4000 Warner Blvd., Burbank, CA 91522
responses:
200:
description: Successful operation
400:
description: Invalid input
"""
return jsonify(request.json), HTTPStatus.OK
@app.route('/timestamped/cat', methods=['POST'])
@swag.validate('Cat', validation_function=timestamping_validate)
def create_timestamped_cat():
"""
Cat creation endpoint which timestamps validated data
---
tags:
- cat
summary: Creates a new cat
consumes:
- application/json
produces:
- application/json
parameters:
- in: body
name: body
description:
Cat object that needs to be persisted to the database
required: true
schema:
$ref: '#/definitions/Cat'
responses:
200:
description: Successful operation
schema:
$ref: '#/definitions/Cat'
400:
description: Invalid input
"""
return jsonify(request.json), HTTPStatus.OK
@app.route('/special/cat', methods=['POST'])
@swag.validate('Cat', validation_function=special_validate)
def create_special_cat():
"""
    Cat creation endpoint which marks validated data with a 'special' flag
---
tags:
- cat
summary: Creates a new cat
consumes:
- application/json
produces:
- application/json
parameters:
- in: body
name: body
description:
Cat object that needs to be persisted to the database
required: true
schema:
$ref: '#/definitions/Cat'
responses:
200:
description: Successful operation
schema:
$ref: '#/definitions/Cat'
400:
description: Invalid input
"""
return jsonify(request.json), HTTPStatus.OK
@app.route('/regular/cat', methods=['POST'])
@swag.validate('Cat', validation_function=regular_validate)
def create_regular_cat():
"""
Cat creation endpoint
---
tags:
- cat
summary: Creates a new cat
consumes:
- application/json
produces:
- application/json
parameters:
- in: body
name: body
description:
Cat object that needs to be persisted to the database
required: true
schema:
$ref: '#/definitions/Cat'
responses:
200:
description: Successful operation
schema:
$ref: '#/definitions/Cat'
400:
description: Invalid input
"""
return jsonify(request.json), HTTPStatus.OK
def test_swag(client, specs_data):
"""
    This test runs automatically in Travis CI
:param client: Flask app test client
:param specs_data: {'url': {swag_specs}} for every spec in app
"""
cat = {
'_id': "594dba7b2879334e411f3dcc",
'name': "Tom",
'address': "MGM, 245 N. Beverly Drive, Beverly Hills, CA 90210"
}
with client.post(
'/cat', data=json.dumps(cat), content_type='application/json') as response:
assert response.status_code == HTTPStatus.OK
received = json.loads(response.data.decode('utf-8'))
assert received.get('_id') is None
assert received.get('timestamp') is None
assert received.get('special') is None
try:
assert received.viewitems() < cat.viewitems()
except AttributeError:
assert received.items() < cat.items()
with client.post(
'/timestamped/cat', data=json.dumps(cat),
content_type='application/json') as response:
assert response.status_code == HTTPStatus.OK
received = json.loads(response.data.decode('utf-8'))
assert received.get('_id') == cat.get('_id')
assert received.get('timestamp') is not None
assert received.get('special') is None
try:
assert received.viewitems() > cat.viewitems()
except AttributeError:
assert received.items() > cat.items()
with client.post(
'/special/cat', data=json.dumps(cat),
content_type='application/json') as response:
assert response.status_code == HTTPStatus.OK
received = json.loads(response.data.decode('utf-8'))
assert received.get('_id') == cat.get('_id')
assert received.get('timestamp') is None
assert received.get('special') is not None
try:
assert received.viewitems() > cat.viewitems()
except AttributeError:
assert received.items() > cat.items()
with client.post(
'/regular/cat', data=json.dumps(cat),
content_type='application/json') as response:
assert response.status_code == HTTPStatus.OK
received = json.loads(response.data.decode('utf-8'))
assert received.get('_id') == cat.get('_id')
assert received.get('timestamp') is None
assert received.get('special') is None
try:
assert received.viewitems() == cat.viewitems()
except AttributeError:
assert received.items() == cat.items()
if __name__ == "__main__":
app.run(debug=True)
|
import logging
import re
import requests
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_STOP,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_STEP,
)
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PORT,
STATE_IDLE,
STATE_OFF,
STATE_PAUSED,
STATE_PLAYING,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "MPC-HC"
DEFAULT_PORT = 13579
SUPPORT_MPCHC = (
SUPPORT_VOLUME_MUTE
| SUPPORT_PAUSE
| SUPPORT_STOP
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_NEXT_TRACK
| SUPPORT_VOLUME_STEP
| SUPPORT_PLAY
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the MPC-HC platform."""
name = config.get(CONF_NAME)
host = config.get(CONF_HOST)
port = config.get(CONF_PORT)
url = f"{host}:{port}"
add_entities([MpcHcDevice(name, url)], True)
class MpcHcDevice(MediaPlayerEntity):
"""Representation of a MPC-HC server."""
def __init__(self, name, url):
"""Initialize the MPC-HC device."""
self._name = name
self._url = url
self._player_variables = {}
self._available = False
def update(self):
"""Get the latest details."""
try:
response = requests.get(f"{self._url}/variables.html", data=None, timeout=3)
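            # The MPC-HC web interface serves variables.html as a series of
            # entries like <p id="statestring">Playing</p> (statestring, file,
            # volumelevel, ...), which the regex below turns into a dict.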
mpchc_variables = re.findall(r'<p id="(.+?)">(.+?)</p>', response.text)
for var in mpchc_variables:
self._player_variables[var[0]] = var[1].lower()
self._available = True
except requests.exceptions.RequestException:
if self.available:
_LOGGER.error("Could not connect to MPC-HC at: %s", self._url)
self._player_variables = {}
self._available = False
def _send_command(self, command_id):
"""Send a command to MPC-HC via its window message ID."""
try:
params = {"wm_command": command_id}
requests.get(f"{self._url}/command.html", params=params, timeout=3)
except requests.exceptions.RequestException:
_LOGGER.error(
"Could not send command %d to MPC-HC at: %s", command_id, self._url
)
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
state = self._player_variables.get("statestring", None)
if state is None:
return STATE_OFF
if state == "playing":
return STATE_PLAYING
if state == "paused":
return STATE_PAUSED
return STATE_IDLE
@property
def available(self):
"""Return True if entity is available."""
return self._available
@property
def media_title(self):
"""Return the title of current playing media."""
return self._player_variables.get("file", None)
@property
def volume_level(self):
"""Return the volume level of the media player (0..1)."""
return int(self._player_variables.get("volumelevel", 0)) / 100.0
@property
def is_volume_muted(self):
"""Return boolean if volume is currently muted."""
return self._player_variables.get("muted", "0") == "1"
@property
def media_duration(self):
"""Return the duration of the current playing media in seconds."""
duration = self._player_variables.get("durationstring", "00:00:00").split(":")
return int(duration[0]) * 3600 + int(duration[1]) * 60 + int(duration[2])
@property
def supported_features(self):
"""Flag media player features that are supported."""
return SUPPORT_MPCHC
def volume_up(self):
"""Volume up the media player."""
self._send_command(907)
def volume_down(self):
"""Volume down media player."""
self._send_command(908)
def mute_volume(self, mute):
"""Mute the volume."""
self._send_command(909)
def media_play(self):
"""Send play command."""
self._send_command(887)
def media_pause(self):
"""Send pause command."""
self._send_command(888)
def media_stop(self):
"""Send stop command."""
self._send_command(890)
def media_next_track(self):
"""Send next track command."""
self._send_command(920)
def media_previous_track(self):
"""Send previous track command."""
self._send_command(919)
|
import pickle
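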
from src.config import CACHE_PATH, METADATA_PATH
from src.s3_client_deeposm import post_findings_to_s3
from src.single_layer_network import load_model, MODEL_METADATA_PATH
def main():
"""Post test results to an S3 bucket."""
    with open(CACHE_PATH + 'raster_data_paths.pickle', 'rb') as infile:
        raster_data_paths = pickle.load(infile)
    with open(CACHE_PATH + METADATA_PATH, 'rb') as infile:
        training_info = pickle.load(infile)
    with open(CACHE_PATH + MODEL_METADATA_PATH, 'rb') as infile:
        model_info = pickle.load(infile)
model = load_model(model_info['neural_net_type'], model_info['tile_size'],
len(model_info['bands']))
post_findings_to_s3(raster_data_paths, model, training_info, model_info['bands'], False)
if __name__ == "__main__":
main()
|
from collections import Counter
import numpy as np
from .mixin import TransformerMixin, EstimatorMixin
from .base import _set_cv
from ..io.pick import _picks_to_idx
from ..parallel import parallel_func
from ..utils import logger, verbose
from .. import pick_types, pick_info
class EMS(TransformerMixin, EstimatorMixin):
"""Transformer to compute event-matched spatial filters.
This version of EMS :footcite:`SchurgerEtAl2013` operates on the entire
time course. No time
window needs to be specified. The result is a spatial filter at each
time point and a corresponding time course. Intuitively, the result
gives the similarity between the filter at each time point and the
data vector (sensors) at that time point.
.. note:: EMS only works for binary classification.
Attributes
----------
filters_ : ndarray, shape (n_channels, n_times)
The set of spatial filters.
classes_ : ndarray, shape (n_classes,)
The target classes.
References
----------
.. footbibliography::
"""
def __repr__(self): # noqa: D105
if hasattr(self, 'filters_'):
return '<EMS: fitted with %i filters on %i classes.>' % (
len(self.filters_), len(self.classes_))
else:
return '<EMS: not fitted.>'
def fit(self, X, y):
"""Fit the spatial filters.
        .. note:: EMS is fitted on data normalized by channel type before the
fitting of the spatial filters.
Parameters
----------
X : array, shape (n_epochs, n_channels, n_times)
The training data.
y : array of int, shape (n_epochs)
The target classes.
Returns
-------
self : instance of EMS
Returns self.
"""
classes = np.unique(y)
if len(classes) != 2:
raise ValueError('EMS only works for binary classification.')
self.classes_ = classes
filters = X[y == classes[0]].mean(0) - X[y == classes[1]].mean(0)
filters /= np.linalg.norm(filters, axis=0)[None, :]
self.filters_ = filters
return self
def transform(self, X):
"""Transform the data by the spatial filters.
Parameters
----------
X : array, shape (n_epochs, n_channels, n_times)
The input data.
Returns
-------
X : array, shape (n_epochs, n_times)
The input data transformed by the spatial filters.
"""
Xt = np.sum(X * self.filters_, axis=1)
return Xt
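# Minimal usage sketch with synthetic data (illustrative only; shapes follow
# the fit/transform docstrings above). Kept as a comment so importing this
# module stays side-effect free:
#
#     rng = np.random.RandomState(0)
#     X = rng.randn(20, 32, 100)      # 20 epochs, 32 channels, 100 time points
#     y = np.repeat([0, 1], 10)
#     ems = EMS().fit(X, y)
#     courses = ems.transform(X)      # shape (20, 100), one time course per epoch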
@verbose
def compute_ems(epochs, conditions=None, picks=None, n_jobs=1, cv=None,
verbose=None):
"""Compute event-matched spatial filter on epochs.
This version of EMS :footcite:`SchurgerEtAl2013` operates on the entire
time course. No time
window needs to be specified. The result is a spatial filter at each
time point and a corresponding time course. Intuitively, the result
gives the similarity between the filter at each time point and the
data vector (sensors) at that time point.
    .. note:: EMS only works for binary classification.
    .. note:: The present function applies a leave-one-out cross-validation,
        following Schurger et al's paper. However, we recommend using
        a stratified k-fold cross-validation. Indeed, leave-one-out tends
        to overfit and cannot be used to estimate the variance of the
        prediction within a given fold.
    .. note:: Because of the leave-one-out, this function needs an equal
        number of epochs in each of the two conditions.
Parameters
----------
epochs : instance of mne.Epochs
The epochs.
conditions : list of str | None, default None
If a list of strings, strings must match the epochs.event_id's key as
well as the number of conditions supported by the objective_function.
If None keys in epochs.event_id are used.
%(picks_good_data)s
%(n_jobs)s
cv : cross-validation object | str | None, default LeaveOneOut
The cross-validation scheme.
%(verbose)s
Returns
-------
surrogate_trials : ndarray, shape (n_trials // 2, n_times)
The trial surrogates.
mean_spatial_filter : ndarray, shape (n_channels, n_times)
The set of spatial filters.
conditions : ndarray, shape (n_classes,)
The conditions used. Values correspond to original event ids.
References
----------
.. footbibliography::
"""
logger.info('...computing surrogate time series. This can take some time')
# Default to leave-one-out cv
cv = 'LeaveOneOut' if cv is None else cv
picks = _picks_to_idx(epochs.info, picks)
if not len(set(Counter(epochs.events[:, 2]).values())) == 1:
        raise ValueError('This function requires an equal number of epochs '
                         'in each condition. Please consider '
                         '`epochs.equalize_event_counts`')
if conditions is None:
conditions = epochs.event_id.keys()
epochs = epochs.copy()
else:
epochs = epochs[conditions]
epochs.drop_bad()
if len(conditions) != 2:
raise ValueError('Currently this function expects exactly 2 '
'conditions but you gave me %i' %
len(conditions))
ev = epochs.events[:, 2]
# Special care to avoid path dependent mappings and orders
conditions = list(sorted(conditions))
cond_idx = [np.where(ev == epochs.event_id[k])[0] for k in conditions]
info = pick_info(epochs.info, picks)
data = epochs.get_data(picks=picks)
# Scale (z-score) the data by channel type
# XXX the z-scoring is applied outside the CV, which is not standard.
for ch_type in ['mag', 'grad', 'eeg']:
if ch_type in epochs:
# FIXME should be applied to all sort of data channels
if ch_type == 'eeg':
this_picks = pick_types(info, meg=False, eeg=True)
else:
this_picks = pick_types(info, meg=ch_type, eeg=False)
data[:, this_picks] /= np.std(data[:, this_picks])
# Setup cross-validation. Need to use _set_cv to deal with sklearn
# deprecation of cv objects.
y = epochs.events[:, 2]
_, cv_splits = _set_cv(cv, 'classifier', X=y, y=y)
parallel, p_func, _ = parallel_func(_run_ems, n_jobs=n_jobs)
# FIXME this parallelization should be removed.
# 1) it's numpy computation so it's already efficient,
# 2) it duplicates the data in RAM,
# 3) the computation is already super fast.
out = parallel(p_func(_ems_diff, data, cond_idx, train, test)
for train, test in cv_splits)
surrogate_trials, spatial_filter = zip(*out)
surrogate_trials = np.array(surrogate_trials)
spatial_filter = np.mean(spatial_filter, axis=0)
return surrogate_trials, spatial_filter, epochs.events[:, 2]
def _ems_diff(data0, data1):
"""Compute the default diff objective function."""
return np.mean(data0, axis=0) - np.mean(data1, axis=0)
def _run_ems(objective_function, data, cond_idx, train, test):
"""Run EMS."""
d = objective_function(*(data[np.intersect1d(c, train)] for c in cond_idx))
d /= np.sqrt(np.sum(d ** 2, axis=0))[None, :]
# compute surrogates
return np.sum(data[test[0]] * d, axis=0), d
|
from django.core.paginator import Paginator
from django.template import Context
from django.test import TestCase
from zinnia.context import get_context_first_matching_object
from zinnia.context import get_context_first_object
from zinnia.context import get_context_loop_positions
class ContextTestCase(TestCase):
"""Tests cases for context"""
def test_get_context_first_matching_object(self):
context = Context({'a': 1, 'b': 2, 'c': 3})
self.assertEqual(
get_context_first_matching_object(context, ['key']),
(None, None))
self.assertEqual(
get_context_first_matching_object(context, ['a']),
('a', 1))
self.assertEqual(
get_context_first_matching_object(context, ['b', 'a']),
('b', 2))
def test_get_context_first_object(self):
context = Context({'a': 1, 'b': 2, 'c': 3})
self.assertEqual(
get_context_first_object(context, ['key']),
None)
self.assertEqual(
get_context_first_object(context, ['a']),
1)
self.assertEqual(
get_context_first_object(context, ['b', 'a']),
2)
def test_get_context_loop_positions(self):
paginator = Paginator(range(50), 10)
context = Context({})
self.assertEqual(
get_context_loop_positions(context),
(0, 0))
context = Context({'forloop': {'counter': 5}})
self.assertEqual(
get_context_loop_positions(context),
(5, 5))
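        # With 50 items at 10 per page, page 3 starts at item 21, so a loop
        # counter of 5 corresponds to global position 25.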
context = Context({'forloop': {'counter': 5},
'page_obj': paginator.page(3)})
self.assertEqual(
get_context_loop_positions(context),
(25, 5))
|
from datetime import timedelta
import voluptuous as vol
from homeassistant.components.binary_sensor import (
DEVICE_CLASSES_SCHEMA,
PLATFORM_SCHEMA,
BinarySensorEntity,
)
from homeassistant.const import (
CONF_COMMAND,
CONF_DEVICE_CLASS,
CONF_NAME,
CONF_PAYLOAD_OFF,
CONF_PAYLOAD_ON,
CONF_VALUE_TEMPLATE,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.reload import setup_reload_service
from .const import CONF_COMMAND_TIMEOUT, DEFAULT_TIMEOUT, DOMAIN, PLATFORMS
from .sensor import CommandSensorData
DEFAULT_NAME = "Binary Command Sensor"
DEFAULT_PAYLOAD_ON = "ON"
DEFAULT_PAYLOAD_OFF = "OFF"
SCAN_INTERVAL = timedelta(seconds=60)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_COMMAND): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PAYLOAD_OFF, default=DEFAULT_PAYLOAD_OFF): cv.string,
vol.Optional(CONF_PAYLOAD_ON, default=DEFAULT_PAYLOAD_ON): cv.string,
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_COMMAND_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
}
)
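# Illustrative configuration.yaml entry accepted by the schema above (the
# command and template values are made up):
#
#   binary_sensor:
#     - platform: command_line
#       command: "cat /proc/loadavg"
#       payload_on: "ON"
#       payload_off: "OFF"
#       value_template: "{{ 'ON' if value.split()[0] | float > 1 else 'OFF' }}"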
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Command line Binary Sensor."""
setup_reload_service(hass, DOMAIN, PLATFORMS)
name = config.get(CONF_NAME)
command = config.get(CONF_COMMAND)
payload_off = config.get(CONF_PAYLOAD_OFF)
payload_on = config.get(CONF_PAYLOAD_ON)
device_class = config.get(CONF_DEVICE_CLASS)
value_template = config.get(CONF_VALUE_TEMPLATE)
command_timeout = config.get(CONF_COMMAND_TIMEOUT)
if value_template is not None:
value_template.hass = hass
data = CommandSensorData(hass, command, command_timeout)
add_entities(
[
CommandBinarySensor(
hass, data, name, device_class, payload_on, payload_off, value_template
)
],
True,
)
class CommandBinarySensor(BinarySensorEntity):
"""Representation of a command line binary sensor."""
def __init__(
self, hass, data, name, device_class, payload_on, payload_off, value_template
):
"""Initialize the Command line binary sensor."""
self._hass = hass
self.data = data
self._name = name
self._device_class = device_class
self._state = False
self._payload_on = payload_on
self._payload_off = payload_off
self._value_template = value_template
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return self._state
@property
def device_class(self):
"""Return the class of the binary sensor."""
return self._device_class
def update(self):
"""Get the latest data and updates the state."""
self.data.update()
value = self.data.value
if self._value_template is not None:
value = self._value_template.render_with_possible_json_value(value, False)
if value == self._payload_on:
self._state = True
elif value == self._payload_off:
self._state = False
|
from tensornetwork.block_sparse.blocksparsetensor import BlockSparseTensor
from tensornetwork.block_sparse.charge import U1Charge, BaseCharge
from tensornetwork.block_sparse.index import Index
import tensornetwork.block_sparse as bs
from tensornetwork.backends.symmetric import decompositions
import tensornetwork.backends.numpy.decompositions as np_decompositions
import pytest
import numpy as np
np_dtypes = [np.float64, np.complex128]
@pytest.mark.parametrize("dtype", np_dtypes)
@pytest.mark.parametrize("R, R1", [(2, 1), (3, 2), (3, 1)])
@pytest.mark.parametrize("num_charges", [1, 2, 3])
def test_svds(dtype, R, R1, num_charges):
np.random.seed(10)
D = 30
charges = [
BaseCharge(
np.random.randint(-5, 6, (D, num_charges)),
charge_types=[U1Charge] * num_charges) for n in range(R)
]
flows = [True] * R
A = BlockSparseTensor.random([Index(charges[n], flows[n]) for n in range(R)],
dtype=dtype)
u, s, v, _ = decompositions.svd(bs, A, R1)
u_dense, s_dense, v_dense, _ = np_decompositions.svd(np, A.todense(), R1)
res1 = bs.tensordot(bs.tensordot(u, bs.diag(s), 1), v, 1)
res2 = np.tensordot(np.tensordot(u_dense, np.diag(s_dense), 1), v_dense, 1)
np.testing.assert_almost_equal(res1.todense(), res2)
@pytest.mark.parametrize("dtype", np_dtypes)
@pytest.mark.parametrize("R, R1", [(2, 1), (3, 2), (3, 1)])
@pytest.mark.parametrize("num_charges", [1, 2, 3])
def test_singular_values(dtype, R, R1, num_charges):
np.random.seed(10)
D = 30
charges = [
BaseCharge(
np.random.randint(-5, 6, (D, num_charges)),
charge_types=[U1Charge] * num_charges) for n in range(R)
]
flows = [True] * R
A = BlockSparseTensor.random([Index(charges[n], flows[n]) for n in range(R)],
dtype=dtype)
_, s, _, _ = decompositions.svd(bs, A, R1)
_, s_dense, _, _ = np_decompositions.svd(np, A.todense(), R1)
np.testing.assert_almost_equal(
np.sort(s.todense()), np.sort(s_dense[s_dense > 1E-13]))
@pytest.mark.parametrize("dtype", np_dtypes)
@pytest.mark.parametrize("R, R1", [(2, 1), (3, 2), (3, 1)])
@pytest.mark.parametrize("num_charges", [1, 2, 3])
def test_max_singular_values(dtype, R, R1, num_charges):
np.random.seed(10)
D = 30
max_singular_values = 12
charges = [
BaseCharge(
np.random.randint(-5, 6, (D, num_charges)),
charge_types=[U1Charge] * num_charges) for n in range(R)
]
flows = [True] * R
A = BlockSparseTensor.random([Index(charges[n], flows[n]) for n in range(R)],
dtype=dtype)
_, s, _, _ = decompositions.svd(
bs, A, R1, max_singular_values=max_singular_values)
assert len(s.data) <= max_singular_values
@pytest.mark.parametrize("dtype", np_dtypes)
@pytest.mark.parametrize("num_charges", [1, 2, 3])
def test_max_truncation_error(dtype, num_charges):
np.random.seed(10)
R = 2
D = 30
charges = [
BaseCharge(
np.random.randint(-5, 6, (D, num_charges)),
charge_types=[U1Charge] * num_charges) for n in range(R)
]
flows = [True] * R
random_matrix = BlockSparseTensor.random(
[Index(charges[n], flows[n]) for n in range(R)], dtype=dtype)
U, S, V = bs.svd(random_matrix, full_matrices=False)
svals = np.array(range(1, len(S.data) + 1)).astype(np.float64)
S.data = svals[::-1]
val = U @ bs.diag(S) @ V
trunc = 8
mask = np.sqrt(np.cumsum(np.square(svals))) >= trunc
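  # Discard the smallest singular values whose cumulative 2-norm stays within
  # the allowed truncation error; the surviving values are compared below in
  # descending order (hence the [::-1]).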
_, S2, _, _ = decompositions.svd(
bs, val, 1, max_truncation_error=trunc)
np.testing.assert_allclose(S2.data, svals[mask][::-1])
@pytest.mark.parametrize("dtype", np_dtypes)
@pytest.mark.parametrize("num_charges", [1, 2, 3])
def test_max_singular_values_larger_than_bond_dimension(dtype, num_charges):
np.random.seed(10)
R = 2
D = 30
charges = [
BaseCharge(
np.random.randint(-5, 6, (D, num_charges)),
charge_types=[U1Charge] * num_charges) for n in range(R)
]
flows = [True] * R
random_matrix = BlockSparseTensor.random(
[Index(charges[n], flows[n]) for n in range(R)], dtype=dtype)
U, S, V = bs.svd(random_matrix, full_matrices=False)
S.data = np.array(range(len(S.data)))
val = U @ bs.diag(S) @ V
_, S2, _, _ = decompositions.svd(
bs, val, 1, max_singular_values=40)
assert S2.shape == S.shape
@pytest.mark.parametrize("dtype", np_dtypes)
@pytest.mark.parametrize("R, R1", [(2, 1), (3, 2), (3, 1)])
@pytest.mark.parametrize("num_charges", [1, 2, 3])
def test_rq(dtype, R, R1, num_charges):
np.random.seed(10)
D = 30
charges = [
BaseCharge(
np.random.randint(-5, 6, (D, num_charges)),
charge_types=[U1Charge] * num_charges) for n in range(R)
]
flows = [True] * R
A = BlockSparseTensor.random([Index(charges[n], flows[n]) for n in range(R)],
dtype=dtype)
r, q = decompositions.rq(bs, A, R1)
res = bs.tensordot(r, q, 1)
r_dense, q_dense = np_decompositions.rq(np, A.todense(), R1, False)
res2 = np.tensordot(r_dense, q_dense, 1)
np.testing.assert_almost_equal(res.todense(), res2)
@pytest.mark.parametrize("dtype", np_dtypes)
@pytest.mark.parametrize("R, R1", [(2, 1), (3, 2), (3, 1)])
def test_qr(dtype, R, R1):
np.random.seed(10)
D = 30
charges = [
U1Charge.random(dimension=D, minval=-5, maxval=5) for n in range(R)
]
flows = [True] * R
A = BlockSparseTensor.random([Index(charges[n], flows[n]) for n in range(R)],
dtype=dtype)
q, r = decompositions.qr(bs, A, R1)
res = bs.tensordot(q, r, 1)
q_dense, r_dense = np_decompositions.qr(np, A.todense(), R1, False)
res2 = np.tensordot(q_dense, r_dense, 1)
np.testing.assert_almost_equal(res.todense(), res2)
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from dropwizard import DropwizardCollector
##########################################################################
class TestDropwizardCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('DropwizardCollector', {})
self.collector = DropwizardCollector(config, None)
def test_import(self):
self.assertTrue(DropwizardCollector)
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
patch_urlopen = patch('urllib2.urlopen',
Mock(return_value=self.getFixture('stats')))
patch_urlopen.start()
self.collector.collect()
patch_urlopen.stop()
metrics = {
'jvm.memory.totalInit': 9.142272E7,
'jvm.memory.totalUsed': 1.29674584E8,
'jvm.memory.totalMax': 1.13901568E9,
'jvm.memory.totalCommitted': 1.62267136E8,
'jvm.memory.heapInit': 6.7108864E7,
'jvm.memory.heapUsed': 8.3715232E7,
'jvm.memory.heapMax': 9.54466304E8,
'jvm.memory.heapCommitted': 1.15539968E8,
'jvm.memory.heap_usage': 0.08770894441130528,
'jvm.memory.non_heap_usage': 0.24903553182428534,
'jvm.memory.code_cache': 0.038289388020833336,
'jvm.memory.eden_space': 0.1918924383560846,
'jvm.memory.old_gen': 0.022127459689416828,
'jvm.memory.perm_gen': 0.32806533575057983,
'jvm.memory.survivor_space': 1.0,
'jvm.daemon_thread_count': 10,
'jvm.thread_count': 27,
'jvm.fd_usage': 0.014892578125,
'jvm.thread_states.timed_waiting': 0.5185185185185185,
'jvm.thread_states.runnable': 0.0,
'jvm.thread_states.blocked': 0.0,
'jvm.thread_states.waiting': 0.2222222222222222,
'jvm.thread_states.new': 0.0,
'jvm.thread_states.terminated': 0.0
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_fail_gracefully(self, publish_mock):
patch_urlopen = patch(
'urllib2.urlopen',
Mock(
return_value=self.getFixture('stats_blank')))
patch_urlopen.start()
self.collector.collect()
patch_urlopen.stop()
self.assertPublishedMany(publish_mock, {})
##########################################################################
if __name__ == "__main__":
unittest.main()
|
import pytest
from qutebrowser.browser import downloads, qtnetworkdownloads
@pytest.fixture
def manager(config_stub, cookiejar_and_cache):
"""A QtNetwork download manager."""
return qtnetworkdownloads.DownloadManager()
def test_download_model(qapp, qtmodeltester, manager):
"""Simple check for download model internals."""
model = downloads.DownloadModel(manager)
qtmodeltester.check(model)
@pytest.mark.parametrize('url, title, out', [
('http://qutebrowser.org/INSTALL.html',
'Installing qutebrowser | qutebrowser',
'Installing qutebrowser _ qutebrowser.html'),
('http://qutebrowser.org/INSTALL.html',
'Installing qutebrowser | qutebrowser.html',
'Installing qutebrowser _ qutebrowser.html'),
('http://qutebrowser.org/INSTALL.HTML',
'Installing qutebrowser | qutebrowser',
'Installing qutebrowser _ qutebrowser.html'),
('http://qutebrowser.org/INSTALL.html',
'Installing qutebrowser | qutebrowser.HTML',
'Installing qutebrowser _ qutebrowser.HTML'),
('http://qutebrowser.org/',
'qutebrowser | qutebrowser',
'qutebrowser _ qutebrowser.html'),
('https://github.com/qutebrowser/qutebrowser/releases',
'Releases · qutebrowser/qutebrowser',
'Releases · qutebrowser_qutebrowser.html'),
('http://qutebrowser.org/index.php',
'qutebrowser | qutebrowser',
'qutebrowser _ qutebrowser.html'),
('http://qutebrowser.org/index.php',
'qutebrowser | qutebrowser - index.php',
'qutebrowser _ qutebrowser - index.php.html'),
('https://qutebrowser.org/img/cheatsheet-big.png',
'cheatsheet-big.png (3342×2060)',
None),
('http://qutebrowser.org/page-with-no-title.html',
'',
None),
])
@pytest.mark.fake_os('windows')
def test_page_titles(url, title, out):
assert downloads.suggested_fn_from_title(url, title) == out
class TestDownloadTarget:
def test_filename(self):
target = downloads.FileDownloadTarget("/foo/bar")
assert target.filename == "/foo/bar"
def test_fileobj(self):
fobj = object()
target = downloads.FileObjDownloadTarget(fobj)
assert target.fileobj is fobj
def test_openfile(self):
target = downloads.OpenFileDownloadTarget()
assert target.cmdline is None
def test_openfile_custom_command(self):
target = downloads.OpenFileDownloadTarget('echo')
assert target.cmdline == 'echo'
@pytest.mark.parametrize('obj', [
downloads.FileDownloadTarget('foobar'),
downloads.FileObjDownloadTarget(None),
downloads.OpenFileDownloadTarget(),
])
def test_class_hierarchy(self, obj):
assert isinstance(obj, downloads._DownloadTarget)
@pytest.mark.parametrize('raw, expected', [
pytest.param('http://foo/bar', 'bar',
marks=pytest.mark.fake_os('windows')),
pytest.param('A *|<>\\: bear!', 'A ______ bear!',
marks=pytest.mark.fake_os('windows')),
pytest.param('A *|<>\\: bear!', 'A *|<>\\: bear!',
marks=[pytest.mark.fake_os('posix'), pytest.mark.posix]),
])
def test_sanitized_filenames(raw, expected,
config_stub, download_tmpdir, monkeypatch):
manager = downloads.AbstractDownloadManager()
target = downloads.FileDownloadTarget(str(download_tmpdir))
item = downloads.AbstractDownloadItem(manager=manager)
# Don't try to start a timer outside of a QThread
manager._update_timer.isActive = lambda: True
# Abstract methods
item._ensure_can_set_filename = lambda *args: True
item._after_set_filename = lambda *args: True
# Don't try to get current window
monkeypatch.setattr(item, '_get_conflicting_download', list)
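    # `list` works as a cheap stand-in callable here: calling it with no
    # arguments returns an empty (falsy) list, i.e. "no conflicting download".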
manager._init_item(item, True, raw)
item.set_target(target)
assert item._filename.endswith(expected)
class TestConflictingDownloads:
@pytest.fixture
def item1(self, manager):
return downloads.AbstractDownloadItem(manager=manager)
@pytest.fixture
def item2(self, manager):
return downloads.AbstractDownloadItem(manager=manager)
def test_no_downloads(self, item1):
item1._filename = 'download.txt'
assert item1._get_conflicting_download() is None
@pytest.mark.parametrize('filename1, filename2, done, conflict', [
# Different name
('download.txt', 'download2.txt', False, False),
# Finished
('download.txt', 'download.txt', True, False),
# Conflict
('download.txt', 'download.txt', False, True),
])
def test_conflicts(self, manager, item1, item2,
filename1, filename2, done, conflict):
item1._filename = filename1
item2._filename = filename2
item2.done = done
manager.downloads.append(item1)
manager.downloads.append(item2)
expected = item2 if conflict else None
assert item1._get_conflicting_download() is expected
def test_cancel_conflicting_downloads(self, manager, item1, item2, monkeypatch):
item1._filename = 'download.txt'
item2._filename = 'download.txt'
item2.done = False
manager.downloads.append(item1)
manager.downloads.append(item2)
def patched_cancel(remove_data=True):
assert not remove_data
item2.done = True
monkeypatch.setattr(item2, 'cancel', patched_cancel)
monkeypatch.setattr(item1, '_after_set_filename', lambda: None)
item1._cancel_conflicting_download()
assert item2.done
|
from homeassistant.const import STATE_OFF, STATE_ON
from tests.common import MockToggleEntity
ENTITIES = []
def init(empty=False):
"""Initialize the platform with entities."""
global ENTITIES
ENTITIES = (
[]
if empty
else [
MockToggleEntity("AC", STATE_ON),
MockToggleEntity("AC", STATE_OFF),
MockToggleEntity(None, STATE_OFF),
]
)
async def async_setup_platform(
hass, config, async_add_entities_callback, discovery_info=None
):
"""Return mock entities."""
async_add_entities_callback(ENTITIES)
|
from abc import ABC, abstractmethod
from samsungctl import Remote
from samsungctl.exceptions import AccessDenied, ConnectionClosed, UnhandledResponse
from samsungtvws import SamsungTVWS
from samsungtvws.exceptions import ConnectionFailure
from websocket import WebSocketException
from homeassistant.const import (
CONF_HOST,
CONF_ID,
CONF_METHOD,
CONF_NAME,
CONF_PORT,
CONF_TIMEOUT,
CONF_TOKEN,
)
from .const import (
CONF_DESCRIPTION,
LOGGER,
METHOD_LEGACY,
RESULT_AUTH_MISSING,
RESULT_CANNOT_CONNECT,
RESULT_NOT_SUPPORTED,
RESULT_SUCCESS,
VALUE_CONF_ID,
VALUE_CONF_NAME,
)
class SamsungTVBridge(ABC):
"""The Base Bridge abstract class."""
@staticmethod
def get_bridge(method, host, port=None, token=None):
"""Get Bridge instance."""
if method == METHOD_LEGACY:
return SamsungTVLegacyBridge(method, host, port)
return SamsungTVWSBridge(method, host, port, token)
def __init__(self, method, host, port):
"""Initialize Bridge."""
self.port = port
self.method = method
self.host = host
self.token = None
self.default_port = None
self._remote = None
self._callback = None
def register_reauth_callback(self, func):
"""Register a callback function."""
self._callback = func
@abstractmethod
def try_connect(self):
"""Try to connect to the TV."""
def is_on(self):
"""Tells if the TV is on."""
self.close_remote()
try:
return self._get_remote() is not None
except (
UnhandledResponse,
AccessDenied,
ConnectionFailure,
):
# We got a response so it's working.
return True
except OSError:
            # Different reasons, e.g. hostname not resolvable
return False
def send_key(self, key):
"""Send a key to the tv and handles exceptions."""
try:
# recreate connection if connection was dead
retry_count = 1
for _ in range(retry_count + 1):
try:
self._send_key(key)
break
except (
ConnectionClosed,
BrokenPipeError,
WebSocketException,
):
                    # BrokenPipe can occur when commands are sent too fast
                    # WebSocketException can occur when the connection times out
self._remote = None
except (UnhandledResponse, AccessDenied):
# We got a response so it's on.
LOGGER.debug("Failed sending command %s", key, exc_info=True)
except OSError:
            # Different reasons, e.g. hostname not resolvable
pass
@abstractmethod
def _send_key(self, key):
"""Send the key."""
@abstractmethod
def _get_remote(self):
"""Get Remote object."""
def close_remote(self):
"""Close remote object."""
try:
if self._remote is not None:
# Close the current remote connection
self._remote.close()
self._remote = None
except OSError:
LOGGER.debug("Could not establish connection")
def _notify_callback(self):
"""Notify access denied callback."""
if self._callback:
self._callback()
class SamsungTVLegacyBridge(SamsungTVBridge):
"""The Bridge for Legacy TVs."""
def __init__(self, method, host, port):
"""Initialize Bridge."""
super().__init__(method, host, None)
self.config = {
CONF_NAME: VALUE_CONF_NAME,
CONF_DESCRIPTION: VALUE_CONF_NAME,
CONF_ID: VALUE_CONF_ID,
CONF_HOST: host,
CONF_METHOD: method,
CONF_PORT: None,
CONF_TIMEOUT: 1,
}
def try_connect(self):
"""Try to connect to the Legacy TV."""
config = {
CONF_NAME: VALUE_CONF_NAME,
CONF_DESCRIPTION: VALUE_CONF_NAME,
CONF_ID: VALUE_CONF_ID,
CONF_HOST: self.host,
CONF_METHOD: self.method,
CONF_PORT: None,
# We need this high timeout because waiting for auth popup is just an open socket
CONF_TIMEOUT: 31,
}
try:
LOGGER.debug("Try config: %s", config)
with Remote(config.copy()):
LOGGER.debug("Working config: %s", config)
return RESULT_SUCCESS
except AccessDenied:
LOGGER.debug("Working but denied config: %s", config)
return RESULT_AUTH_MISSING
except UnhandledResponse:
LOGGER.debug("Working but unsupported config: %s", config)
return RESULT_NOT_SUPPORTED
except OSError as err:
LOGGER.debug("Failing config: %s, error: %s", config, err)
return RESULT_CANNOT_CONNECT
def _get_remote(self):
"""Create or return a remote control instance."""
if self._remote is None:
# We need to create a new instance to reconnect.
try:
LOGGER.debug("Create SamsungRemote")
self._remote = Remote(self.config.copy())
# This is only happening when the auth was switched to DENY
# A removed auth will lead to socket timeout because waiting for auth popup is just an open socket
except AccessDenied:
self._notify_callback()
raise
return self._remote
def _send_key(self, key):
"""Send the key using legacy protocol."""
self._get_remote().control(key)
class SamsungTVWSBridge(SamsungTVBridge):
"""The Bridge for WebSocket TVs."""
def __init__(self, method, host, port, token=None):
"""Initialize Bridge."""
super().__init__(method, host, port)
self.token = token
self.default_port = 8001
def try_connect(self):
"""Try to connect to the Websocket TV."""
for self.port in (8001, 8002):
config = {
CONF_NAME: VALUE_CONF_NAME,
CONF_HOST: self.host,
CONF_METHOD: self.method,
CONF_PORT: self.port,
# We need this high timeout because waiting for auth popup is just an open socket
CONF_TIMEOUT: 31,
}
result = None
try:
LOGGER.debug("Try config: %s", config)
with SamsungTVWS(
host=self.host,
port=self.port,
token=self.token,
timeout=config[CONF_TIMEOUT],
name=config[CONF_NAME],
) as remote:
remote.open()
self.token = remote.token
if self.token:
config[CONF_TOKEN] = "*****"
LOGGER.debug("Working config: %s", config)
return RESULT_SUCCESS
except WebSocketException:
LOGGER.debug("Working but unsupported config: %s", config)
result = RESULT_NOT_SUPPORTED
except (OSError, ConnectionFailure) as err:
LOGGER.debug("Failing config: %s, error: %s", config, err)
# pylint: disable=useless-else-on-loop
else:
if result:
return result
return RESULT_CANNOT_CONNECT
def _send_key(self, key):
"""Send the key using websocket protocol."""
if key == "KEY_POWEROFF":
key = "KEY_POWER"
self._get_remote().send_key(key)
def _get_remote(self):
"""Create or return a remote control instance."""
if self._remote is None:
# We need to create a new instance to reconnect.
try:
LOGGER.debug("Create SamsungTVWS")
self._remote = SamsungTVWS(
host=self.host,
port=self.port,
token=self.token,
timeout=8,
name=VALUE_CONF_NAME,
)
self._remote.open()
# This is only happening when the auth was switched to DENY
# A removed auth will lead to socket timeout because waiting for auth popup is just an open socket
except ConnectionFailure:
self._notify_callback()
raise
except WebSocketException:
self._remote = None
return self._remote
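# --- Editor's addition: a minimal, hedged usage sketch (not part of the original
# module).  It only illustrates how SamsungTVBridge.get_bridge dispatches on the
# configured method: METHOD_LEGACY yields the legacy Remote-based bridge, any
# other method the websocket bridge.  Host and port below are placeholders.
def _example_bridge_dispatch():
    legacy = SamsungTVBridge.get_bridge(METHOD_LEGACY, "192.168.1.10")
    websocket = SamsungTVBridge.get_bridge("websocket", "192.168.1.10", port=8002)
    # Expected: ("SamsungTVLegacyBridge", "SamsungTVWSBridge")
    return type(legacy).__name__, type(websocket).__name__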
|
from homeassistant.const import STATE_UNKNOWN
from homeassistant.helpers import discovery
from homeassistant.setup import async_setup_component
from .common import NUMATO_CFG, mockup_raise
MOCKUP_ENTITY_IDS = {
"sensor.numato_adc_mock_port1",
}
async def test_failing_setups_no_entities(hass, numato_fixture, monkeypatch):
"""When port setup fails, no entity shall be created."""
monkeypatch.setattr(numato_fixture.NumatoDeviceMock, "setup", mockup_raise)
assert await async_setup_component(hass, "numato", NUMATO_CFG)
await hass.async_block_till_done()
for entity_id in MOCKUP_ENTITY_IDS:
assert entity_id not in hass.states.async_entity_ids()
async def test_failing_sensor_update(hass, numato_fixture, monkeypatch):
"""Test condition when a sensor update fails."""
monkeypatch.setattr(numato_fixture.NumatoDeviceMock, "adc_read", mockup_raise)
assert await async_setup_component(hass, "numato", NUMATO_CFG)
await hass.async_block_till_done()
assert hass.states.get("sensor.numato_adc_mock_port1").state is STATE_UNKNOWN
async def test_sensor_setup_without_discovery_info(hass, config, numato_fixture):
"""Test handling of empty discovery_info."""
numato_fixture.discover()
await discovery.async_load_platform(hass, "sensor", "numato", None, config)
for entity_id in MOCKUP_ENTITY_IDS:
assert entity_id not in hass.states.async_entity_ids()
await hass.async_block_till_done() # wait for numato platform to be loaded
for entity_id in MOCKUP_ENTITY_IDS:
assert entity_id in hass.states.async_entity_ids()
|
import unittest
import numpy as np
import bqplot.pyplot as plt
class TestBqplot(unittest.TestCase):
def test_figure(self):
size = 100
scale = 100.0
np.random.seed(0)
x_data = np.arange(size)
y_data = np.cumsum(np.random.randn(size) * scale)
fig = plt.figure(title='First Example')
plt.plot(y_data)
fig.save_png()
|
from datetime import timedelta
import logging
from pyhik.hikvision import HikCamera
import voluptuous as vol
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_CONNECTIVITY,
DEVICE_CLASS_MOTION,
PLATFORM_SCHEMA,
BinarySensorEntity,
)
from homeassistant.const import (
ATTR_LAST_TRIP_TIME,
CONF_CUSTOMIZE,
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_SSL,
CONF_USERNAME,
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import track_point_in_utc_time
from homeassistant.util.dt import utcnow
_LOGGER = logging.getLogger(__name__)
CONF_IGNORED = "ignored"
CONF_DELAY = "delay"
DEFAULT_PORT = 80
DEFAULT_IGNORED = False
DEFAULT_DELAY = 0
ATTR_DELAY = "delay"
DEVICE_CLASS_MAP = {
"Motion": DEVICE_CLASS_MOTION,
"Line Crossing": DEVICE_CLASS_MOTION,
"Field Detection": DEVICE_CLASS_MOTION,
"Video Loss": None,
"Tamper Detection": DEVICE_CLASS_MOTION,
"Shelter Alarm": None,
"Disk Full": None,
"Disk Error": None,
"Net Interface Broken": DEVICE_CLASS_CONNECTIVITY,
"IP Conflict": DEVICE_CLASS_CONNECTIVITY,
"Illegal Access": None,
"Video Mismatch": None,
"Bad Video": None,
"PIR Alarm": DEVICE_CLASS_MOTION,
"Face Detection": DEVICE_CLASS_MOTION,
"Scene Change Detection": DEVICE_CLASS_MOTION,
"I/O": None,
"Unattended Baggage": DEVICE_CLASS_MOTION,
"Attended Baggage": DEVICE_CLASS_MOTION,
"Recording Failure": None,
"Exiting Region": DEVICE_CLASS_MOTION,
"Entering Region": DEVICE_CLASS_MOTION,
}
CUSTOMIZE_SCHEMA = vol.Schema(
{
vol.Optional(CONF_IGNORED, default=DEFAULT_IGNORED): cv.boolean,
vol.Optional(CONF_DELAY, default=DEFAULT_DELAY): cv.positive_int,
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_NAME): cv.string,
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_SSL, default=False): cv.boolean,
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_CUSTOMIZE, default={}): vol.Schema(
{cv.string: CUSTOMIZE_SCHEMA}
),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Hikvision binary sensor devices."""
name = config.get(CONF_NAME)
host = config.get(CONF_HOST)
port = config.get(CONF_PORT)
username = config.get(CONF_USERNAME)
password = config.get(CONF_PASSWORD)
customize = config.get(CONF_CUSTOMIZE)
protocol = "https" if config[CONF_SSL] else "http"
url = f"{protocol}://{host}"
data = HikvisionData(hass, url, port, name, username, password)
if data.sensors is None:
_LOGGER.error("Hikvision event stream has no data, unable to set up")
return False
entities = []
for sensor, channel_list in data.sensors.items():
for channel in channel_list:
# Build sensor name, then parse customize config.
if data.type == "NVR":
sensor_name = f"{sensor.replace(' ', '_')}_{channel[1]}"
else:
sensor_name = sensor.replace(" ", "_")
custom = customize.get(sensor_name.lower(), {})
ignore = custom.get(CONF_IGNORED)
delay = custom.get(CONF_DELAY)
_LOGGER.debug(
"Entity: %s - %s, Options - Ignore: %s, Delay: %s",
data.name,
sensor_name,
ignore,
delay,
)
if not ignore:
entities.append(
HikvisionBinarySensor(hass, sensor, channel[1], data, delay)
)
add_entities(entities)
class HikvisionData:
"""Hikvision device event stream object."""
def __init__(self, hass, url, port, name, username, password):
"""Initialize the data object."""
self._url = url
self._port = port
self._name = name
self._username = username
self._password = password
# Establish camera
self.camdata = HikCamera(self._url, self._port, self._username, self._password)
if self._name is None:
self._name = self.camdata.get_name
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, self.stop_hik)
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, self.start_hik)
def stop_hik(self, event):
"""Shutdown Hikvision subscriptions and subscription thread on exit."""
self.camdata.disconnect()
def start_hik(self, event):
"""Start Hikvision event stream thread."""
self.camdata.start_stream()
@property
def sensors(self):
"""Return list of available sensors and their states."""
return self.camdata.current_event_states
@property
def cam_id(self):
"""Return device id."""
return self.camdata.get_id
@property
def name(self):
"""Return device name."""
return self._name
@property
def type(self):
"""Return device type."""
return self.camdata.get_type
def get_attributes(self, sensor, channel):
"""Return attribute list for sensor/channel."""
return self.camdata.fetch_attributes(sensor, channel)
class HikvisionBinarySensor(BinarySensorEntity):
"""Representation of a Hikvision binary sensor."""
def __init__(self, hass, sensor, channel, cam, delay):
"""Initialize the binary_sensor."""
self._hass = hass
self._cam = cam
self._sensor = sensor
self._channel = channel
if self._cam.type == "NVR":
self._name = f"{self._cam.name} {sensor} {channel}"
else:
self._name = f"{self._cam.name} {sensor}"
self._id = f"{self._cam.cam_id}.{sensor}.{channel}"
if delay is None:
self._delay = 0
else:
self._delay = delay
self._timer = None
# Register callback function with pyHik
self._cam.camdata.add_update_callback(self._update_callback, self._id)
def _sensor_state(self):
"""Extract sensor state."""
return self._cam.get_attributes(self._sensor, self._channel)[0]
def _sensor_last_update(self):
"""Extract sensor last update time."""
return self._cam.get_attributes(self._sensor, self._channel)[3]
@property
def name(self):
"""Return the name of the Hikvision sensor."""
return self._name
@property
def unique_id(self):
"""Return a unique ID."""
return self._id
@property
def is_on(self):
"""Return true if sensor is on."""
return self._sensor_state()
@property
def device_class(self):
"""Return the class of this sensor, from DEVICE_CLASSES."""
try:
return DEVICE_CLASS_MAP[self._sensor]
except KeyError:
# Sensor must be unknown to us, add as generic
return None
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def device_state_attributes(self):
"""Return the state attributes."""
attr = {ATTR_LAST_TRIP_TIME: self._sensor_last_update()}
if self._delay != 0:
attr[ATTR_DELAY] = self._delay
return attr
def _update_callback(self, msg):
"""Update the sensor's state, if needed."""
_LOGGER.debug("Callback signal from: %s", msg)
if self._delay > 0 and not self.is_on:
# Set timer to wait until updating the state
def _delay_update(now):
"""Timer callback for sensor update."""
_LOGGER.debug(
"%s Called delayed (%ssec) update", self._name, self._delay
)
self.schedule_update_ha_state()
self._timer = None
if self._timer is not None:
self._timer()
self._timer = None
self._timer = track_point_in_utc_time(
self._hass, _delay_update, utcnow() + timedelta(seconds=self._delay)
)
elif self._delay > 0 and self.is_on:
# For delayed sensors kill any callbacks on true events and update
if self._timer is not None:
self._timer()
self._timer = None
self.schedule_update_ha_state()
else:
self.schedule_update_ha_state()
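# --- Editor's addition: a hedged example of a configuration mapping that the
# PLATFORM_SCHEMA above would accept.  Host and credentials are placeholders;
# the "delay" option postpones the state update of that event type by five
# seconds, and "ignored" skips entity creation for it.
_EXAMPLE_PLATFORM_CONFIG = {
    "platform": "hikvision",
    "host": "192.168.1.64",
    "port": 80,
    "ssl": False,
    "username": "admin",
    "password": "password",
    "customize": {"motion": {"ignored": False, "delay": 5}},
}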
|
import abodepy.helpers.constants as CONST
import pytest
from tests.common import load_fixture
from tests.components.light.conftest import mock_light_profiles # noqa
@pytest.fixture(autouse=True)
def requests_mock_fixture(requests_mock):
"""Fixture to provide a requests mocker."""
# Mocks the login response for abodepy.
requests_mock.post(CONST.LOGIN_URL, text=load_fixture("abode_login.json"))
# Mocks the logout response for abodepy.
requests_mock.post(CONST.LOGOUT_URL, text=load_fixture("abode_logout.json"))
# Mocks the oauth claims response for abodepy.
requests_mock.get(
CONST.OAUTH_TOKEN_URL, text=load_fixture("abode_oauth_claims.json")
)
# Mocks the panel response for abodepy.
requests_mock.get(CONST.PANEL_URL, text=load_fixture("abode_panel.json"))
# Mocks the automations response for abodepy.
requests_mock.get(CONST.AUTOMATION_URL, text=load_fixture("abode_automation.json"))
# Mocks the devices response for abodepy.
requests_mock.get(CONST.DEVICES_URL, text=load_fixture("abode_devices.json"))
|
import copy
from homeassistant.components.calendar import CalendarEventDevice, get_date
import homeassistant.util.dt as dt_util
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Demo Calendar platform."""
calendar_data_future = DemoGoogleCalendarDataFuture()
calendar_data_current = DemoGoogleCalendarDataCurrent()
add_entities(
[
DemoGoogleCalendar(hass, calendar_data_future, "Calendar 1"),
DemoGoogleCalendar(hass, calendar_data_current, "Calendar 2"),
]
)
class DemoGoogleCalendarData:
"""Representation of a Demo Calendar element."""
event = None
async def async_get_events(self, hass, start_date, end_date):
"""Get all events in a specific time frame."""
event = copy.copy(self.event)
event["title"] = event["summary"]
event["start"] = get_date(event["start"]).isoformat()
event["end"] = get_date(event["end"]).isoformat()
return [event]
class DemoGoogleCalendarDataFuture(DemoGoogleCalendarData):
"""Representation of a Demo Calendar for a future event."""
def __init__(self):
"""Set the event to a future event."""
        half_hour_from_now = dt_util.now() + dt_util.dt.timedelta(minutes=30)
        self.event = {
            "start": {"dateTime": half_hour_from_now.isoformat()},
            "end": {
                "dateTime": (
                    half_hour_from_now + dt_util.dt.timedelta(minutes=60)
                ).isoformat()
            },
            "summary": "Future Event",
        }
class DemoGoogleCalendarDataCurrent(DemoGoogleCalendarData):
"""Representation of a Demo Calendar for a current event."""
def __init__(self):
"""Set the event data."""
middle_of_event = dt_util.now() - dt_util.dt.timedelta(minutes=30)
self.event = {
"start": {"dateTime": middle_of_event.isoformat()},
"end": {
"dateTime": (
middle_of_event + dt_util.dt.timedelta(minutes=60)
).isoformat()
},
"summary": "Current Event",
}
class DemoGoogleCalendar(CalendarEventDevice):
"""Representation of a Demo Calendar element."""
def __init__(self, hass, calendar_data, name):
"""Initialize demo calendar."""
self.data = calendar_data
self._name = name
@property
def event(self):
"""Return the next upcoming event."""
return self.data.event
@property
def name(self):
"""Return the name of the entity."""
return self._name
async def async_get_events(self, hass, start_date, end_date):
"""Return calendar events within a datetime range."""
return await self.data.async_get_events(hass, start_date, end_date)
|
from homeassistant.components import islamic_prayer_times
import homeassistant.util.dt as dt_util
from . import NOW, PRAYER_TIMES, PRAYER_TIMES_TIMESTAMPS
from tests.async_mock import patch
from tests.common import MockConfigEntry
async def test_islamic_prayer_times_sensors(hass, legacy_patchable_time):
"""Test minimum Islamic prayer times configuration."""
entry = MockConfigEntry(domain=islamic_prayer_times.DOMAIN, data={})
entry.add_to_hass(hass)
with patch(
"prayer_times_calculator.PrayerTimesCalculator.fetch_prayer_times",
return_value=PRAYER_TIMES,
), patch("homeassistant.util.dt.now", return_value=NOW):
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
for prayer in PRAYER_TIMES:
assert (
hass.states.get(
f"sensor.{prayer}_{islamic_prayer_times.const.SENSOR_TYPES[prayer]}"
).state
== PRAYER_TIMES_TIMESTAMPS[prayer].astimezone(dt_util.UTC).isoformat()
)
|
from unittest import TestCase
import pandas as pd
from scattertext.TermDocMatrixFromFrequencies import TermDocMatrixFromFrequencies
from scattertext.termcompaction.CompactTerms import CompactTerms
class TestCompactTerms(TestCase):
def test_get_term_indices_to_compact(self):
'''
term_doc_matrix = TermDocMatrixFromPandas(ConventionData2012().get_data(),
category_col='party',
text_col='text',
nlp=whitespace_nlp_with_sentences).build()
term_freq_df = term_doc_matrix.get_term_freq_df()
'''
term_doc_mat = TermDocMatrixFromFrequencies(pd.DataFrame({
'term': ['a', 'a b', 'a c', 'c', 'b', 'e b', 'e'],
'A freq': [6, 3, 3, 3, 5, 0, 0],
'B freq': [6, 3, 3, 3, 5, 1, 1],
}).set_index('term')).build()
new_tdm = CompactTerms(minimum_term_count=2).compact(term_doc_mat)
self.assertEqual(term_doc_mat.get_terms(), ['a', 'a b', 'a c', 'c', 'b', 'e b', 'e'])
self.assertEqual(set(new_tdm.get_terms()),
set(term_doc_mat.get_terms()) - {'c', 'e b', 'e'})
new_tdm = CompactTerms(minimum_term_count=1).compact(term_doc_mat)
self.assertEqual(set(new_tdm.get_terms()),
set(term_doc_mat.get_terms()) - {'c', 'e'})
term_doc_mat = TermDocMatrixFromFrequencies(pd.DataFrame({
'term': ['a', 'a b', 'b'],
'A freq': [5, 4, 8],
'B freq': [1, 1, 1],
}).set_index('term')).build()
self.assertEqual(set(CompactTerms(minimum_term_count=0, slack=0).compact(term_doc_mat).get_terms()), set(['a', 'a b', 'b']))
self.assertEqual(set(CompactTerms(minimum_term_count=0, slack=2).compact(term_doc_mat).get_terms()), set(['b', 'a b']))
|
import unittest
import chainer
from chainer.backends import cuda
from chainer.functions import relu
from chainer import testing
from chainer.testing import attr
from chainercv.links import SeparableConv2DBNActiv
import numpy as np
def _add_one(x):
return x + 1
@testing.parameterize(*testing.product({
'dilate': [1, 2],
'activ': ['relu', 'add_one', None],
}))
class TestSeparableConv2DBNActiv(unittest.TestCase):
in_channels = 3
out_channels = 3
ksize = 3
stride = 1
pad = 1
def setUp(self):
if self.activ == 'relu':
activ = relu
elif self.activ == 'add_one':
activ = _add_one
elif self.activ is None:
activ = None
self.x = np.random.uniform(
-1, 1, (5, self.in_channels, 5, 5)).astype(np.float32)
self.gy = np.random.uniform(
-1, 1, (5, self.out_channels, 5, 5)).astype(np.float32)
# Convolution is the identity function.
dw_initialW = np.array(
[[0, 0, 0], [0, 1, 0], [0, 0, 0]] * self.in_channels,
dtype=np.float32).reshape((self.in_channels, 1, 3, 3))
pw_initialW = np.eye(
self.in_channels, self.out_channels,
dtype=np.float32).reshape(
(self.out_channels, self.in_channels, 1, 1))
bn_kwargs = {'decay': 0.8}
self.l = SeparableConv2DBNActiv(
self.in_channels, self.out_channels, self.ksize,
self.stride, self.pad, self.dilate,
dw_initialW=dw_initialW, pw_initialW=pw_initialW,
dw_activ=activ, pw_activ=None, bn_kwargs=bn_kwargs)
def check_forward(self, x_data):
x = chainer.Variable(x_data)
        # Make the batch normalization the identity function.
self.l.dw_bn.avg_var[:] = 1
self.l.dw_bn.avg_mean[:] = 0
self.l.pw_bn.avg_var[:] = 1
self.l.pw_bn.avg_mean[:] = 0
with chainer.using_config('train', False):
y = self.l(x)
self.assertIsInstance(y, chainer.Variable)
self.assertIsInstance(y.array, self.l.xp.ndarray)
if self.dilate == 1:
_x_data = x_data
elif self.dilate == 2:
_x_data = x_data[:, :, 1:-1, 1:-1]
if self.activ == 'relu':
np.testing.assert_almost_equal(
cuda.to_cpu(y.array), np.maximum(cuda.to_cpu(_x_data), 0),
decimal=4
)
elif self.activ == 'add_one':
np.testing.assert_almost_equal(
cuda.to_cpu(y.array), cuda.to_cpu(_x_data) + 1,
decimal=4
)
elif self.activ is None:
np.testing.assert_almost_equal(
cuda.to_cpu(y.array), cuda.to_cpu(_x_data),
decimal=4
)
def test_forward_cpu(self):
self.check_forward(self.x)
@attr.gpu
def test_forward_gpu(self):
self.l.to_gpu()
self.check_forward(cuda.to_gpu(self.x))
def check_backward(self, x_data, y_grad):
x = chainer.Variable(x_data)
y = self.l(x)
if self.dilate == 1:
y.grad = y_grad
elif self.dilate == 2:
y.grad = y_grad[:, :, 1:-1, 1:-1]
y.backward()
def test_backward_cpu(self):
self.check_backward(self.x, self.gy)
@attr.gpu
def test_backward_gpu(self):
self.l.to_gpu()
self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.gy))
testing.run_module(__name__, __file__)
|
import os.path as op
import numpy as np
import pytest
from numpy.testing import (assert_array_equal, assert_array_almost_equal,
assert_allclose, assert_equal)
from mne import io, read_events, Epochs, pick_types
from mne.decoding import (Scaler, FilterEstimator, PSDEstimator, Vectorizer,
UnsupervisedSpatialFilter, TemporalFilter)
from mne.defaults import DEFAULTS
from mne.utils import requires_sklearn, run_tests_if_main, check_version
tmin, tmax = -0.2, 0.5
event_id = dict(aud_l=1, vis_l=3)
start, stop = 0, 8
data_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
raw_fname = op.join(data_dir, 'test_raw.fif')
event_name = op.join(data_dir, 'test-eve.fif')
def test_scaler():
"""Test methods of Scaler."""
raw = io.read_raw_fif(raw_fname)
events = read_events(event_name)
picks = pick_types(raw.info, meg=True, stim=False, ecg=False,
eog=False, exclude='bads')
picks = picks[1:13:3]
epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
baseline=(None, 0), preload=True)
epochs_data = epochs.get_data()
y = epochs.events[:, -1]
methods = (None, dict(mag=5, grad=10, eeg=20), 'mean', 'median')
infos = (epochs.info, epochs.info, None, None)
epochs_data_t = epochs_data.transpose([1, 0, 2])
for method, info in zip(methods, infos):
if method in ('mean', 'median') and not check_version('sklearn'):
with pytest.raises(ImportError, match='No module'):
Scaler(info, method)
continue
scaler = Scaler(info, method)
X = scaler.fit_transform(epochs_data, y)
assert_equal(X.shape, epochs_data.shape)
if method is None or isinstance(method, dict):
sd = DEFAULTS['scalings'] if method is None else method
stds = np.zeros(len(picks))
for key in ('mag', 'grad'):
stds[pick_types(epochs.info, meg=key)] = 1. / sd[key]
stds[pick_types(epochs.info, meg=False, eeg=True)] = 1. / sd['eeg']
means = np.zeros(len(epochs.ch_names))
elif method == 'mean':
stds = np.array([np.std(ch_data) for ch_data in epochs_data_t])
means = np.array([np.mean(ch_data) for ch_data in epochs_data_t])
else: # median
percs = np.array([np.percentile(ch_data, [25, 50, 75])
for ch_data in epochs_data_t])
stds = percs[:, 2] - percs[:, 0]
means = percs[:, 1]
assert_allclose(X * stds[:, np.newaxis] + means[:, np.newaxis],
epochs_data, rtol=1e-12, atol=1e-20, err_msg=method)
X2 = scaler.fit(epochs_data, y).transform(epochs_data)
assert_array_equal(X, X2)
# inverse_transform
Xi = scaler.inverse_transform(X)
assert_array_almost_equal(epochs_data, Xi)
# Test init exception
pytest.raises(ValueError, Scaler, None, None)
pytest.raises(TypeError, scaler.fit, epochs, y)
pytest.raises(TypeError, scaler.transform, epochs)
epochs_bad = Epochs(raw, events, event_id, 0, 0.01, baseline=None,
picks=np.arange(len(raw.ch_names))) # non-data chs
scaler = Scaler(epochs_bad.info, None)
pytest.raises(ValueError, scaler.fit, epochs_bad.get_data(), y)
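# --- Editor's addition: a minimal, shape-only sketch (assumes scikit-learn is
# available).  Scaler operates on arrays of shape (n_epochs, n_channels,
# n_times); with scalings='mean' each channel is standardised using statistics
# pooled over epochs and time.  The data below are synthetic placeholders.
def _scaler_shape_example():
    rng = np.random.RandomState(0)
    data = rng.randn(10, 4, 20)
    return Scaler(scalings='mean').fit_transform(data).shape  # (10, 4, 20)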
def test_filterestimator():
"""Test methods of FilterEstimator."""
raw = io.read_raw_fif(raw_fname)
events = read_events(event_name)
picks = pick_types(raw.info, meg=True, stim=False, ecg=False,
eog=False, exclude='bads')
picks = picks[1:13:3]
epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
baseline=(None, 0), preload=True)
epochs_data = epochs.get_data()
# Add tests for different combinations of l_freq and h_freq
filt = FilterEstimator(epochs.info, l_freq=40, h_freq=80)
y = epochs.events[:, -1]
X = filt.fit_transform(epochs_data, y)
assert (X.shape == epochs_data.shape)
assert_array_equal(filt.fit(epochs_data, y).transform(epochs_data), X)
filt = FilterEstimator(epochs.info, l_freq=None, h_freq=40,
filter_length='auto',
l_trans_bandwidth='auto', h_trans_bandwidth='auto')
y = epochs.events[:, -1]
X = filt.fit_transform(epochs_data, y)
filt = FilterEstimator(epochs.info, l_freq=1, h_freq=1)
y = epochs.events[:, -1]
with pytest.warns(RuntimeWarning, match='longer than the signal'):
pytest.raises(ValueError, filt.fit_transform, epochs_data, y)
filt = FilterEstimator(epochs.info, l_freq=40, h_freq=None,
filter_length='auto',
l_trans_bandwidth='auto', h_trans_bandwidth='auto')
X = filt.fit_transform(epochs_data, y)
# Test init exception
pytest.raises(ValueError, filt.fit, epochs, y)
pytest.raises(ValueError, filt.transform, epochs)
def test_psdestimator():
"""Test methods of PSDEstimator."""
raw = io.read_raw_fif(raw_fname)
events = read_events(event_name)
picks = pick_types(raw.info, meg=True, stim=False, ecg=False,
eog=False, exclude='bads')
picks = picks[1:13:3]
epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
baseline=(None, 0), preload=True)
epochs_data = epochs.get_data()
psd = PSDEstimator(2 * np.pi, 0, np.inf)
y = epochs.events[:, -1]
X = psd.fit_transform(epochs_data, y)
assert (X.shape[0] == epochs_data.shape[0])
assert_array_equal(psd.fit(epochs_data, y).transform(epochs_data), X)
# Test init exception
pytest.raises(ValueError, psd.fit, epochs, y)
pytest.raises(ValueError, psd.transform, epochs)
def test_vectorizer():
"""Test Vectorizer."""
data = np.random.rand(150, 18, 6)
vect = Vectorizer()
result = vect.fit_transform(data)
assert_equal(result.ndim, 2)
    # check inverse_transform
orig_data = vect.inverse_transform(result)
assert_equal(orig_data.ndim, 3)
assert_array_equal(orig_data, data)
assert_array_equal(vect.inverse_transform(result[1:]), data[1:])
# check with different shape
assert_equal(vect.fit_transform(np.random.rand(150, 18, 6, 3)).shape,
(150, 324))
assert_equal(vect.fit_transform(data[1:]).shape, (149, 108))
# check if raised errors are working correctly
vect.fit(np.random.rand(105, 12, 3))
pytest.raises(ValueError, vect.transform, np.random.rand(105, 12, 3, 1))
pytest.raises(ValueError, vect.inverse_transform,
np.random.rand(102, 12, 12))
@requires_sklearn
def test_unsupervised_spatial_filter():
"""Test unsupervised spatial filter."""
from sklearn.decomposition import PCA
from sklearn.kernel_ridge import KernelRidge
raw = io.read_raw_fif(raw_fname)
events = read_events(event_name)
picks = pick_types(raw.info, meg=True, stim=False, ecg=False,
eog=False, exclude='bads')
picks = picks[1:13:3]
epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
preload=True, baseline=None, verbose=False)
# Test estimator
pytest.raises(ValueError, UnsupervisedSpatialFilter, KernelRidge(2))
# Test fit
X = epochs.get_data()
n_components = 4
usf = UnsupervisedSpatialFilter(PCA(n_components))
usf.fit(X)
usf1 = UnsupervisedSpatialFilter(PCA(n_components))
# test transform
assert_equal(usf.transform(X).ndim, 3)
# test fit_transform
assert_array_almost_equal(usf.transform(X), usf1.fit_transform(X))
assert_equal(usf.transform(X).shape[1], n_components)
assert_array_almost_equal(usf.inverse_transform(usf.transform(X)), X)
# Test with average param
usf = UnsupervisedSpatialFilter(PCA(4), average=True)
usf.fit_transform(X)
pytest.raises(ValueError, UnsupervisedSpatialFilter, PCA(4), 2)
def test_temporal_filter():
"""Test methods of TemporalFilter."""
X = np.random.rand(5, 5, 1200)
    # Test that invalid init parameters raise errors
values = (('10hz', None, 100., 'auto'), (5., '10hz', 100., 'auto'),
(10., 20., 5., 'auto'), (None, None, 100., '5hz'))
for low, high, sf, ltrans in values:
filt = TemporalFilter(low, high, sf, ltrans, fir_design='firwin')
pytest.raises(ValueError, filt.fit_transform, X)
# Add tests for different combinations of l_freq and h_freq
for low, high in ((5., 15.), (None, 15.), (5., None)):
filt = TemporalFilter(low, high, sfreq=100., fir_design='firwin')
Xt = filt.fit_transform(X)
assert_array_equal(filt.fit_transform(X), Xt)
assert (X.shape == Xt.shape)
# Test fit and transform numpy type check
with pytest.raises(ValueError, match='Data to be filtered must be'):
filt.transform([1, 2])
# Test with 2 dimensional data array
X = np.random.rand(101, 500)
filt = TemporalFilter(l_freq=25., h_freq=50., sfreq=1000.,
filter_length=150, fir_design='firwin2')
assert_equal(filt.fit_transform(X).shape, X.shape)
run_tests_if_main()
|
import numpy as np
from scipy.sparse import csr_matrix
from scattertext.termranking.TermRanker import TermRanker
class DocLengthNormalizedFrequencyRanker(TermRanker):
'''Ranks terms by their document-length adjusted frequency instead of their raw frequency.
This means that each term has a document-specific weight of #(t,d)/|d|.
'''
    def get_ranks(self, label_append=' freq'):
row = self._get_row_category_ids()
X = self.get_X()
return self.get_ranks_from_mat(X, row, label_append)
def get_ranks_from_mat(self, X, row, label_append=' freq'):
doc_lengths = X.sum(axis=1)
normX = self._get_normalized_X(X, doc_lengths)
categoryX = csr_matrix((normX.data, (row, normX.indices)))
return self._get_freq_df(categoryX, label_append=label_append)
def _get_normalized_X(self, X, doc_lengths):
return csr_matrix(doc_lengths.mean() * X.astype(np.float32) / doc_lengths)
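# --- Editor's addition: a small numeric sketch of the normalisation above.
# Each row of the term-document matrix is rescaled by mean_doc_length /
# doc_length, so a term that makes up a larger share of a short document gets
# more weight than the same raw count in a long document.  Values are synthetic.
def _example_length_normalisation():
    X = csr_matrix(np.array([[1., 9.], [1., 99.]], dtype=np.float32))  # 2 docs x 2 terms
    doc_lengths = X.sum(axis=1)  # [[10.], [100.]]
    return csr_matrix(doc_lengths.mean() * X / doc_lengths).toarray()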
"""
class VarianceSensitiveFrequencyRanker(TermRanker):
'''Rank terms by their mean document frequency divided by the standard errors.'''
def get_ranks(self, label_append=' freq'):
X = self.get_X()
d = {}
y = self._term_doc_matrix._y
for idx, cat in self._term_doc_matrix._category_idx_store.items():
catX = X[y == idx, :]
catXB = (catX > 0).astype(np.float)
means = catX.mean(axis=0)
non_zero_sds_numerators = ((catX - catXB.multiply(csr_matrix(means)))
.power(2).sum(axis=0))
zero_dfs_numerators = ((catX.shape[0] - catX.getnnz(axis=0))
* np.power(means, 2).A1)
ses = (np.sqrt(non_zero_sds_numerators + zero_dfs_numerators
/ (catX.shape[0] - 1))) / np.sqrt(catX.shape[0])
return means/ses
"""
|
from pysmartthings import Attribute, Capability
import pytest
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_HS_COLOR,
ATTR_TRANSITION,
DOMAIN as LIGHT_DOMAIN,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
SUPPORT_TRANSITION,
)
from homeassistant.components.smartthings.const import DOMAIN, SIGNAL_SMARTTHINGS_UPDATE
from homeassistant.const import ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .conftest import setup_platform
@pytest.fixture(name="light_devices")
def light_devices_fixture(device_factory):
"""Fixture returns a set of mock light devices."""
return [
device_factory(
"Dimmer 1",
capabilities=[Capability.switch, Capability.switch_level],
status={Attribute.switch: "on", Attribute.level: 100},
),
device_factory(
"Color Dimmer 1",
capabilities=[
Capability.switch,
Capability.switch_level,
Capability.color_control,
],
status={
Attribute.switch: "off",
Attribute.level: 0,
Attribute.hue: 76.0,
Attribute.saturation: 55.0,
},
),
device_factory(
"Color Dimmer 2",
capabilities=[
Capability.switch,
Capability.switch_level,
Capability.color_control,
Capability.color_temperature,
],
status={
Attribute.switch: "on",
Attribute.level: 100,
Attribute.hue: 76.0,
Attribute.saturation: 55.0,
Attribute.color_temperature: 4500,
},
),
]
async def test_entity_state(hass, light_devices):
"""Tests the state attributes properly match the light types."""
await setup_platform(hass, LIGHT_DOMAIN, devices=light_devices)
# Dimmer 1
state = hass.states.get("light.dimmer_1")
assert state.state == "on"
assert (
state.attributes[ATTR_SUPPORTED_FEATURES]
== SUPPORT_BRIGHTNESS | SUPPORT_TRANSITION
)
assert isinstance(state.attributes[ATTR_BRIGHTNESS], int)
assert state.attributes[ATTR_BRIGHTNESS] == 255
# Color Dimmer 1
state = hass.states.get("light.color_dimmer_1")
assert state.state == "off"
assert (
state.attributes[ATTR_SUPPORTED_FEATURES]
== SUPPORT_BRIGHTNESS | SUPPORT_TRANSITION | SUPPORT_COLOR
)
# Color Dimmer 2
state = hass.states.get("light.color_dimmer_2")
assert state.state == "on"
assert (
state.attributes[ATTR_SUPPORTED_FEATURES]
== SUPPORT_BRIGHTNESS | SUPPORT_TRANSITION | SUPPORT_COLOR | SUPPORT_COLOR_TEMP
)
assert state.attributes[ATTR_BRIGHTNESS] == 255
assert state.attributes[ATTR_HS_COLOR] == (273.6, 55.0)
assert isinstance(state.attributes[ATTR_COLOR_TEMP], int)
assert state.attributes[ATTR_COLOR_TEMP] == 222
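# --- Editor's addition: a short arithmetic note on the expected values above.
# SmartThings reports hue and saturation as percentages, so hue 76.0 maps to
# 76.0 / 100 * 360 == 273.6 degrees, and the 4500 K colour temperature maps to
# round(1_000_000 / 4500) == 222 mireds.  The helper is illustrative only.
def _expected_color_values():
    hue_degrees = 76.0 / 100 * 360  # 273.6
    mireds = round(1_000_000 / 4500)  # 222
    return hue_degrees, mireds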
async def test_entity_and_device_attributes(hass, device_factory):
"""Test the attributes of the entity are correct."""
# Arrange
device = device_factory("Light 1", [Capability.switch, Capability.switch_level])
entity_registry = await hass.helpers.entity_registry.async_get_registry()
device_registry = await hass.helpers.device_registry.async_get_registry()
# Act
await setup_platform(hass, LIGHT_DOMAIN, devices=[device])
# Assert
entry = entity_registry.async_get("light.light_1")
assert entry
assert entry.unique_id == device.device_id
entry = device_registry.async_get_device({(DOMAIN, device.device_id)}, [])
assert entry
assert entry.name == device.label
assert entry.model == device.device_type_name
assert entry.manufacturer == "Unavailable"
async def test_turn_off(hass, light_devices):
"""Test the light turns of successfully."""
# Arrange
await setup_platform(hass, LIGHT_DOMAIN, devices=light_devices)
# Act
await hass.services.async_call(
"light", "turn_off", {"entity_id": "light.color_dimmer_2"}, blocking=True
)
# Assert
state = hass.states.get("light.color_dimmer_2")
assert state is not None
assert state.state == "off"
async def test_turn_off_with_transition(hass, light_devices):
"""Test the light turns of successfully with transition."""
# Arrange
await setup_platform(hass, LIGHT_DOMAIN, devices=light_devices)
# Act
await hass.services.async_call(
"light",
"turn_off",
{ATTR_ENTITY_ID: "light.color_dimmer_2", ATTR_TRANSITION: 2},
blocking=True,
)
# Assert
state = hass.states.get("light.color_dimmer_2")
assert state is not None
assert state.state == "off"
async def test_turn_on(hass, light_devices):
"""Test the light turns of successfully."""
# Arrange
await setup_platform(hass, LIGHT_DOMAIN, devices=light_devices)
# Act
await hass.services.async_call(
"light", "turn_on", {ATTR_ENTITY_ID: "light.color_dimmer_1"}, blocking=True
)
# Assert
state = hass.states.get("light.color_dimmer_1")
assert state is not None
assert state.state == "on"
async def test_turn_on_with_brightness(hass, light_devices):
"""Test the light turns on to the specified brightness."""
# Arrange
await setup_platform(hass, LIGHT_DOMAIN, devices=light_devices)
# Act
await hass.services.async_call(
"light",
"turn_on",
{
ATTR_ENTITY_ID: "light.color_dimmer_1",
ATTR_BRIGHTNESS: 75,
ATTR_TRANSITION: 2,
},
blocking=True,
)
# Assert
state = hass.states.get("light.color_dimmer_1")
assert state is not None
assert state.state == "on"
# round-trip rounding error (expected)
assert state.attributes[ATTR_BRIGHTNESS] == 74
async def test_turn_on_with_minimal_brightness(hass, light_devices):
"""
Test lights set to lowest brightness when converted scale would be zero.
SmartThings light brightness is a percentage (0-100), but Home Assistant uses a
0-255 scale. This tests if a really low value (1-2) is passed, we don't
set the level to zero, which turns off the lights in SmartThings.
"""
# Arrange
await setup_platform(hass, LIGHT_DOMAIN, devices=light_devices)
# Act
await hass.services.async_call(
"light",
"turn_on",
{ATTR_ENTITY_ID: "light.color_dimmer_1", ATTR_BRIGHTNESS: 2},
blocking=True,
)
# Assert
state = hass.states.get("light.color_dimmer_1")
assert state is not None
assert state.state == "on"
# round-trip rounding error (expected)
assert state.attributes[ATTR_BRIGHTNESS] == 3
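# --- Editor's addition: a hedged sketch of the scale conversion described in the
# docstring above.  Home Assistant brightness 2 (on a 0-255 scale) corresponds to
# roughly 0.8 percent; the integration is expected to clamp anything that would
# round to 0 up to 1 percent so SmartThings does not switch the light off.  The
# helper below is hypothetical and only illustrates that rounding.
def _ha_brightness_to_smartthings_level(brightness):
    return max(1, round(brightness / 255 * 100))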
async def test_turn_on_with_color(hass, light_devices):
"""Test the light turns on with color."""
# Arrange
await setup_platform(hass, LIGHT_DOMAIN, devices=light_devices)
# Act
await hass.services.async_call(
"light",
"turn_on",
{ATTR_ENTITY_ID: "light.color_dimmer_2", ATTR_HS_COLOR: (180, 50)},
blocking=True,
)
# Assert
state = hass.states.get("light.color_dimmer_2")
assert state is not None
assert state.state == "on"
assert state.attributes[ATTR_HS_COLOR] == (180, 50)
async def test_turn_on_with_color_temp(hass, light_devices):
"""Test the light turns on with color temp."""
# Arrange
await setup_platform(hass, LIGHT_DOMAIN, devices=light_devices)
# Act
await hass.services.async_call(
"light",
"turn_on",
{ATTR_ENTITY_ID: "light.color_dimmer_2", ATTR_COLOR_TEMP: 300},
blocking=True,
)
# Assert
state = hass.states.get("light.color_dimmer_2")
assert state is not None
assert state.state == "on"
assert state.attributes[ATTR_COLOR_TEMP] == 300
async def test_update_from_signal(hass, device_factory):
"""Test the light updates when receiving a signal."""
# Arrange
device = device_factory(
"Color Dimmer 2",
capabilities=[
Capability.switch,
Capability.switch_level,
Capability.color_control,
Capability.color_temperature,
],
status={
Attribute.switch: "off",
Attribute.level: 100,
Attribute.hue: 76.0,
Attribute.saturation: 55.0,
Attribute.color_temperature: 4500,
},
)
await setup_platform(hass, LIGHT_DOMAIN, devices=[device])
await device.switch_on(True)
# Act
async_dispatcher_send(hass, SIGNAL_SMARTTHINGS_UPDATE, [device.device_id])
# Assert
await hass.async_block_till_done()
state = hass.states.get("light.color_dimmer_2")
assert state is not None
assert state.state == "on"
async def test_unload_config_entry(hass, device_factory):
"""Test the light is removed when the config entry is unloaded."""
# Arrange
device = device_factory(
"Color Dimmer 2",
capabilities=[
Capability.switch,
Capability.switch_level,
Capability.color_control,
Capability.color_temperature,
],
status={
Attribute.switch: "off",
Attribute.level: 100,
Attribute.hue: 76.0,
Attribute.saturation: 55.0,
Attribute.color_temperature: 4500,
},
)
config_entry = await setup_platform(hass, LIGHT_DOMAIN, devices=[device])
# Act
await hass.config_entries.async_forward_entry_unload(config_entry, "light")
# Assert
assert not hass.states.get("light.color_dimmer_2")
|
import logging
from socket import gaierror
import voluptuous as vol
from xiaomi_gateway import MULTICAST_PORT, XiaomiGateway, XiaomiGatewayDiscovery
from homeassistant import config_entries
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME, CONF_PORT
from homeassistant.core import callback
from homeassistant.helpers.device_registry import format_mac
# pylint: disable=unused-import
from .const import (
CONF_INTERFACE,
CONF_KEY,
CONF_PROTOCOL,
CONF_SID,
DEFAULT_DISCOVERY_RETRY,
DOMAIN,
ZEROCONF_ACPARTNER,
ZEROCONF_GATEWAY,
)
_LOGGER = logging.getLogger(__name__)
DEFAULT_GATEWAY_NAME = "Xiaomi Aqara Gateway"
DEFAULT_INTERFACE = "any"
GATEWAY_CONFIG = vol.Schema(
{vol.Optional(CONF_INTERFACE, default=DEFAULT_INTERFACE): str}
)
CONFIG_HOST = {
vol.Optional(CONF_HOST): str,
vol.Optional(CONF_MAC): str,
}
GATEWAY_CONFIG_HOST = GATEWAY_CONFIG.extend(CONFIG_HOST)
GATEWAY_SETTINGS = vol.Schema(
{
vol.Optional(CONF_KEY): vol.All(str, vol.Length(min=16, max=16)),
vol.Optional(CONF_NAME, default=DEFAULT_GATEWAY_NAME): str,
}
)
class XiaomiAqaraFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a Xiaomi Aqara config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_PUSH
def __init__(self):
"""Initialize."""
self.host = None
self.interface = DEFAULT_INTERFACE
self.sid = None
self.gateways = None
self.selected_gateway = None
@callback
def async_show_form_step_user(self, errors):
"""Show the form belonging to the user step."""
schema = GATEWAY_CONFIG
if (self.host is None and self.sid is None) or errors:
schema = GATEWAY_CONFIG_HOST
return self.async_show_form(step_id="user", data_schema=schema, errors=errors)
async def async_step_user(self, user_input=None):
"""Handle a flow initialized by the user."""
errors = {}
if user_input is None:
return self.async_show_form_step_user(errors)
self.interface = user_input[CONF_INTERFACE]
# allow optional manual setting of host and mac
if self.host is None:
self.host = user_input.get(CONF_HOST)
if self.sid is None:
mac_address = user_input.get(CONF_MAC)
# format sid from mac_address
if mac_address is not None:
self.sid = format_mac(mac_address).replace(":", "")
# if host is already known by zeroconf discovery or manual optional settings
if self.host is not None and self.sid is not None:
# Connect to Xiaomi Aqara Gateway
self.selected_gateway = await self.hass.async_add_executor_job(
XiaomiGateway,
self.host,
self.sid,
None,
DEFAULT_DISCOVERY_RETRY,
self.interface,
MULTICAST_PORT,
None,
)
if self.selected_gateway.connection_error:
errors[CONF_HOST] = "invalid_host"
if self.selected_gateway.mac_error:
errors[CONF_MAC] = "invalid_mac"
if errors:
return self.async_show_form_step_user(errors)
return await self.async_step_settings()
        # Discover Xiaomi Aqara Gateways in the network to get the required SIDs.
xiaomi = XiaomiGatewayDiscovery(self.hass.add_job, [], self.interface)
try:
await self.hass.async_add_executor_job(xiaomi.discover_gateways)
except gaierror:
errors[CONF_INTERFACE] = "invalid_interface"
return self.async_show_form_step_user(errors)
self.gateways = xiaomi.gateways
if len(self.gateways) == 1:
self.selected_gateway = list(self.gateways.values())[0]
self.sid = self.selected_gateway.sid
return await self.async_step_settings()
if len(self.gateways) > 1:
return await self.async_step_select()
errors["base"] = "discovery_error"
return self.async_show_form_step_user(errors)
async def async_step_select(self, user_input=None):
"""Handle multiple aqara gateways found."""
errors = {}
if user_input is not None:
ip_adress = user_input["select_ip"]
self.selected_gateway = self.gateways[ip_adress]
self.sid = self.selected_gateway.sid
return await self.async_step_settings()
select_schema = vol.Schema(
{
vol.Required("select_ip"): vol.In(
[gateway.ip_adress for gateway in self.gateways.values()]
)
}
)
return self.async_show_form(
step_id="select", data_schema=select_schema, errors=errors
)
async def async_step_zeroconf(self, discovery_info):
"""Handle zeroconf discovery."""
name = discovery_info.get("name")
self.host = discovery_info.get("host")
mac_address = discovery_info.get("properties", {}).get("mac")
if not name or not self.host or not mac_address:
return self.async_abort(reason="not_xiaomi_aqara")
# Check if the discovered device is an xiaomi aqara gateway.
if not (
name.startswith(ZEROCONF_GATEWAY) or name.startswith(ZEROCONF_ACPARTNER)
):
_LOGGER.debug(
"Xiaomi device '%s' discovered with host %s, not identified as xiaomi aqara gateway",
name,
self.host,
)
return self.async_abort(reason="not_xiaomi_aqara")
        # format mac (include colons and make lowercase)
mac_address = format_mac(mac_address)
# format sid from mac_address
self.sid = mac_address.replace(":", "")
unique_id = mac_address
await self.async_set_unique_id(unique_id)
self._abort_if_unique_id_configured(
{CONF_HOST: self.host, CONF_MAC: mac_address}
)
# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
self.context.update({"title_placeholders": {"name": self.host}})
return await self.async_step_user()
async def async_step_settings(self, user_input=None):
"""Specify settings and connect aqara gateway."""
errors = {}
if user_input is not None:
# get all required data
name = user_input[CONF_NAME]
key = user_input.get(CONF_KEY)
ip_adress = self.selected_gateway.ip_adress
port = self.selected_gateway.port
protocol = self.selected_gateway.proto
if key is not None:
# validate key by issuing stop ringtone playback command.
self.selected_gateway.key = key
valid_key = self.selected_gateway.write_to_hub(self.sid, mid=10000)
else:
valid_key = True
if valid_key:
                # format_mac, for a gateway the sid equals the mac address
mac_address = format_mac(self.sid)
# set unique_id
unique_id = mac_address
await self.async_set_unique_id(unique_id)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=name,
data={
CONF_HOST: ip_adress,
CONF_PORT: port,
CONF_MAC: mac_address,
CONF_INTERFACE: self.interface,
CONF_PROTOCOL: protocol,
CONF_KEY: key,
CONF_SID: self.sid,
},
)
errors[CONF_KEY] = "invalid_key"
return self.async_show_form(
step_id="settings", data_schema=GATEWAY_SETTINGS, errors=errors
)
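# --- Editor's addition: a small illustration of the sid derivation used in the
# flow above.  format_mac normalises a MAC address to lowercase, colon-separated
# form; the gateway sid is that string with the colons stripped.  The MAC below
# is a placeholder.
def _example_sid_from_mac():
    mac_address = format_mac("04:CF:8C:12:34:56")
    return mac_address.replace(":", "")  # "04cf8c123456"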
|
from mock import MagicMock
from mock import patch
from paasta_tools.cli.cmds import get_latest_deployment
def test_get_latest_deployment(capfd):
mock_args = MagicMock(service="", deploy_group="", soa_dir="")
with patch(
"paasta_tools.cli.cmds.get_latest_deployment.get_currently_deployed_sha",
return_value="FAKE_SHA",
autospec=True,
), patch(
"paasta_tools.cli.cmds.get_latest_deployment.validate_service_name",
autospec=True,
):
assert get_latest_deployment.paasta_get_latest_deployment(mock_args) == 0
assert "FAKE_SHA" in capfd.readouterr()[0]
def test_get_latest_deployment_no_deployment_tag(capfd):
mock_args = MagicMock(
service="fake_service", deploy_group="fake_deploy_group", soa_dir=""
)
with patch(
"paasta_tools.cli.cmds.get_latest_deployment.get_currently_deployed_sha",
return_value=None,
autospec=True,
), patch(
"paasta_tools.cli.cmds.get_latest_deployment.validate_service_name",
autospec=True,
):
assert get_latest_deployment.paasta_get_latest_deployment(mock_args) == 1
assert (
"A deployment could not be found for fake_deploy_group in fake_service"
in capfd.readouterr()[1]
)
|
import sys
import os
import dulwich
from dulwich import porcelain
from dulwich.walk import Walker
from gittle import Gittle
class GitError(Exception):
def __init__(self, arg):
Exception.__init__(self, arg)
def _find_repo(path):
    subdirs = next(os.walk(path))[1]
if '.git' in subdirs:
return path
else:
parent = os.path.dirname(path)
if parent == path:
return None
else:
return _find_repo(parent)
# Get the parent git repo, if there is one
def _get_repo():
return Gittle(_find_repo(os.getcwd()))
def any_one(iterable):
it = iter(iterable)
return any(it) and not any(it)
def find_revision_sha(repo, rev):
'''rev may refer to the following ways to "spell" a commit object:
<sha1> full or abbreviated sha, only if unique
<ref> search in local refs, then remote refs.
. If '$GIT_DIR/<refname>' exists, that is what you mean (this is usually
useful only for 'HEAD', 'FETCH_HEAD', 'ORIG_HEAD', 'MERGE_HEAD'
and 'CHERRY_PICK_HEAD');
. otherwise, 'refs/<refname>' if it exists;
. otherwise, 'refs/tags/<refname>' if it exists;
. otherwise, 'refs/heads/<refname>' if it exists;
. otherwise, 'refs/remotes/<refname>' if it exists;
. otherwise, 'refs/remotes/<refname>/HEAD' if it exists.
'''
if rev in repo:
return repo[rev].id
o = repo.repo.object_store
returnval = repo.refs.get(rev) or repo.tags.get(rev) or repo.branches.get(rev) or repo.remote_branches.get(rev)
if returnval:
return returnval
else:
shalist = [sha for sha in o if sha.startswith(rev) and isinstance(o[sha], dulwich.objects.Commit)]
if len(shalist) == 1:
            return shalist[0]
elif len(shalist) > 1:
raise GitError('SHA {} is not unique'.format(rev))
raise GitError('could not find rev {}'.format(rev))
def merge_base(repo, rev1, rev2):
''''git merge-base' finds best common ancestor(s) between two commits to use
in a three-way merge. One common ancestor is 'better' than another common
ancestor if the latter is an ancestor of the former. A common ancestor
that does not have any better common ancestor is a 'best common
ancestor', i.e. a 'merge base'. Note that there can be more than one
merge base for a pair of commits.'''
sha1 = find_revision_sha(repo, rev1)
sha2 = find_revision_sha(repo, rev2)
sha2_ancestors, _ = repo.repo.object_store._collect_ancestors([sha2], [])
merge_bases = []
queue = [sha1]
seen = []
while queue:
elt = queue.pop()
        if elt not in seen:  # prevent circular traversal
seen.append(elt)
if elt in sha2_ancestors:
merge_bases.append(elt)
elif repo[elt].parents:
queue.extend(repo[elt].parents)
return merge_bases
def count_commits_between(repo, rev1, rev2):
    '''Find the common ancestor, then count commits ancestor->sha1 and ancestor->sha2.'''
sha1 = find_revision_sha(repo, rev1)
sha2 = find_revision_sha(repo, rev2)
if sha1 == sha2:
return (0, 0)
sha1_ahead = sum(1 for _ in Walker(repo.repo.object_store, [sha1], [sha2]))
sha1_behind = sum(1 for _ in Walker(repo.repo.object_store, [sha2], [sha1]))
return (sha1_ahead, sha1_behind)
def is_ancestor(repo, rev1, rev2):
'''return true if rev1 is an ancestor of rev2'''
sha1 = find_revision_sha(repo, rev1)
sha2 = find_revision_sha(repo, rev2)
    return sha1 in merge_base(repo, sha1, sha2)
def can_ff(repo, oldrev, newrev):
return merge_base(repo, oldrev, newrev) == [oldrev]
def get_remote_tracking_branch(repo, branchname):
config = repo.repo.get_config()
try:
remote = config.get(('branch', branchname), 'remote')
merge = config.get(('branch', branchname), 'merge')
remotebranch = merge.split('refs/heads/')[1]
return remote + '/' + remotebranch
except KeyError:
return None
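# --- Editor's addition: a hypothetical helper showing the intended use of the
# ancestry utilities above.  A local branch can be fast-forwarded onto its
# remote tracking branch only when the local tip is an ancestor of the remote
# tip; the name and the way the tracking branch is resolved are assumptions.
def _example_can_fast_forward(repo, branchname):
    tracking = get_remote_tracking_branch(repo, branchname)
    if tracking is None:
        return False
    ahead, behind = count_commits_between(repo, branchname, tracking)
    return ahead == 0 and behind > 0 and can_ff(repo, branchname, tracking)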
|