import unittest
import logging
import os
import io
import sys
import random
import string
import mmap
import tempfile
from acdcli.api import client, content, common
from acdcli.api.common import RequestError
from acdcli.utils import hashing
logging.basicConfig(level=logging.INFO)
path = os.path.join(os.path.dirname(__file__), 'cache_files')
def gen_rand_sz():
return random.randint(1, 32 * 1024)
def gen_rand_nm():
return str.join('', (random.choice(string.ascii_letters + string.digits) for _ in range(32)))
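# Note (added comment): the random default sizes below are evaluated once at import
# time, so calls to gen_temp_file()/gen_rand_anon_mmap() that omit `size` reuse the
# same value for the whole test run.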
def gen_temp_file(size=gen_rand_sz()) -> tuple:
f = tempfile.NamedTemporaryFile(mode='w+b')
f.write(os.urandom(size))
f.seek(0)
return f, os.path.getsize(f.name)
def gen_rand_anon_mmap(size=gen_rand_sz()) -> tuple:
mmo = mmap.mmap(-1, size)
mmo.write(os.urandom(size))
mmo.seek(0)
return mmo, size
def do_not_run(func):
return lambda x: None
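# The decorator above disables a test by replacing it with a no-op lambda, so
# unittest still collects the method but never exercises the live API call.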
print(sys.argv)
class APILiveTestCase(unittest.TestCase):
def setUp(self):
self.acd_client = client.ACDClient(path)
self.acd_client.BOReq._wait = lambda: None
self.assertTrue(os.path.isfile(os.path.join(path, 'oauth_data')))
self.assertTrue(os.path.isfile(os.path.join(path, 'endpoint_data')))
def tearDown(self):
pass
#
# common.py
#
def test_back_off_error(self):
self.acd_client.BOReq.get(self.acd_client.content_url)
self.assertEqual(self.acd_client.BOReq._BackOffRequest__retries, 1)
#
# account.py
#
def test_get_quota(self):
q = self.acd_client.get_quota()
self.assertIn('quota', q)
self.assertIn('available', q)
def test_get_usage(self):
self.acd_client.get_account_usage()
#
# content.py
#
def test_upload(self):
f, sz = gen_temp_file()
md5 = hashing.hash_file_obj(f)
n = self.acd_client.upload_file(f.name)
self.assertIn('id', n)
self.assertEqual(n['contentProperties']['size'], sz)
self.assertEqual(n['contentProperties']['md5'], md5)
n = self.acd_client.move_to_trash(n['id'])
def test_upload_stream(self):
s, sz = gen_rand_anon_mmap()
fn = gen_rand_nm()
h = hashing.IncrementalHasher()
n = self.acd_client.upload_stream(s, fn, parent=None, read_callbacks=[h.update])
self.assertEqual(n['contentProperties']['md5'], h.get_result())
self.assertEqual(n['contentProperties']['size'], sz)
self.acd_client.move_to_trash(n['id'])
def test_upload_stream_empty(self):
empty_stream = io.BufferedReader(io.BytesIO())
fn = gen_rand_nm()
n = self.acd_client.upload_stream(empty_stream, fn, parent=None)
self.assertEqual(n['contentProperties']['md5'], 'd41d8cd98f00b204e9800998ecf8427e')
self.assertEqual(n['contentProperties']['size'], 0)
self.acd_client.move_to_trash(n['id'])
def test_overwrite(self):
f, sz = gen_temp_file()
h = hashing.IncrementalHasher()
n = self.acd_client.create_file(os.path.basename(f.name))
self.assertIn('id', n)
n = self.acd_client.overwrite_file(n['id'], f.name, [h.update])
self.assertEqual(n['contentProperties']['version'], 2)
self.assertEqual(n['contentProperties']['md5'], h.get_result())
self.acd_client.move_to_trash(n['id'])
def test_overwrite_stream(self):
s, sz = gen_rand_anon_mmap()
fn = gen_rand_nm()
h = hashing.IncrementalHasher()
n = self.acd_client.create_file(fn)
self.assertIn('id', n)
n = self.acd_client.overwrite_stream(s, n['id'], [h.update])
self.assertEqual(n['contentProperties']['md5'], h.get_result())
self.assertEqual(n['contentProperties']['size'], sz)
empty_stream = io.BufferedReader(io.BytesIO())
n = self.acd_client.overwrite_stream(empty_stream, n['id'])
self.assertEqual(n['contentProperties']['md5'], 'd41d8cd98f00b204e9800998ecf8427e')
self.assertEqual(n['contentProperties']['size'], 0)
self.acd_client.move_to_trash(n['id'])
def test_download(self):
f, sz = gen_temp_file()
self.assertTrue(sz < self.acd_client._conf.getint('transfer', 'dl_chunk_size'))
md5 = hashing.hash_file_obj(f)
n = self.acd_client.upload_file(f.name)
self.assertIn('id', n)
f.close()
self.assertFalse(os.path.exists(f.name))
self.acd_client.download_file(n['id'], f.name)
md5_dl = hashing.hash_file(f.name)
self.assertEqual(md5, md5_dl)
self.acd_client.move_to_trash(n['id'])
def test_download_chunked(self):
ch_sz = gen_rand_sz()
self.acd_client._conf['transfer']['dl_chunk_size'] = str(ch_sz)
f, sz = gen_temp_file(size=5 * ch_sz)
md5 = hashing.hash_file_obj(f)
n = self.acd_client.upload_file(f.name)
self.assertEqual(n['contentProperties']['md5'], md5)
f.close()
self.assertFalse(os.path.exists(f.name))
f = io.BytesIO()
self.acd_client.chunked_download(n['id'], f, length=sz)
self.acd_client.move_to_trash(n['id'])
dl_md5 = hashing.hash_file_obj(f)
self.assertEqual(sz, f.tell())
self.assertEqual(md5, dl_md5)
def test_incomplete_download(self):
ch_sz = gen_rand_sz()
self.acd_client._conf['transfer']['dl_chunk_size'] = str(ch_sz)
f, sz = gen_temp_file(size=5 * ch_sz)
md5 = hashing.hash_file_obj(f)
n = self.acd_client.upload_file(f.name)
self.assertEqual(n['contentProperties']['md5'], md5)
f.close()
with self.assertRaises(RequestError) as cm:
self.acd_client.download_file(n['id'], f.name, length=sz + 1)
self.assertEqual(cm.exception.status_code, RequestError.CODE.INCOMPLETE_RESULT)
self.acd_client.download_file(n['id'], f.name, length=sz)
self.acd_client.move_to_trash(n['id'])
os.remove(f.name)
def test_download_resume(self):
ch_sz = gen_rand_sz()
self.acd_client._conf['transfer']['dl_chunk_size'] = str(ch_sz)
f, sz = gen_temp_file(size=5 * ch_sz)
md5 = hashing.hash_file(f.name)
n = self.acd_client.upload_file(f.name)
self.assertEqual(n['contentProperties']['md5'], md5)
f.close()
basename = os.path.basename(f.name)
self.assertFalse(os.path.exists(f.name))
p_fn = basename + content.PARTIAL_SUFFIX
with open(p_fn, 'wb') as f:
self.acd_client.chunked_download(n['id'], f, length=int(sz * random.random()))
self.assertLess(os.path.getsize(p_fn), sz)
self.acd_client.download_file(n['id'], basename)
self.acd_client.move_to_trash(n['id'])
dl_md5 = hashing.hash_file(basename)
self.assertEqual(md5, dl_md5)
os.remove(basename)
def test_create_file(self):
name = gen_rand_nm()
node = self.acd_client.create_file(name)
self.acd_client.move_to_trash(node['id'])
self.assertEqual(node['name'], name)
self.assertEqual(node['parents'][0], self.acd_client.get_root_id())
def test_get_root_id(self):
id = self.acd_client.get_root_id()
self.assertTrue(common.is_valid_id(id))
# helper
def create_random_dir(self):
nm = gen_rand_nm()
n = self.acd_client.create_folder(nm)
self.assertIn('id', n)
return n['id']
def test_mkdir(self):
f_id = self.create_random_dir()
self.acd_client.move_to_trash(f_id)
#
# metadata.py
#
@do_not_run
def test_get_changes(self):
nodes, purged_nodes, checkpoint, reset = self.acd_client.get_changes(include_purged=False)
self.assertGreaterEqual(len(nodes), 1)
self.assertEqual(len(purged_nodes), 0)
self.assertTrue(reset)
nodes, purged_nodes, checkpoint, reset = self.acd_client.get_changes(checkpoint=checkpoint)
self.assertEqual(len(nodes), 0)
self.assertEqual(len(purged_nodes), 0)
self.assertFalse(reset)
def test_move_node(self):
f_id = self.create_random_dir()
node = self.acd_client.create_file(gen_rand_nm())
old_parent = node['parents'][0]
node = self.acd_client.move_node(node['id'], f_id)
self.assertEqual(node['parents'][0], f_id)
self.acd_client.move_to_trash(f_id)
self.acd_client.move_to_trash(node['id'])
def test_rename_node(self):
nm = gen_rand_nm()
nm2 = gen_rand_nm()
node = self.acd_client.create_file(nm)
self.assertEqual(node['name'], nm)
node = self.acd_client.rename_node(node['id'], nm2)
self.assertEqual(node['name'], nm2)
self.acd_client.move_to_trash(node['id'])
#
# trash.py
#
def test_trash(self):
# unnecessary
pass
def test_restore(self):
f_id = self.create_random_dir()
n = self.acd_client.move_to_trash(f_id)
self.assertEqual(n['status'], 'TRASH')
n = self.acd_client.restore(n['id'])
self.assertEqual(n['status'], 'AVAILABLE')
n = self.acd_client.move_to_trash(n['id'])
self.assertEqual(n['status'], 'TRASH')
def test_purge(self):
f_id = self.create_random_dir()
n = self.acd_client.move_to_trash(f_id)
self.assertEqual(n['status'], 'TRASH')
with self.assertRaises(RequestError):
self.acd_client.purge(n['id'])
|
import logging
from aiounifi.api import SOURCE_EVENT
from aiounifi.events import (
WIRED_CLIENT_BLOCKED,
WIRED_CLIENT_UNBLOCKED,
WIRELESS_CLIENT_BLOCKED,
WIRELESS_CLIENT_UNBLOCKED,
)
from homeassistant.components.switch import DOMAIN, SwitchEntity
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.restore_state import RestoreEntity
from .const import DOMAIN as UNIFI_DOMAIN
from .unifi_client import UniFiClient
_LOGGER = logging.getLogger(__name__)
BLOCK_SWITCH = "block"
POE_SWITCH = "poe"
CLIENT_BLOCKED = (WIRED_CLIENT_BLOCKED, WIRELESS_CLIENT_BLOCKED)
CLIENT_UNBLOCKED = (WIRED_CLIENT_UNBLOCKED, WIRELESS_CLIENT_UNBLOCKED)
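# Grouping wired and wireless event types lets the block switch react to a
# block/unblock event regardless of how the client is connected.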
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up switches for UniFi component.
Switches control network access and POE for switch ports.
"""
controller = hass.data[UNIFI_DOMAIN][config_entry.entry_id]
controller.entities[DOMAIN] = {BLOCK_SWITCH: set(), POE_SWITCH: set()}
if controller.site_role != "admin":
return
# Store previously known POE control entities in case their POE is turned off.
previously_known_poe_clients = []
entity_registry = await hass.helpers.entity_registry.async_get_registry()
for entity in entity_registry.entities.values():
if (
entity.config_entry_id != config_entry.entry_id
or not entity.unique_id.startswith(POE_SWITCH)
):
continue
mac = entity.unique_id.replace(f"{POE_SWITCH}-", "")
if mac in controller.api.clients or mac in controller.api.clients_all:
previously_known_poe_clients.append(entity.unique_id)
for mac in controller.option_block_clients:
if mac not in controller.api.clients and mac in controller.api.clients_all:
client = controller.api.clients_all[mac]
controller.api.clients.process_raw([client.raw])
@callback
def items_added(
clients: set = controller.api.clients, devices: set = controller.api.devices
) -> None:
"""Update the values of the controller."""
if controller.option_block_clients:
add_block_entities(controller, async_add_entities, clients)
if controller.option_poe_clients:
add_poe_entities(
controller, async_add_entities, clients, previously_known_poe_clients
)
for signal in (controller.signal_update, controller.signal_options_update):
controller.listeners.append(async_dispatcher_connect(hass, signal, items_added))
items_added()
previously_known_poe_clients.clear()
@callback
def add_block_entities(controller, async_add_entities, clients):
"""Add new switch entities from the controller."""
switches = []
for mac in controller.option_block_clients:
if mac in controller.entities[DOMAIN][BLOCK_SWITCH] or mac not in clients:
continue
client = controller.api.clients[mac]
switches.append(UniFiBlockClientSwitch(client, controller))
if switches:
async_add_entities(switches)
@callback
def add_poe_entities(
controller, async_add_entities, clients, previously_known_poe_clients
):
"""Add new switch entities from the controller."""
switches = []
devices = controller.api.devices
for mac in clients:
if mac in controller.entities[DOMAIN][POE_SWITCH]:
continue
poe_client_id = f"{POE_SWITCH}-{mac}"
client = controller.api.clients[mac]
if poe_client_id not in previously_known_poe_clients and (
mac in controller.wireless_clients
or client.sw_mac not in devices
or not devices[client.sw_mac].ports[client.sw_port].port_poe
or not devices[client.sw_mac].ports[client.sw_port].poe_enable
or controller.mac == client.mac
):
continue
# Multiple POE devices on the same port mean a non-UniFi POE-driven switch sits in between
multi_clients_on_port = False
for client2 in controller.api.clients.values():
if poe_client_id in previously_known_poe_clients:
break
if (
client2.is_wired
and client.mac != client2.mac
and client.sw_mac == client2.sw_mac
and client.sw_port == client2.sw_port
):
multi_clients_on_port = True
break
if multi_clients_on_port:
continue
switches.append(UniFiPOEClientSwitch(client, controller))
if switches:
async_add_entities(switches)
class UniFiPOEClientSwitch(UniFiClient, SwitchEntity, RestoreEntity):
"""Representation of a client that uses POE."""
DOMAIN = DOMAIN
TYPE = POE_SWITCH
def __init__(self, client, controller):
"""Set up POE switch."""
super().__init__(client, controller)
self.poe_mode = None
if client.sw_port and self.port.poe_mode != "off":
self.poe_mode = self.port.poe_mode
async def async_added_to_hass(self):
"""Call when entity about to be added to Home Assistant."""
await super().async_added_to_hass()
state = await self.async_get_last_state()
if state is None:
return
if self.poe_mode is None:
self.poe_mode = state.attributes["poe_mode"]
if not self.client.sw_mac:
self.client.raw["sw_mac"] = state.attributes["switch"]
if not self.client.sw_port:
self.client.raw["sw_port"] = state.attributes["port"]
@property
def is_on(self):
"""Return true if POE is active."""
return self.port.poe_mode != "off"
@property
def available(self):
"""Return if switch is available.
A poe_mode of None means the POE state is unknown.
A missing sw_mac means the client was restored from a previous session.
"""
return (
self.poe_mode is None
or self.client.sw_mac
and (
self.controller.available
and self.client.sw_mac in self.controller.api.devices
)
)
async def async_turn_on(self, **kwargs):
"""Enable POE for client."""
await self.device.async_set_port_poe_mode(self.client.sw_port, self.poe_mode)
async def async_turn_off(self, **kwargs):
"""Disable POE for client."""
await self.device.async_set_port_poe_mode(self.client.sw_port, "off")
@property
def device_state_attributes(self):
"""Return the device state attributes."""
attributes = {
"power": self.port.poe_power,
"switch": self.client.sw_mac,
"port": self.client.sw_port,
"poe_mode": self.poe_mode,
}
return attributes
@property
def device(self):
"""Shortcut to the switch that client is connected to."""
return self.controller.api.devices[self.client.sw_mac]
@property
def port(self):
"""Shortcut to the switch port that client is connected to."""
try:
return self.device.ports[self.client.sw_port]
except (AttributeError, KeyError, TypeError):
_LOGGER.warning(
"Entity %s reports faulty device %s or port %s",
self.entity_id,
self.client.sw_mac,
self.client.sw_port,
)
async def options_updated(self) -> None:
"""Config entry options are updated, remove entity if option is disabled."""
if not self.controller.option_poe_clients:
await self.remove_item({self.client.mac})
class UniFiBlockClientSwitch(UniFiClient, SwitchEntity):
"""Representation of a blockable client."""
DOMAIN = DOMAIN
TYPE = BLOCK_SWITCH
def __init__(self, client, controller):
"""Set up block switch."""
super().__init__(client, controller)
self._is_blocked = client.blocked
@callback
def async_update_callback(self) -> None:
"""Update the clients state."""
if self.client.last_updated == SOURCE_EVENT:
if self.client.event.event in CLIENT_BLOCKED + CLIENT_UNBLOCKED:
self._is_blocked = self.client.event.event in CLIENT_BLOCKED
super().async_update_callback()
@property
def is_on(self):
"""Return true if client is allowed to connect."""
return not self._is_blocked
async def async_turn_on(self, **kwargs):
"""Turn on connectivity for client."""
await self.controller.api.clients.async_unblock(self.client.mac)
async def async_turn_off(self, **kwargs):
"""Turn off connectivity for client."""
await self.controller.api.clients.async_block(self.client.mac)
@property
def icon(self):
"""Return the icon to use in the frontend."""
if self._is_blocked:
return "mdi:network-off"
return "mdi:network"
async def options_updated(self) -> None:
"""Config entry options are updated, remove entity if option is disabled."""
if self.client.mac not in self.controller.option_block_clients:
await self.remove_item({self.client.mac})
|
import logging
import unittest
import uuid
import boto3
import moto
from smart_open import open
BUCKET_NAME = 'test-smartopen'
KEY_NAME = 'test-key'
logger = logging.getLogger(__name__)
@moto.mock_s3
def setUpModule():
'''Called once by unittest when initializing this module. Sets up the
test S3 bucket.
'''
bucket = boto3.resource('s3').create_bucket(Bucket=BUCKET_NAME)
bucket.wait_until_exists()
boto3.resource('s3').BucketVersioning(BUCKET_NAME).enable()
@moto.mock_s3
def tearDownModule():
'''Called once by unittest when tearing down this module. Empties and
removes the test S3 bucket.
'''
s3 = boto3.resource('s3')
bucket = s3.Bucket(BUCKET_NAME)
try:
bucket.object_versions.delete()
bucket.delete()
except s3.meta.client.exceptions.NoSuchBucket:
pass
bucket.wait_until_not_exists()
def get_versions(bucket, key):
"""Return object versions in chronological order."""
return [
v.id
for v in sorted(
boto3.resource('s3').Bucket(bucket).object_versions.filter(Prefix=key),
key=lambda version: version.last_modified,
)
]
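# Sorting by last_modified yields chronological order, so the tests below use
# index 0 for the oldest version and index 1 for the newest.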
@moto.mock_s3
class TestVersionId(unittest.TestCase):
def setUp(self):
#
# Each run of this test reuses the BUCKET_NAME, but works with a
# different key for isolation.
#
self.key = 'test-write-key-{}'.format(uuid.uuid4().hex)
self.url = "s3://%s/%s" % (BUCKET_NAME, self.key)
self.test_ver1 = u"String version 1.0".encode('utf8')
self.test_ver2 = u"String version 2.0".encode('utf8')
bucket = boto3.resource('s3').Bucket(BUCKET_NAME)
bucket.put_object(Key=self.key, Body=self.test_ver1)
logging.critical('versions after first write: %r', get_versions(BUCKET_NAME, self.key))
bucket.put_object(Key=self.key, Body=self.test_ver2)
self.versions = get_versions(BUCKET_NAME, self.key)
logging.critical('versions after second write: %r', get_versions(BUCKET_NAME, self.key))
assert len(self.versions) == 2
def test_good_id(self):
"""Does passing the version_id parameter into the s3 submodule work correctly when reading?"""
params = {'version_id': self.versions[0]}
with open(self.url, mode='rb', transport_params=params) as fin:
actual = fin.read()
self.assertEqual(actual, self.test_ver1)
def test_bad_id(self):
"""Does passing an invalid version_id exception into the s3 submodule get handled correctly?"""
params = {'version_id': 'bad-version-does-not-exist'}
with self.assertRaises(IOError):
open(self.url, 'rb', transport_params=params)
def test_bad_mode(self):
"""Do we correctly handle non-None version when writing?"""
params = {'version_id': self.versions[0]}
with self.assertRaises(ValueError):
open(self.url, 'wb', transport_params=params)
def test_no_version(self):
"""Passing in no version at all gives the newest version of the file?"""
with open(self.url, 'rb') as fin:
actual = fin.read()
self.assertEqual(actual, self.test_ver2)
def test_newest_version(self):
"""Passing in the newest version explicitly gives the most recent content?"""
params = {'version_id': self.versions[1]}
with open(self.url, mode='rb', transport_params=params) as fin:
actual = fin.read()
self.assertEqual(actual, self.test_ver2)
def test_oldest_version(self):
"""Passing in the oldest version gives the oldest content?"""
params = {'version_id': self.versions[0]}
with open(self.url, mode='rb', transport_params=params) as fin:
actual = fin.read()
self.assertEqual(actual, self.test_ver1)
def test_version_to_boto3(self):
"""Passing in the oldest version gives the oldest content?"""
self.versions = get_versions(BUCKET_NAME, self.key)
params = {'version_id': self.versions[0]}
with open(self.url, mode='rb', transport_params=params) as fin:
returned_obj = fin.to_boto3()
boto3_body = returned_obj.get()['Body'].read()
self.assertEqual(boto3_body, self.test_ver1)
if __name__ == '__main__':
unittest.main()
|
import logging
import re
from pexpect import pxssh
import voluptuous as vol
from homeassistant.components.device_tracker import (
DOMAIN,
PLATFORM_SCHEMA,
DeviceScanner,
)
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = vol.All(
PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Optional(CONF_PASSWORD, default=""): cv.string,
vol.Optional(CONF_PORT): cv.port,
}
)
)
def get_scanner(hass, config):
"""Validate the configuration and return a Cisco scanner."""
scanner = CiscoDeviceScanner(config[DOMAIN])
return scanner if scanner.success_init else None
class CiscoDeviceScanner(DeviceScanner):
"""This class queries a wireless router running Cisco IOS firmware."""
def __init__(self, config):
"""Initialize the scanner."""
self.host = config[CONF_HOST]
self.username = config[CONF_USERNAME]
self.port = config.get(CONF_PORT)
self.password = config[CONF_PASSWORD]
self.last_results = {}
self.success_init = self._update_info()
_LOGGER.info("cisco_ios scanner initialized")
def get_device_name(self, device):
"""Get the firmware doesn't save the name of the wireless device."""
return None
def scan_devices(self):
"""Scan for new devices and return a list with found device IDs."""
self._update_info()
return self.last_results
def _update_info(self):
"""
Ensure the information from the Cisco router is up to date.
Returns a boolean indicating whether scanning was successful.
"""
string_result = self._get_arp_data()
if string_result:
self.last_results = []
last_results = []
lines_result = string_result.splitlines()
# Remove the first two lines, as they contain the arp command
# and the arp table titles e.g.
# show ip arp
# Protocol Address | Age (min) | Hardware Addr | Type | Interface
lines_result = lines_result[2:]
for line in lines_result:
parts = line.split()
if len(parts) != 6:
continue
# ['Internet', '10.10.11.1', '-', '0027.d32d.0123', 'ARPA',
# 'GigabitEthernet0']
age = parts[2]
hw_addr = parts[3]
if age != "-":
mac = _parse_cisco_mac_address(hw_addr)
age = int(age)
if age < 1:
last_results.append(mac)
self.last_results = last_results
return True
return False
def _get_arp_data(self):
"""Open connection to the router and get arp entries."""
try:
cisco_ssh = pxssh.pxssh()
cisco_ssh.login(
self.host,
self.username,
self.password,
port=self.port,
auto_prompt_reset=False,
)
# Find the hostname
initial_line = cisco_ssh.before.decode("utf-8").splitlines()
router_hostname = initial_line[len(initial_line) - 1]
router_hostname += "#"
# Set the discovered hostname as prompt
regex_expression = ("(?i)^%s" % router_hostname).encode()
cisco_ssh.PROMPT = re.compile(regex_expression, re.MULTILINE)
# Allow full arp table to print at once
cisco_ssh.sendline("terminal length 0")
cisco_ssh.prompt(1)
cisco_ssh.sendline("show ip arp")
cisco_ssh.prompt(1)
devices_result = cisco_ssh.before
return devices_result.decode("utf-8")
except pxssh.ExceptionPxssh as px_e:
_LOGGER.error("pxssh failed on login")
_LOGGER.error(px_e)
return None
def _parse_cisco_mac_address(cisco_hardware_addr):
"""
Parse a Cisco formatted HW address to normal MAC.
e.g. convert
001d.ec02.07ab
to:
00:1D:EC:02:07:AB
Takes in cisco_hwaddr: HWAddr String from Cisco ARP table
Returns a regular standard MAC address
"""
cisco_hardware_addr = cisco_hardware_addr.replace(".", "")
blocks = [
cisco_hardware_addr[x : x + 2] for x in range(0, len(cisco_hardware_addr), 2)
]
return ":".join(blocks).upper()
|
import numpy as np
from examples.fft import fft
import tensornetwork as tn
def test_fft():
n = 3
initial_state = [complex(0)] * (1 << n)
initial_state[1] = 1j
initial_state[5] = -1
initial_node = tn.Node(np.array(initial_state).reshape((2,) * n))
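# The 8-amplitude state is reshaped into a (2, 2, 2) tensor (one index per qubit);
# the tensor-network FFT is then compared against np.fft.fft with "ortho" normalization.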
fft_out = fft.add_fft([initial_node[k] for k in range(n)])
result = tn.contractors.greedy(tn.reachable(fft_out[0].node1), fft_out)
tn.flatten_edges(fft_out)
actual = result.tensor
expected = np.fft.fft(initial_state, norm="ortho")
np.testing.assert_allclose(expected, actual)
|
import logging
import homeassistant.components.alarm_control_panel as alarm
from homeassistant.components.alarm_control_panel.const import (
SUPPORT_ALARM_ARM_AWAY,
SUPPORT_ALARM_ARM_HOME,
SUPPORT_ALARM_ARM_NIGHT,
)
from homeassistant.const import (
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_CUSTOM_BYPASS,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_ARMING,
STATE_ALARM_DISARMED,
STATE_ALARM_DISARMING,
STATE_ALARM_TRIGGERED,
)
from homeassistant.exceptions import HomeAssistantError
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, entry, async_add_entities) -> None:
"""Set up TotalConnect alarm panels based on a config entry."""
alarms = []
client = hass.data[DOMAIN][entry.entry_id]
for location_id, location in client.locations.items():
location_name = location.location_name
alarms.append(TotalConnectAlarm(location_name, location_id, client))
async_add_entities(alarms, True)
class TotalConnectAlarm(alarm.AlarmControlPanelEntity):
"""Represent an TotalConnect status."""
def __init__(self, name, location_id, client):
"""Initialize the TotalConnect status."""
self._name = name
self._location_id = location_id
self._client = client
self._state = None
self._device_state_attributes = {}
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def supported_features(self) -> int:
"""Return the list of supported features."""
return SUPPORT_ALARM_ARM_HOME | SUPPORT_ALARM_ARM_AWAY | SUPPORT_ALARM_ARM_NIGHT
@property
def device_state_attributes(self):
"""Return the state attributes of the device."""
return self._device_state_attributes
def update(self):
"""Return the state of the device."""
self._client.get_armed_status(self._location_id)
attr = {
"location_name": self._name,
"location_id": self._location_id,
"ac_loss": self._client.locations[self._location_id].ac_loss,
"low_battery": self._client.locations[self._location_id].low_battery,
"cover_tampered": self._client.locations[
self._location_id
].is_cover_tampered(),
"triggered_source": None,
"triggered_zone": None,
}
if self._client.locations[self._location_id].is_disarmed():
state = STATE_ALARM_DISARMED
elif self._client.locations[self._location_id].is_armed_home():
state = STATE_ALARM_ARMED_HOME
elif self._client.locations[self._location_id].is_armed_night():
state = STATE_ALARM_ARMED_NIGHT
elif self._client.locations[self._location_id].is_armed_away():
state = STATE_ALARM_ARMED_AWAY
elif self._client.locations[self._location_id].is_armed_custom_bypass():
state = STATE_ALARM_ARMED_CUSTOM_BYPASS
elif self._client.locations[self._location_id].is_arming():
state = STATE_ALARM_ARMING
elif self._client.locations[self._location_id].is_disarming():
state = STATE_ALARM_DISARMING
elif self._client.locations[self._location_id].is_triggered_police():
state = STATE_ALARM_TRIGGERED
attr["triggered_source"] = "Police/Medical"
elif self._client.locations[self._location_id].is_triggered_fire():
state = STATE_ALARM_TRIGGERED
attr["triggered_source"] = "Fire/Smoke"
elif self._client.locations[self._location_id].is_triggered_gas():
state = STATE_ALARM_TRIGGERED
attr["triggered_source"] = "Carbon Monoxide"
else:
logging.info("Total Connect Client returned unknown status")
state = None
self._state = state
self._device_state_attributes = attr
def alarm_disarm(self, code=None):
"""Send disarm command."""
if self._client.disarm(self._location_id) is not True:
raise HomeAssistantError(f"TotalConnect failed to disarm {self._name}.")
def alarm_arm_home(self, code=None):
"""Send arm home command."""
if self._client.arm_stay(self._location_id) is not True:
raise HomeAssistantError(f"TotalConnect failed to arm home {self._name}.")
def alarm_arm_away(self, code=None):
"""Send arm away command."""
if self._client.arm_away(self._location_id) is not True:
raise HomeAssistantError(f"TotalConnect failed to arm away {self._name}.")
def alarm_arm_night(self, code=None):
"""Send arm night command."""
if self._client.arm_stay_night(self._location_id) is not True:
raise HomeAssistantError(f"TotalConnect failed to arm night {self._name}.")
|
import logging
from pyhap.const import CATEGORY_SWITCH, CATEGORY_TELEVISION
from homeassistant.components.media_player import (
ATTR_INPUT_SOURCE,
ATTR_INPUT_SOURCE_LIST,
ATTR_MEDIA_VOLUME_LEVEL,
ATTR_MEDIA_VOLUME_MUTED,
DOMAIN,
SERVICE_SELECT_SOURCE,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_SELECT_SOURCE,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
SUPPORT_VOLUME_STEP,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
SERVICE_MEDIA_PAUSE,
SERVICE_MEDIA_PLAY,
SERVICE_MEDIA_PLAY_PAUSE,
SERVICE_MEDIA_STOP,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
SERVICE_VOLUME_DOWN,
SERVICE_VOLUME_MUTE,
SERVICE_VOLUME_SET,
SERVICE_VOLUME_UP,
STATE_OFF,
STATE_PAUSED,
STATE_PLAYING,
STATE_STANDBY,
STATE_UNKNOWN,
)
from homeassistant.core import callback
from .accessories import TYPES, HomeAccessory
from .const import (
ATTR_KEY_NAME,
CHAR_ACTIVE,
CHAR_ACTIVE_IDENTIFIER,
CHAR_CONFIGURED_NAME,
CHAR_CURRENT_VISIBILITY_STATE,
CHAR_IDENTIFIER,
CHAR_INPUT_SOURCE_TYPE,
CHAR_IS_CONFIGURED,
CHAR_MUTE,
CHAR_NAME,
CHAR_ON,
CHAR_REMOTE_KEY,
CHAR_SLEEP_DISCOVER_MODE,
CHAR_VOLUME,
CHAR_VOLUME_CONTROL_TYPE,
CHAR_VOLUME_SELECTOR,
CONF_FEATURE_LIST,
EVENT_HOMEKIT_TV_REMOTE_KEY_PRESSED,
FEATURE_ON_OFF,
FEATURE_PLAY_PAUSE,
FEATURE_PLAY_STOP,
FEATURE_TOGGLE_MUTE,
KEY_ARROW_DOWN,
KEY_ARROW_LEFT,
KEY_ARROW_RIGHT,
KEY_ARROW_UP,
KEY_BACK,
KEY_EXIT,
KEY_FAST_FORWARD,
KEY_INFORMATION,
KEY_NEXT_TRACK,
KEY_PLAY_PAUSE,
KEY_PREVIOUS_TRACK,
KEY_REWIND,
KEY_SELECT,
SERV_INPUT_SOURCE,
SERV_SWITCH,
SERV_TELEVISION,
SERV_TELEVISION_SPEAKER,
)
from .util import get_media_player_features
_LOGGER = logging.getLogger(__name__)
MEDIA_PLAYER_KEYS = {
0: KEY_REWIND,
1: KEY_FAST_FORWARD,
2: KEY_NEXT_TRACK,
3: KEY_PREVIOUS_TRACK,
4: KEY_ARROW_UP,
5: KEY_ARROW_DOWN,
6: KEY_ARROW_LEFT,
7: KEY_ARROW_RIGHT,
8: KEY_SELECT,
9: KEY_BACK,
10: KEY_EXIT,
11: KEY_PLAY_PAUSE,
15: KEY_INFORMATION,
}
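# MEDIA_PLAYER_KEYS maps the numeric HomeKit RemoteKey values received in
# set_remote_key() to the key names forwarded on the Home Assistant event bus.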
# Names may not contain special characters
# or emoji (/ is a special character for Apple)
MODE_FRIENDLY_NAME = {
FEATURE_ON_OFF: "Power",
FEATURE_PLAY_PAUSE: "Play-Pause",
FEATURE_PLAY_STOP: "Play-Stop",
FEATURE_TOGGLE_MUTE: "Mute",
}
MEDIA_PLAYER_OFF_STATES = (
STATE_OFF,
STATE_UNKNOWN,
STATE_STANDBY,
"None",
)
@TYPES.register("MediaPlayer")
class MediaPlayer(HomeAccessory):
"""Generate a Media Player accessory."""
def __init__(self, *args):
"""Initialize a Switch accessory object."""
super().__init__(*args, category=CATEGORY_SWITCH)
state = self.hass.states.get(self.entity_id)
self.chars = {
FEATURE_ON_OFF: None,
FEATURE_PLAY_PAUSE: None,
FEATURE_PLAY_STOP: None,
FEATURE_TOGGLE_MUTE: None,
}
feature_list = self.config.get(
CONF_FEATURE_LIST, get_media_player_features(state)
)
if FEATURE_ON_OFF in feature_list:
name = self.generate_service_name(FEATURE_ON_OFF)
serv_on_off = self.add_preload_service(SERV_SWITCH, CHAR_NAME)
serv_on_off.configure_char(CHAR_NAME, value=name)
self.chars[FEATURE_ON_OFF] = serv_on_off.configure_char(
CHAR_ON, value=False, setter_callback=self.set_on_off
)
if FEATURE_PLAY_PAUSE in feature_list:
name = self.generate_service_name(FEATURE_PLAY_PAUSE)
serv_play_pause = self.add_preload_service(SERV_SWITCH, CHAR_NAME)
serv_play_pause.configure_char(CHAR_NAME, value=name)
self.chars[FEATURE_PLAY_PAUSE] = serv_play_pause.configure_char(
CHAR_ON, value=False, setter_callback=self.set_play_pause
)
if FEATURE_PLAY_STOP in feature_list:
name = self.generate_service_name(FEATURE_PLAY_STOP)
serv_play_stop = self.add_preload_service(SERV_SWITCH, CHAR_NAME)
serv_play_stop.configure_char(CHAR_NAME, value=name)
self.chars[FEATURE_PLAY_STOP] = serv_play_stop.configure_char(
CHAR_ON, value=False, setter_callback=self.set_play_stop
)
if FEATURE_TOGGLE_MUTE in feature_list:
name = self.generate_service_name(FEATURE_TOGGLE_MUTE)
serv_toggle_mute = self.add_preload_service(SERV_SWITCH, CHAR_NAME)
serv_toggle_mute.configure_char(CHAR_NAME, value=name)
self.chars[FEATURE_TOGGLE_MUTE] = serv_toggle_mute.configure_char(
CHAR_ON, value=False, setter_callback=self.set_toggle_mute
)
self.async_update_state(state)
def generate_service_name(self, mode):
"""Generate name for individual service."""
return f"{self.display_name} {MODE_FRIENDLY_NAME[mode]}"
def set_on_off(self, value):
"""Move switch state to value if call came from HomeKit."""
_LOGGER.debug('%s: Set switch state for "on_off" to %s', self.entity_id, value)
service = SERVICE_TURN_ON if value else SERVICE_TURN_OFF
params = {ATTR_ENTITY_ID: self.entity_id}
self.call_service(DOMAIN, service, params)
def set_play_pause(self, value):
"""Move switch state to value if call came from HomeKit."""
_LOGGER.debug(
'%s: Set switch state for "play_pause" to %s', self.entity_id, value
)
service = SERVICE_MEDIA_PLAY if value else SERVICE_MEDIA_PAUSE
params = {ATTR_ENTITY_ID: self.entity_id}
self.call_service(DOMAIN, service, params)
def set_play_stop(self, value):
"""Move switch state to value if call came from HomeKit."""
_LOGGER.debug(
'%s: Set switch state for "play_stop" to %s', self.entity_id, value
)
service = SERVICE_MEDIA_PLAY if value else SERVICE_MEDIA_STOP
params = {ATTR_ENTITY_ID: self.entity_id}
self.call_service(DOMAIN, service, params)
def set_toggle_mute(self, value):
"""Move switch state to value if call came from HomeKit."""
_LOGGER.debug(
'%s: Set switch state for "toggle_mute" to %s', self.entity_id, value
)
params = {ATTR_ENTITY_ID: self.entity_id, ATTR_MEDIA_VOLUME_MUTED: value}
self.call_service(DOMAIN, SERVICE_VOLUME_MUTE, params)
@callback
def async_update_state(self, new_state):
"""Update switch state after state changed."""
current_state = new_state.state
if self.chars[FEATURE_ON_OFF]:
hk_state = current_state not in MEDIA_PLAYER_OFF_STATES
_LOGGER.debug(
'%s: Set current state for "on_off" to %s', self.entity_id, hk_state
)
if self.chars[FEATURE_ON_OFF].value != hk_state:
self.chars[FEATURE_ON_OFF].set_value(hk_state)
if self.chars[FEATURE_PLAY_PAUSE]:
hk_state = current_state == STATE_PLAYING
_LOGGER.debug(
'%s: Set current state for "play_pause" to %s',
self.entity_id,
hk_state,
)
if self.chars[FEATURE_PLAY_PAUSE].value != hk_state:
self.chars[FEATURE_PLAY_PAUSE].set_value(hk_state)
if self.chars[FEATURE_PLAY_STOP]:
hk_state = current_state == STATE_PLAYING
_LOGGER.debug(
'%s: Set current state for "play_stop" to %s',
self.entity_id,
hk_state,
)
if self.chars[FEATURE_PLAY_STOP].value != hk_state:
self.chars[FEATURE_PLAY_STOP].set_value(hk_state)
if self.chars[FEATURE_TOGGLE_MUTE]:
current_state = bool(new_state.attributes.get(ATTR_MEDIA_VOLUME_MUTED))
_LOGGER.debug(
'%s: Set current state for "toggle_mute" to %s',
self.entity_id,
current_state,
)
if self.chars[FEATURE_TOGGLE_MUTE].value != current_state:
self.chars[FEATURE_TOGGLE_MUTE].set_value(current_state)
@TYPES.register("TelevisionMediaPlayer")
class TelevisionMediaPlayer(HomeAccessory):
"""Generate a Television Media Player accessory."""
def __init__(self, *args):
"""Initialize a Switch accessory object."""
super().__init__(*args, category=CATEGORY_TELEVISION)
state = self.hass.states.get(self.entity_id)
self.support_select_source = False
self.sources = []
self.chars_tv = [CHAR_REMOTE_KEY]
self.chars_speaker = []
features = state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
self._supports_play_pause = features & (SUPPORT_PLAY | SUPPORT_PAUSE)
if features & SUPPORT_VOLUME_MUTE or features & SUPPORT_VOLUME_STEP:
self.chars_speaker.extend(
(CHAR_NAME, CHAR_ACTIVE, CHAR_VOLUME_CONTROL_TYPE, CHAR_VOLUME_SELECTOR)
)
if features & SUPPORT_VOLUME_SET:
self.chars_speaker.append(CHAR_VOLUME)
source_list = state.attributes.get(ATTR_INPUT_SOURCE_LIST, [])
if source_list and features & SUPPORT_SELECT_SOURCE:
self.support_select_source = True
serv_tv = self.add_preload_service(SERV_TELEVISION, self.chars_tv)
self.set_primary_service(serv_tv)
serv_tv.configure_char(CHAR_CONFIGURED_NAME, value=self.display_name)
serv_tv.configure_char(CHAR_SLEEP_DISCOVER_MODE, value=True)
self.char_active = serv_tv.configure_char(
CHAR_ACTIVE, setter_callback=self.set_on_off
)
self.char_remote_key = serv_tv.configure_char(
CHAR_REMOTE_KEY, setter_callback=self.set_remote_key
)
if CHAR_VOLUME_SELECTOR in self.chars_speaker:
serv_speaker = self.add_preload_service(
SERV_TELEVISION_SPEAKER, self.chars_speaker
)
serv_tv.add_linked_service(serv_speaker)
name = f"{self.display_name} Volume"
serv_speaker.configure_char(CHAR_NAME, value=name)
serv_speaker.configure_char(CHAR_ACTIVE, value=1)
self.char_mute = serv_speaker.configure_char(
CHAR_MUTE, value=False, setter_callback=self.set_mute
)
volume_control_type = 1 if CHAR_VOLUME in self.chars_speaker else 2
serv_speaker.configure_char(
CHAR_VOLUME_CONTROL_TYPE, value=volume_control_type
)
self.char_volume_selector = serv_speaker.configure_char(
CHAR_VOLUME_SELECTOR, setter_callback=self.set_volume_step
)
if CHAR_VOLUME in self.chars_speaker:
self.char_volume = serv_speaker.configure_char(
CHAR_VOLUME, setter_callback=self.set_volume
)
if self.support_select_source:
self.sources = source_list
self.char_input_source = serv_tv.configure_char(
CHAR_ACTIVE_IDENTIFIER, setter_callback=self.set_input_source
)
for index, source in enumerate(self.sources):
serv_input = self.add_preload_service(
SERV_INPUT_SOURCE, [CHAR_IDENTIFIER, CHAR_NAME]
)
serv_tv.add_linked_service(serv_input)
serv_input.configure_char(CHAR_CONFIGURED_NAME, value=source)
serv_input.configure_char(CHAR_NAME, value=source)
serv_input.configure_char(CHAR_IDENTIFIER, value=index)
serv_input.configure_char(CHAR_IS_CONFIGURED, value=True)
input_type = 3 if "hdmi" in source.lower() else 0
serv_input.configure_char(CHAR_INPUT_SOURCE_TYPE, value=input_type)
serv_input.configure_char(CHAR_CURRENT_VISIBILITY_STATE, value=False)
_LOGGER.debug("%s: Added source %s", self.entity_id, source)
self.async_update_state(state)
def set_on_off(self, value):
"""Move switch state to value if call came from HomeKit."""
_LOGGER.debug('%s: Set switch state for "on_off" to %s', self.entity_id, value)
service = SERVICE_TURN_ON if value else SERVICE_TURN_OFF
params = {ATTR_ENTITY_ID: self.entity_id}
self.call_service(DOMAIN, service, params)
def set_mute(self, value):
"""Move switch state to value if call came from HomeKit."""
_LOGGER.debug(
'%s: Set switch state for "toggle_mute" to %s', self.entity_id, value
)
params = {ATTR_ENTITY_ID: self.entity_id, ATTR_MEDIA_VOLUME_MUTED: value}
self.call_service(DOMAIN, SERVICE_VOLUME_MUTE, params)
def set_volume(self, value):
"""Send volume step value if call came from HomeKit."""
_LOGGER.debug("%s: Set volume to %s", self.entity_id, value)
params = {ATTR_ENTITY_ID: self.entity_id, ATTR_MEDIA_VOLUME_LEVEL: value}
self.call_service(DOMAIN, SERVICE_VOLUME_SET, params)
def set_volume_step(self, value):
"""Send volume step value if call came from HomeKit."""
_LOGGER.debug("%s: Step volume by %s", self.entity_id, value)
service = SERVICE_VOLUME_DOWN if value else SERVICE_VOLUME_UP
params = {ATTR_ENTITY_ID: self.entity_id}
self.call_service(DOMAIN, service, params)
def set_input_source(self, value):
"""Send input set value if call came from HomeKit."""
_LOGGER.debug("%s: Set current input to %s", self.entity_id, value)
source = self.sources[value]
params = {ATTR_ENTITY_ID: self.entity_id, ATTR_INPUT_SOURCE: source}
self.call_service(DOMAIN, SERVICE_SELECT_SOURCE, params)
def set_remote_key(self, value):
"""Send remote key value if call came from HomeKit."""
_LOGGER.debug("%s: Set remote key to %s", self.entity_id, value)
key_name = MEDIA_PLAYER_KEYS.get(value)
if key_name is None:
_LOGGER.warning("%s: Unhandled key press for %s", self.entity_id, value)
return
if key_name == KEY_PLAY_PAUSE and self._supports_play_pause:
# Handle Play Pause by directly updating the media player entity.
state = self.hass.states.get(self.entity_id).state
if state in (STATE_PLAYING, STATE_PAUSED):
service = (
SERVICE_MEDIA_PLAY if state == STATE_PAUSED else SERVICE_MEDIA_PAUSE
)
else:
service = SERVICE_MEDIA_PLAY_PAUSE
params = {ATTR_ENTITY_ID: self.entity_id}
self.call_service(DOMAIN, service, params)
else:
# Unhandled keys can be handled by listening to the event bus
self.hass.bus.fire(
EVENT_HOMEKIT_TV_REMOTE_KEY_PRESSED,
{ATTR_KEY_NAME: key_name, ATTR_ENTITY_ID: self.entity_id},
)
@callback
def async_update_state(self, new_state):
"""Update Television state after state changed."""
current_state = new_state.state
# Power state television
hk_state = 0
if current_state not in MEDIA_PLAYER_OFF_STATES:
hk_state = 1
_LOGGER.debug("%s: Set current active state to %s", self.entity_id, hk_state)
if self.char_active.value != hk_state:
self.char_active.set_value(hk_state)
# Set mute state
if CHAR_VOLUME_SELECTOR in self.chars_speaker:
current_mute_state = bool(new_state.attributes.get(ATTR_MEDIA_VOLUME_MUTED))
_LOGGER.debug(
"%s: Set current mute state to %s",
self.entity_id,
current_mute_state,
)
if self.char_mute.value != current_mute_state:
self.char_mute.set_value(current_mute_state)
# Set active input
if self.support_select_source and self.sources:
source_name = new_state.attributes.get(ATTR_INPUT_SOURCE)
_LOGGER.debug("%s: Set current input to %s", self.entity_id, source_name)
if source_name in self.sources:
index = self.sources.index(source_name)
if self.char_input_source.value != index:
self.char_input_source.set_value(index)
elif hk_state:
_LOGGER.warning(
"%s: Sources out of sync. Restart Home Assistant",
self.entity_id,
)
if self.char_input_source.value != 0:
self.char_input_source.set_value(0)
|
from __future__ import absolute_import, print_function
import cgi
import mimetypes
import os
import posixpath
import re
import shutil
from six import StringIO
from six.moves import BaseHTTPServer
from six.moves.urllib.parse import quote, unquote
__version__ = "0.1"
__all__ = ["SimpleHTTPRequestHandler"]
__author__ = "bones7456"
__home_page__ = "http://li2z.cn/"
class SimpleHTTPRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
"""Simple HTTP request handler with GET/HEAD/POST commands.
This serves files from the current directory and any of its
subdirectories. The MIME type for files is determined by
calling the .guess_type() method. It can also receive files uploaded
by the client.
The GET/HEAD/POST requests are identical except that the HEAD
request omits the actual contents of the file.
"""
server_version = "SimpleHTTPWithUpload/" + __version__
def do_GET(self):
"""Serve a GET request."""
f = self.send_head()
if f:
self.copyfile(f, self.wfile)
f.close()
def do_HEAD(self):
"""Serve a HEAD request."""
f = self.send_head()
if f:
f.close()
def do_POST(self):
"""Serve a POST request."""
r, info = self.deal_post_data()
print(r, info, "by: ", self.client_address)
f = StringIO()
f.write('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">')
f.write("<html>\n<title>Upload Result Page</title>\n")
f.write("<body>\n<h2>Upload Result Page</h2>\n")
f.write("<hr>\n")
if r:
f.write("<strong>Success:</strong>")
else:
f.write("<strong>Failed:</strong>")
f.write(info)
f.write("<br><a href=\"%s\">back</a>" % self.headers['referer'])
f.write("<hr><small>Powerd By: bones7456, check new version at ")
f.write("<a href=\"http://li2z.cn/?s=SimpleHTTPServerWithUpload\">")
f.write("here</a>.</small></body>\n</html>\n")
length = f.tell()
f.seek(0)
self.send_response(200)
self.send_header("Content-type", "text/html")
self.send_header("Content-Length", str(length))
self.end_headers()
if f:
self.copyfile(f, self.wfile)
f.close()
def deal_post_data(self):
boundary = self.headers.plisttext.split("=")[1]
remainbytes = int(self.headers['content-length'])
line = self.rfile.readline()
remainbytes -= len(line)
if not boundary in line:
return (False, "Content NOT begin with boundary")
line = self.rfile.readline()
remainbytes -= len(line)
fn = re.findall(r'Content-Disposition.*name="file"; filename="(.*)"', line)
if not fn:
return (False, "Can't find out file name...")
path = self.translate_path(self.path)
fn = os.path.join(path, fn[0])
line = self.rfile.readline()
remainbytes -= len(line)
line = self.rfile.readline()
remainbytes -= len(line)
try:
out = open(fn, 'wb')
except IOError:
return (False, "Can't create file to write, do you have permission to write?")
preline = self.rfile.readline()
remainbytes -= len(preline)
while remainbytes > 0:
line = self.rfile.readline()
remainbytes -= len(line)
if boundary in line:
preline = preline[0:-1]
if preline.endswith('\r'):
preline = preline[0:-1]
out.write(preline)
out.close()
return (True, "File '%s' upload success!" % fn)
else:
out.write(preline)
preline = line
return (False, "Unexpect Ends of data.")
def send_head(self):
"""Common code for GET and HEAD commands.
This sends the response code and MIME headers.
Return value is either a file object (which has to be copied
to the outputfile by the caller unless the command was HEAD,
and must be closed by the caller under all circumstances), or
None, in which case the caller has nothing further to do.
"""
path = self.translate_path(self.path)
f = None
if os.path.isdir(path):
if not self.path.endswith('/'):
# redirect browser - doing basically what apache does
self.send_response(301)
self.send_header("Location", self.path + "/")
self.end_headers()
return None
for index in "index.html", "index.htm":
index = os.path.join(path, index)
if os.path.exists(index):
path = index
break
else:
return self.list_directory(path)
ctype = self.guess_type(path)
try:
# Always read in binary mode. Opening files in text mode may cause
# newline translations, making the actual size of the content
# transmitted *less* than the content-length!
f = open(path, 'rb')
except IOError:
self.send_error(404, "File not found")
return None
self.send_response(200)
self.send_header("Content-type", ctype)
fs = os.fstat(f.fileno())
self.send_header("Content-Length", str(fs[6]))
self.send_header("Last-Modified", self.date_time_string(fs.st_mtime))
self.end_headers()
return f
def list_directory(self, path):
"""Helper to produce a directory listing (absent index.html).
Return value is either a file object, or None (indicating an
error). In either case, the headers are sent, making the
interface the same as for send_head().
"""
try:
list = os.listdir(path)
except os.error:
self.send_error(404, "No permission to list directory")
return None
list.sort(key=lambda a: a.lower())
f = StringIO()
displaypath = cgi.escape(unquote(self.path))
f.write('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">')
f.write("<html>\n<title>Directory listing for %s</title>\n" % displaypath)
f.write("<body>\n<h2>Directory listing for %s</h2>\n" % displaypath)
f.write("<hr>\n")
f.write("<form ENCTYPE=\"multipart/form-data\" method=\"post\">")
f.write("<input name=\"file\" type=\"file\"/>")
f.write("<input type=\"submit\" value=\"upload\"/></form>\n")
f.write("<hr>\n<ul>\n")
for name in list:
fullname = os.path.join(path, name)
displayname = linkname = name
# Append / for directories or @ for symbolic links
if os.path.isdir(fullname):
displayname = name + "/"
linkname = name + "/"
if os.path.islink(fullname):
displayname = name + "@"
# Note: a link to a directory displays with @ and links with /
f.write('<li><a href="%s">%s</a>\n' % (quote(linkname), cgi.escape(displayname)))
f.write("</ul>\n<hr>\n</body>\n</html>\n")
length = f.tell()
f.seek(0)
self.send_response(200)
self.send_header("Content-type", "text/html")
self.send_header("Content-Length", str(length))
self.end_headers()
return f
def translate_path(self, path):
"""Translate a /-separated PATH to the local filename syntax.
Components that mean special things to the local file system
(e.g. drive or directory names) are ignored. (XXX They should
probably be diagnosed.)
"""
# abandon query parameters
path = path.split('?', 1)[0]
path = path.split('#', 1)[0]
path = posixpath.normpath(unquote(path))
words = path.split('/')
words = filter(None, words)
path = os.getcwd()
for word in words:
drive, word = os.path.splitdrive(word)
head, word = os.path.split(word)
if word in (os.curdir, os.pardir): continue
path = os.path.join(path, word)
return path
def copyfile(self, source, outputfile):
"""Copy all data between two file objects.
The SOURCE argument is a file object open for reading
(or anything with a read() method) and the DESTINATION
argument is a file object open for writing (or
anything with a write() method).
The only reason for overriding this would be to change
the block size or perhaps to replace newlines by CRLF
-- note however that the default server uses this
to copy binary data as well.
"""
shutil.copyfileobj(source, outputfile)
def guess_type(self, path):
"""Guess the type of a file.
Argument is a PATH (a filename).
Return value is a string of the form type/subtype,
usable for a MIME Content-type header.
The default implementation looks the file's extension
up in the table self.extensions_map, using application/octet-stream
as a default; however it would be permissible (if
slow) to look inside the data to make a better guess.
"""
base, ext = posixpath.splitext(path)
if ext in self.extensions_map:
return self.extensions_map[ext]
ext = ext.lower()
if ext in self.extensions_map:
return self.extensions_map[ext]
else:
return self.extensions_map['']
if not mimetypes.inited:
mimetypes.init() # try to read system mime.types
extensions_map = mimetypes.types_map.copy()
extensions_map.update({
'': 'application/octet-stream', # Default
'.py': 'text/plain',
'.c': 'text/plain',
'.h': 'text/plain',
})
def main(port=8000):
server = BaseHTTPServer.HTTPServer(('0.0.0.0', port), SimpleHTTPRequestHandler)
try:
print('Serving HTTP on 0.0.0.0 port %d ...' % port)
print('local IP address is %s' % globals()['_stash'].libcore.get_lan_ip())
server.serve_forever()
except KeyboardInterrupt:
print('Server shutting down ...')
server.server_close()
if __name__ == '__main__':
import argparse
ap = argparse.ArgumentParser()
ap.add_argument('port', nargs='?', type=int, default=8000, help='port to serve HTTP on')
ns = ap.parse_args()
main(ns.port)
|
from enum import Enum
import pytest
from homeassistant.components import alarm_control_panel
from homeassistant.components.ness_alarm import (
ATTR_CODE,
ATTR_OUTPUT_ID,
CONF_DEVICE_PORT,
CONF_ZONE_ID,
CONF_ZONE_NAME,
CONF_ZONES,
DOMAIN,
SERVICE_AUX,
SERVICE_PANIC,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_HOST,
SERVICE_ALARM_ARM_AWAY,
SERVICE_ALARM_ARM_HOME,
SERVICE_ALARM_DISARM,
SERVICE_ALARM_TRIGGER,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMING,
STATE_ALARM_DISARMED,
STATE_ALARM_PENDING,
STATE_ALARM_TRIGGERED,
STATE_UNKNOWN,
)
from homeassistant.setup import async_setup_component
from tests.async_mock import MagicMock, patch
VALID_CONFIG = {
DOMAIN: {
CONF_HOST: "alarm.local",
CONF_DEVICE_PORT: 1234,
CONF_ZONES: [
{CONF_ZONE_NAME: "Zone 1", CONF_ZONE_ID: 1},
{CONF_ZONE_NAME: "Zone 2", CONF_ZONE_ID: 2},
],
}
}
async def test_setup_platform(hass, mock_nessclient):
"""Test platform setup."""
await async_setup_component(hass, DOMAIN, VALID_CONFIG)
assert hass.services.has_service(DOMAIN, "panic")
assert hass.services.has_service(DOMAIN, "aux")
await hass.async_block_till_done()
assert hass.states.get("alarm_control_panel.alarm_panel") is not None
assert hass.states.get("binary_sensor.zone_1") is not None
assert hass.states.get("binary_sensor.zone_2") is not None
assert mock_nessclient.keepalive.call_count == 1
assert mock_nessclient.update.call_count == 1
async def test_panic_service(hass, mock_nessclient):
"""Test calling panic service."""
await async_setup_component(hass, DOMAIN, VALID_CONFIG)
await hass.services.async_call(
DOMAIN, SERVICE_PANIC, blocking=True, service_data={ATTR_CODE: "1234"}
)
mock_nessclient.panic.assert_awaited_once_with("1234")
async def test_aux_service(hass, mock_nessclient):
"""Test calling aux service."""
await async_setup_component(hass, DOMAIN, VALID_CONFIG)
await hass.services.async_call(
DOMAIN, SERVICE_AUX, blocking=True, service_data={ATTR_OUTPUT_ID: 1}
)
mock_nessclient.aux.assert_awaited_once_with(1, True)
async def test_dispatch_state_change(hass, mock_nessclient):
"""Test calling aux service."""
await async_setup_component(hass, DOMAIN, VALID_CONFIG)
await hass.async_block_till_done()
on_state_change = mock_nessclient.on_state_change.call_args[0][0]
on_state_change(MockArmingState.ARMING)
await hass.async_block_till_done()
assert hass.states.is_state("alarm_control_panel.alarm_panel", STATE_ALARM_ARMING)
async def test_alarm_disarm(hass, mock_nessclient):
"""Test disarm."""
await async_setup_component(hass, DOMAIN, VALID_CONFIG)
await hass.async_block_till_done()
await hass.services.async_call(
alarm_control_panel.DOMAIN,
SERVICE_ALARM_DISARM,
blocking=True,
service_data={
ATTR_ENTITY_ID: "alarm_control_panel.alarm_panel",
ATTR_CODE: "1234",
},
)
mock_nessclient.disarm.assert_called_once_with("1234")
async def test_alarm_arm_away(hass, mock_nessclient):
"""Test disarm."""
await async_setup_component(hass, DOMAIN, VALID_CONFIG)
await hass.async_block_till_done()
await hass.services.async_call(
alarm_control_panel.DOMAIN,
SERVICE_ALARM_ARM_AWAY,
blocking=True,
service_data={
ATTR_ENTITY_ID: "alarm_control_panel.alarm_panel",
ATTR_CODE: "1234",
},
)
mock_nessclient.arm_away.assert_called_once_with("1234")
async def test_alarm_arm_home(hass, mock_nessclient):
"""Test disarm."""
await async_setup_component(hass, DOMAIN, VALID_CONFIG)
await hass.async_block_till_done()
await hass.services.async_call(
alarm_control_panel.DOMAIN,
SERVICE_ALARM_ARM_HOME,
blocking=True,
service_data={
ATTR_ENTITY_ID: "alarm_control_panel.alarm_panel",
ATTR_CODE: "1234",
},
)
mock_nessclient.arm_home.assert_called_once_with("1234")
async def test_alarm_trigger(hass, mock_nessclient):
"""Test disarm."""
await async_setup_component(hass, DOMAIN, VALID_CONFIG)
await hass.async_block_till_done()
await hass.services.async_call(
alarm_control_panel.DOMAIN,
SERVICE_ALARM_TRIGGER,
blocking=True,
service_data={
ATTR_ENTITY_ID: "alarm_control_panel.alarm_panel",
ATTR_CODE: "1234",
},
)
mock_nessclient.panic.assert_called_once_with("1234")
async def test_dispatch_zone_change(hass, mock_nessclient):
"""Test zone change events dispatch a signal to subscribers."""
await async_setup_component(hass, DOMAIN, VALID_CONFIG)
await hass.async_block_till_done()
on_zone_change = mock_nessclient.on_zone_change.call_args[0][0]
on_zone_change(1, True)
await hass.async_block_till_done()
assert hass.states.is_state("binary_sensor.zone_1", "on")
assert hass.states.is_state("binary_sensor.zone_2", "off")
async def test_arming_state_change(hass, mock_nessclient):
"""Test arming state change handing."""
states = [
(MockArmingState.UNKNOWN, STATE_UNKNOWN),
(MockArmingState.DISARMED, STATE_ALARM_DISARMED),
(MockArmingState.ARMING, STATE_ALARM_ARMING),
(MockArmingState.EXIT_DELAY, STATE_ALARM_ARMING),
(MockArmingState.ARMED, STATE_ALARM_ARMED_AWAY),
(MockArmingState.ENTRY_DELAY, STATE_ALARM_PENDING),
(MockArmingState.TRIGGERED, STATE_ALARM_TRIGGERED),
]
await async_setup_component(hass, DOMAIN, VALID_CONFIG)
await hass.async_block_till_done()
assert hass.states.is_state("alarm_control_panel.alarm_panel", STATE_UNKNOWN)
on_state_change = mock_nessclient.on_state_change.call_args[0][0]
for arming_state, expected_state in states:
on_state_change(arming_state)
await hass.async_block_till_done()
assert hass.states.is_state("alarm_control_panel.alarm_panel", expected_state)
class MockArmingState(Enum):
"""Mock nessclient.ArmingState enum."""
UNKNOWN = "UNKNOWN"
DISARMED = "DISARMED"
ARMING = "ARMING"
EXIT_DELAY = "EXIT_DELAY"
ARMED = "ARMED"
ENTRY_DELAY = "ENTRY_DELAY"
TRIGGERED = "TRIGGERED"
class MockClient:
"""Mock nessclient.Client stub."""
async def panic(self, code):
"""Handle panic."""
pass
async def disarm(self, code):
"""Handle disarm."""
pass
async def arm_away(self, code):
"""Handle arm_away."""
pass
async def arm_home(self, code):
"""Handle arm_home."""
pass
async def aux(self, output_id, state):
"""Handle auxiliary control."""
pass
async def keepalive(self):
"""Handle keepalive."""
pass
async def update(self):
"""Handle update."""
pass
def on_zone_change(self):
"""Handle on_zone_change."""
pass
def on_state_change(self):
"""Handle on_state_change."""
pass
async def close(self):
"""Handle close."""
pass
@pytest.fixture
def mock_nessclient():
"""Mock the nessclient Client constructor.
Replaces nessclient.Client with a Mock which always returns the same
MagicMock() instance.
"""
_mock_instance = MagicMock(MockClient())
_mock_factory = MagicMock()
_mock_factory.return_value = _mock_instance
with patch(
"homeassistant.components.ness_alarm.Client", new=_mock_factory, create=True
), patch(
"homeassistant.components.ness_alarm.ArmingState", new=MockArmingState
), patch(
"homeassistant.components.ness_alarm.alarm_control_panel.ArmingState",
new=MockArmingState,
):
yield _mock_instance
|
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker.linux_packages import speccpu
from perfkitbenchmarker.linux_packages import speccpu2006
FLAGS = flags.FLAGS
_SPECINT_BENCHMARKS = frozenset([
'perlbench', 'bzip2', 'gcc', 'mcf', 'gobmk', 'hmmer', 'sjeng',
'libquantum', 'h264ref', 'omnetpp', 'astar', 'xalancbmk'])
_SPECFP_BENCHMARKS = frozenset([
'bwaves', 'gamess', 'milc', 'zeusmp', 'gromacs', 'cactusADM',
'leslie3d', 'namd', 'dealII', 'soplex', 'povray', 'calculix',
'GemsFDTD', 'tonto', 'lbm', 'wrf', 'sphinx3'])
_SPECCPU_SUBSETS = frozenset(['int', 'fp', 'all'])
flags.DEFINE_enum(
'benchmark_subset', 'int',
_SPECFP_BENCHMARKS | _SPECINT_BENCHMARKS | _SPECCPU_SUBSETS,
'Used by the PKB speccpu2006 benchmark. Specifies a subset of SPEC CPU2006 '
'benchmarks to run.')
flags.DEFINE_enum('runspec_metric', 'rate', ['rate', 'speed'],
                  'SPEC test to run. Speed is a time-based metric, rate is '
                  'a throughput-based metric.')
BENCHMARK_NAME = 'speccpu2006'
BENCHMARK_CONFIG = """
speccpu2006:
description: Runs SPEC CPU2006
vm_groups:
default:
vm_spec: *default_single_core
disk_spec: *default_50_gb
"""
def GetConfig(user_config):
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def Prepare(benchmark_spec):
"""Installs SPEC CPU2006 on the target vm.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
vm = benchmark_spec.vms[0]
vm.Install('speccpu2006')
  # Set the attribute outside of the install function so the benchmark will
  # work even with --install_packages=False.
config = speccpu2006.GetSpecInstallConfig(vm.GetScratchDir())
setattr(vm, speccpu.VM_STATE_ATTR, config)
def Run(benchmark_spec):
"""Runs SPEC CPU2006 on the target vm.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample objects.
"""
vm = benchmark_spec.vms[0]
version_specific_parameters = []
if FLAGS.runspec_metric == 'rate':
version_specific_parameters.append(' --rate=%s ' % vm.NumCpusForBenchmark())
else:
version_specific_parameters.append(' --speed ')
speccpu.Run(vm, 'runspec',
FLAGS.benchmark_subset, version_specific_parameters)
log_files = []
# FIXME(liquncheng): Only reference runs generate SPEC scores. The log
# id is hardcoded as 001, which might change with different runspec
# parameters. SPEC CPU2006 will generate different logs for build, test
# run, training run and ref run.
if FLAGS.benchmark_subset in _SPECINT_BENCHMARKS | set(['int', 'all']):
log_files.append('CINT2006.001.ref.txt')
if FLAGS.benchmark_subset in _SPECFP_BENCHMARKS | set(['fp', 'all']):
log_files.append('CFP2006.001.ref.txt')
partial_results = FLAGS.benchmark_subset not in _SPECCPU_SUBSETS
return speccpu.ParseOutput(vm, log_files, partial_results,
FLAGS.runspec_metric)
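# A minimal sketch (illustrative values only, not part of the original module)
# of how the flags above translate into runspec arguments:
#
#   FLAGS.runspec_metric == 'rate', vm.NumCpusForBenchmark() == 8
#       -> version_specific_parameters == [' --rate=8 ']
#   FLAGS.runspec_metric == 'speed'
#       -> version_specific_parameters == [' --speed ']
#
# Likewise, --benchmark_subset='int' parses only CINT2006.001.ref.txt,
# 'fp' only CFP2006.001.ref.txt, and 'all' parses both.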
def Cleanup(benchmark_spec):
"""Cleans up SPEC CPU2006 from the target vm.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
vm = benchmark_spec.vms[0]
speccpu.Uninstall(vm)
|
import pytest
import yarl
import homeassistant.components.media_player as media_player
from homeassistant.components.media_player.const import (
DOMAIN as DOMAIN_MP,
SERVICE_PLAY_MEDIA,
)
import homeassistant.components.notify as notify
import homeassistant.components.tts as tts
from homeassistant.config import async_process_ha_core_config
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
from tests.common import assert_setup_component, async_mock_service
def relative_url(url):
"""Convert an absolute url to a relative one."""
return str(yarl.URL(url).relative())
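# Illustrative example (the URL below is an assumption, not from the tests):
#   relative_url("http://example.local:8123/api/tts_proxy/sample.mp3")
#       -> "/api/tts_proxy/sample.mp3"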
@pytest.fixture(autouse=True)
def mutagen_mock():
"""Mock writing tags."""
with patch(
"homeassistant.components.tts.SpeechManager.write_tags",
side_effect=lambda *args: args[1],
):
yield
@pytest.fixture(autouse=True)
async def internal_url_mock(hass):
"""Mock internal URL of the instance."""
await async_process_ha_core_config(
hass,
{"internal_url": "http://example.local:8123"},
)
async def test_setup_platform(hass):
"""Set up the tts platform ."""
config = {
notify.DOMAIN: {
"platform": "tts",
"name": "tts_test",
"tts_service": "tts.demo_say",
"media_player": "media_player.demo",
}
}
with assert_setup_component(1, notify.DOMAIN):
assert await async_setup_component(hass, notify.DOMAIN, config)
assert hass.services.has_service(notify.DOMAIN, "tts_test")
async def test_setup_component_and_test_service(hass):
"""Set up the demo platform and call service."""
calls = async_mock_service(hass, DOMAIN_MP, SERVICE_PLAY_MEDIA)
config = {
tts.DOMAIN: {"platform": "demo"},
media_player.DOMAIN: {"platform": "demo"},
notify.DOMAIN: {
"platform": "tts",
"name": "tts_test",
"tts_service": "tts.demo_say",
"media_player": "media_player.demo",
"language": "en",
},
}
with assert_setup_component(1, tts.DOMAIN):
assert await async_setup_component(hass, tts.DOMAIN, config)
with assert_setup_component(1, notify.DOMAIN):
assert await async_setup_component(hass, notify.DOMAIN, config)
await hass.services.async_call(
notify.DOMAIN,
"tts_test",
{
tts.ATTR_MESSAGE: "There is someone at the door.",
},
blocking=True,
)
await hass.async_block_till_done()
assert len(calls) == 1
|
import io
import os
import pytest
import nikola.plugins.command.init
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
def test_absolute_redirection(build, output_dir):
abs_source = os.path.join(output_dir, "redirects", "absolute_source.html")
assert os.path.exists(abs_source)
abs_destination = os.path.join(output_dir, "posts", "absolute.html")
assert os.path.exists(abs_destination)
with open(abs_destination) as abs_destination_fd:
abs_destination_content = abs_destination_fd.read()
redirect_tag = '<meta http-equiv="refresh" content="0; url=/redirects/absolute_source.html">'
assert redirect_tag in abs_destination_content
with open(abs_source) as abs_source_fd:
absolute_source_content = abs_source_fd.read()
assert absolute_source_content == "absolute"
def test_external_redirection(build, output_dir):
ext_link = os.path.join(output_dir, "external.html")
assert os.path.exists(ext_link)
with open(ext_link) as ext_link_fd:
ext_link_content = ext_link_fd.read()
redirect_tag = '<meta http-equiv="refresh" content="0; url=http://www.example.com/">'
assert redirect_tag in ext_link_content
def test_relative_redirection(build, output_dir):
rel_destination = os.path.join(output_dir, "relative.html")
assert os.path.exists(rel_destination)
rel_source = os.path.join(output_dir, "redirects", "rel_src.html")
assert os.path.exists(rel_source)
with open(rel_destination) as rel_destination_fd:
rel_destination_content = rel_destination_fd.read()
redirect_tag = '<meta http-equiv="refresh" content="0; url=redirects/rel_src.html">'
assert redirect_tag in rel_destination_content
with open(rel_source) as rel_source_fd:
rel_source_content = rel_source_fd.read()
assert rel_source_content == "relative"
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
redirects_dir = os.path.join(target_dir, "files", "redirects")
nikola.utils.makedirs(redirects_dir)
# Source file for absolute redirect
target_path = os.path.join(redirects_dir, "absolute_source.html")
with io.open(target_path, "w+", encoding="utf8") as outf:
outf.write("absolute")
# Source file for relative redirect
target_path = os.path.join(redirects_dir, "rel_src.html")
with io.open(target_path, "w+", encoding="utf8") as outf:
outf.write("relative")
# Configure usage of specific redirects
append_config(
target_dir,
"""
REDIRECTIONS = [
("posts/absolute.html", "/redirects/absolute_source.html"),
("external.html", "http://www.example.com/"),
("relative.html", "redirects/rel_src.html"),
]
""",
)
with cd(target_dir):
__main__.main(["build"])
|
import os
import unittest
import mock
from perfkitbenchmarker import test_util
from perfkitbenchmarker.linux_benchmarks import mnist_benchmark
from perfkitbenchmarker.sample import Sample
class MnistBenchmarkTestCase(unittest.TestCase,
test_util.SamplesTestMixin):
def setUp(self):
path = os.path.join(os.path.dirname(__file__), '..', 'data',
'mnist_output.txt')
with open(path) as fp:
self.contents = fp.read()
self.metadata_input = {'num_examples_per_epoch': 1251.1,
'train_batch_size': 1024}
self.metadata_output = {'num_examples_per_epoch': 1251.1,
'train_batch_size': 1024, 'step': 2000,
'elapsed_seconds': 0, 'epoch': 1.5985932379506036}
@mock.patch('time.time', mock.MagicMock(return_value=0))
def testTrainResults(self):
samples = mnist_benchmark.MakeSamplesFromTrainOutput(
self.metadata_input, self.contents, 0, 2000)
golden = [
Sample('Loss', 0.09562386, '', self.metadata_output),
Sample('Global Steps Per Second', 217.69966666666664,
'global_steps/sec', self.metadata_output),
Sample('Examples Per Second', 222924.33333333334,
'examples/sec', self.metadata_output)
]
self.assertEqual(samples, golden)
@mock.patch('time.time', mock.MagicMock(return_value=0))
def testEvalResults(self):
samples = mnist_benchmark.MakeSamplesFromEvalOutput(
self.metadata_input, self.contents, 0)
golden = [
Sample('Eval Loss', 0.03615343, '', self.metadata_output),
Sample('Accuracy', 98.77387, '%', self.metadata_output)
]
self.assertEqual(samples, golden)
if __name__ == '__main__':
unittest.main()
|
import numpy as np
from numpy.testing import assert_array_equal
import pytest
from mne.utils import requires_sklearn
from mne.decoding.time_frequency import TimeFrequency
@requires_sklearn
def test_timefrequency():
"""Test TimeFrequency."""
from sklearn.base import clone
# Init
n_freqs = 3
freqs = [20, 21, 22]
tf = TimeFrequency(freqs, sfreq=100)
for output in ['avg_power', 'foo', None]:
pytest.raises(ValueError, TimeFrequency, freqs, output=output)
tf = clone(tf)
# Fit
n_epochs, n_chans, n_times = 10, 2, 100
X = np.random.rand(n_epochs, n_chans, n_times)
tf.fit(X, None)
# Transform
tf = TimeFrequency(freqs, sfreq=100)
tf.fit_transform(X, None)
# 3-D X
Xt = tf.transform(X)
assert_array_equal(Xt.shape, [n_epochs, n_chans, n_freqs, n_times])
# 2-D X
Xt = tf.transform(X[:, 0, :])
assert_array_equal(Xt.shape, [n_epochs, n_freqs, n_times])
# 3-D with decim
tf = TimeFrequency(freqs, sfreq=100, decim=2)
Xt = tf.transform(X)
assert_array_equal(Xt.shape, [n_epochs, n_chans, n_freqs, n_times // 2])
|
from homeassistant.core import callback
from homeassistant.helpers.device_registry import CONNECTION_ZIGBEE
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from .const import DOMAIN as DECONZ_DOMAIN
class DeconzBase:
"""Common base for deconz entities and events."""
def __init__(self, device, gateway):
"""Set up device and add update callback to get data from websocket."""
self._device = device
self.gateway = gateway
@property
def unique_id(self):
"""Return a unique identifier for this device."""
return self._device.uniqueid
@property
def serial(self):
"""Return a serial number for this device."""
if self._device.uniqueid is None or self._device.uniqueid.count(":") != 7:
return None
return self._device.uniqueid.split("-", 1)[0]
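    # Illustrative sketch (the uniqueid format below is an assumption): a
    # uniqueid such as "00:11:22:33:44:55:66:77-01-0406" contains seven ":"
    # separators, so serial returns "00:11:22:33:44:55:66:77"; any other
    # shape returns None.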
@property
def device_info(self):
"""Return a device description for device registry."""
if self.serial is None:
return None
bridgeid = self.gateway.api.config.bridgeid
return {
"connections": {(CONNECTION_ZIGBEE, self.serial)},
"identifiers": {(DECONZ_DOMAIN, self.serial)},
"manufacturer": self._device.manufacturer,
"model": self._device.modelid,
"name": self._device.name,
"sw_version": self._device.swversion,
"via_device": (DECONZ_DOMAIN, bridgeid),
}
class DeconzDevice(DeconzBase, Entity):
"""Representation of a deCONZ device."""
TYPE = ""
def __init__(self, device, gateway):
"""Set up device and add update callback to get data from websocket."""
super().__init__(device, gateway)
self.gateway.entities[self.TYPE].add(self.unique_id)
@property
def entity_registry_enabled_default(self):
"""Return if the entity should be enabled when first added to the entity registry.
Daylight is a virtual sensor from deCONZ that should never be enabled by default.
"""
if self._device.type == "Daylight":
return False
return True
async def async_added_to_hass(self):
"""Subscribe to device events."""
self._device.register_callback(self.async_update_callback)
self.gateway.deconz_ids[self.entity_id] = self._device.deconz_id
self.async_on_remove(
async_dispatcher_connect(
self.hass, self.gateway.signal_reachable, self.async_update_callback
)
)
async def async_will_remove_from_hass(self) -> None:
"""Disconnect device object when removed."""
self._device.remove_callback(self.async_update_callback)
del self.gateway.deconz_ids[self.entity_id]
self.gateway.entities[self.TYPE].remove(self.unique_id)
@callback
def async_update_callback(self, force_update=False):
"""Update the device's state."""
if not force_update and self.gateway.ignore_state_updates:
return
self.async_write_ha_state()
@property
def available(self):
"""Return True if device is available."""
return self.gateway.available and self._device.reachable
@property
def name(self):
"""Return the name of the device."""
return self._device.name
@property
def should_poll(self):
"""No polling needed."""
return False
|
from gi.repository import GObject, Gtk, GtkSource
@Gtk.Template(resource_path='/org/gnome/meld/ui/findbar.ui')
class FindBar(Gtk.Grid):
__gtype_name__ = 'FindBar'
find_entry = Gtk.Template.Child()
find_next_button = Gtk.Template.Child()
find_previous_button = Gtk.Template.Child()
match_case = Gtk.Template.Child()
regex = Gtk.Template.Child()
replace_all_button = Gtk.Template.Child()
replace_button = Gtk.Template.Child()
replace_entry = Gtk.Template.Child()
whole_word = Gtk.Template.Child()
wrap_box = Gtk.Template.Child()
replace_mode = GObject.Property(type=bool, default=False)
@GObject.Signal(
name='activate-secondary',
flags=(
GObject.SignalFlags.RUN_FIRST |
GObject.SignalFlags.ACTION
),
)
def activate_secondary(self) -> None:
self._find_text(backwards=True)
def __init__(self, parent):
super().__init__()
self.search_context = None
self.notify_id = None
self.set_text_view(None)
        # Set up a signal for when the find bar loses focus
parent.connect('set-focus-child', self.on_focus_child)
# Create and bind our GtkSourceSearchSettings
settings = GtkSource.SearchSettings()
self.match_case.bind_property('active', settings, 'case-sensitive')
self.whole_word.bind_property('active', settings, 'at-word-boundaries')
self.regex.bind_property('active', settings, 'regex-enabled')
self.find_entry.bind_property('text', settings, 'search-text')
settings.set_wrap_around(True)
self.search_settings = settings
# Bind visibility and layout for find-and-replace mode
self.bind_property('replace_mode', self.replace_entry, 'visible')
self.bind_property('replace_mode', self.replace_all_button, 'visible')
self.bind_property('replace_mode', self.replace_button, 'visible')
self.bind_property(
'replace_mode', self, 'row-spacing', GObject.BindingFlags.DEFAULT,
lambda binding, replace_mode: 6 if replace_mode else 0)
def on_focus_child(self, container, widget):
if widget is not None:
visible = self.props.visible
if widget is not self and visible:
self.hide()
return False
def hide(self):
self.set_text_view(None)
self.wrap_box.set_visible(False)
Gtk.Widget.hide(self)
def update_match_state(self, *args):
# Note that -1 here implies that the search is still running
no_matches = (
self.search_context.props.occurrences_count == 0 and
self.search_settings.props.search_text
)
style_context = self.find_entry.get_style_context()
if no_matches:
style_context.add_class(Gtk.STYLE_CLASS_ERROR)
else:
style_context.remove_class(Gtk.STYLE_CLASS_ERROR)
def set_text_view(self, textview):
self.textview = textview
if textview is not None:
self.search_context = GtkSource.SearchContext.new(
textview.get_buffer(), self.search_settings)
self.search_context.set_highlight(True)
self.notify_id = self.search_context.connect(
'notify::occurrences-count', self.update_match_state)
else:
if self.notify_id:
self.search_context.disconnect(self.notify_id)
self.notify_id = None
self.search_context = None
def start_find(self, *, textview: Gtk.TextView, replace: bool, text: str):
self.replace_mode = replace
self.set_text_view(textview)
if text:
self.find_entry.set_text(text)
self.show()
self.find_entry.grab_focus()
def start_find_next(self, textview):
self.set_text_view(textview)
self._find_text()
def start_find_previous(self, textview):
self.set_text_view(textview)
self._find_text(backwards=True)
@Gtk.Template.Callback()
def on_find_next_button_clicked(self, button):
self._find_text()
@Gtk.Template.Callback()
def on_find_previous_button_clicked(self, button):
self._find_text(backwards=True)
@Gtk.Template.Callback()
def on_replace_button_clicked(self, entry):
buf = self.textview.get_buffer()
oldsel = buf.get_selection_bounds()
match = self._find_text(0)
newsel = buf.get_selection_bounds()
# Only replace if there is an already-selected match at the cursor
if (match and oldsel and oldsel[0].equal(newsel[0]) and
oldsel[1].equal(newsel[1])):
self.search_context.replace(
newsel[0], newsel[1], self.replace_entry.get_text(), -1)
self._find_text(0)
@Gtk.Template.Callback()
def on_replace_all_button_clicked(self, entry):
buf = self.textview.get_buffer()
saved_insert = buf.create_mark(
None, buf.get_iter_at_mark(buf.get_insert()), True)
self.search_context.replace_all(self.replace_entry.get_text(), -1)
if not saved_insert.get_deleted():
buf.place_cursor(buf.get_iter_at_mark(saved_insert))
self.textview.scroll_to_mark(
buf.get_insert(), 0.25, True, 0.5, 0.5)
@Gtk.Template.Callback()
def on_toggle_replace_button_clicked(self, button):
self.replace_mode = not self.replace_mode
@Gtk.Template.Callback()
def on_find_entry_changed(self, entry):
self._find_text(0)
@Gtk.Template.Callback()
def on_stop_search(self, search_entry):
self.hide()
def _find_text(self, start_offset=1, backwards=False):
if not self.textview or not self.search_context:
return
buf = self.textview.get_buffer()
insert = buf.get_iter_at_mark(buf.get_insert())
start, end = buf.get_bounds()
self.wrap_box.set_visible(False)
if not backwards:
insert.forward_chars(start_offset)
match, start, end, wrapped = self.search_context.forward(insert)
else:
match, start, end, wrapped = self.search_context.backward(insert)
if match:
self.wrap_box.set_visible(wrapped)
buf.place_cursor(start)
buf.move_mark(buf.get_selection_bound(), end)
self.textview.scroll_to_mark(
buf.get_insert(), 0.25, True, 0.5, 0.5)
return True
else:
buf.place_cursor(buf.get_iter_at_mark(buf.get_insert()))
self.wrap_box.set_visible(False)
FindBar.set_css_name('meld-find-bar')
|
from lxml.etree import XPath, ElementBase
from lxml.html import fromstring, XHTML_NAMESPACE
from lxml.html import _forms_xpath, _options_xpath, _nons, _transform_result
from lxml.html import defs
import copy
try:
basestring
except NameError:
# Python 3
basestring = str
__all__ = ['FormNotFound', 'fill_form', 'fill_form_html',
'insert_errors', 'insert_errors_html',
'DefaultErrorCreator']
class FormNotFound(LookupError):
"""
Raised when no form can be found
"""
_form_name_xpath = XPath('descendant-or-self::form[name=$name]|descendant-or-self::x:form[name=$name]', namespaces={'x':XHTML_NAMESPACE})
_input_xpath = XPath('|'.join(['descendant-or-self::'+_tag for _tag in ('input','select','textarea','x:input','x:select','x:textarea')]),
namespaces={'x':XHTML_NAMESPACE})
_label_for_xpath = XPath('//label[@for=$for_id]|//x:label[@for=$for_id]',
namespaces={'x':XHTML_NAMESPACE})
_name_xpath = XPath('descendant-or-self::*[@name=$name]')
def fill_form(
el,
values,
form_id=None,
form_index=None,
):
el = _find_form(el, form_id=form_id, form_index=form_index)
_fill_form(el, values)
def fill_form_html(html, values, form_id=None, form_index=None):
result_type = type(html)
if isinstance(html, basestring):
doc = fromstring(html)
else:
doc = copy.deepcopy(html)
fill_form(doc, values, form_id=form_id, form_index=form_index)
return _transform_result(result_type, doc)
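# Hedged usage sketch (the markup and values below are assumptions, not from
# the original module):
#
#   html = '<form><input type="text" name="user"></form>'
#   filled = fill_form_html(html, {'user': 'alice'})
#   # 'filled' is returned in the same type as 'html', with the input element
#   # now carrying value="alice".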
def _fill_form(el, values):
counts = {}
if hasattr(values, 'mixed'):
# For Paste request parameters
values = values.mixed()
inputs = _input_xpath(el)
for input in inputs:
name = input.get('name')
if not name:
continue
if _takes_multiple(input):
value = values.get(name, [])
if not isinstance(value, (list, tuple)):
value = [value]
_fill_multiple(input, value)
elif name not in values:
continue
else:
index = counts.get(name, 0)
counts[name] = index + 1
value = values[name]
if isinstance(value, (list, tuple)):
try:
value = value[index]
except IndexError:
continue
elif index > 0:
continue
_fill_single(input, value)
def _takes_multiple(input):
if _nons(input.tag) == 'select' and input.get('multiple'):
# FIXME: multiple="0"?
return True
type = input.get('type', '').lower()
if type in ('radio', 'checkbox'):
return True
return False
def _fill_multiple(input, value):
type = input.get('type', '').lower()
if type == 'checkbox':
v = input.get('value')
if v is None:
if not value:
result = False
else:
result = value[0]
if isinstance(value, basestring):
# The only valid "on" value for an unnamed checkbox is 'on'
result = result == 'on'
_check(input, result)
else:
_check(input, v in value)
elif type == 'radio':
v = input.get('value')
_check(input, v in value)
else:
assert _nons(input.tag) == 'select'
for option in _options_xpath(input):
v = option.get('value')
if v is None:
# This seems to be the default, at least on IE
# FIXME: but I'm not sure
v = option.text_content()
_select(option, v in value)
def _check(el, check):
if check:
el.set('checked', '')
else:
if 'checked' in el.attrib:
del el.attrib['checked']
def _select(el, select):
if select:
el.set('selected', '')
else:
if 'selected' in el.attrib:
del el.attrib['selected']
def _fill_single(input, value):
if _nons(input.tag) == 'textarea':
input.text = value
else:
input.set('value', value)
def _find_form(el, form_id=None, form_index=None):
if form_id is None and form_index is None:
forms = _forms_xpath(el)
for form in forms:
return form
raise FormNotFound(
"No forms in page")
if form_id is not None:
form = el.get_element_by_id(form_id)
if form is not None:
return form
forms = _form_name_xpath(el, name=form_id)
if forms:
return forms[0]
else:
raise FormNotFound(
"No form with the name or id of %r (forms: %s)"
                % (form_id, ', '.join(_find_form_ids(el))))
if form_index is not None:
forms = _forms_xpath(el)
try:
return forms[form_index]
except IndexError:
raise FormNotFound(
"There is no form with the index %r (%i forms found)"
% (form_index, len(forms)))
def _find_form_ids(el):
forms = _forms_xpath(el)
if not forms:
yield '(no forms)'
return
for index, form in enumerate(forms):
if form.get('id'):
if form.get('name'):
yield '%s or %s' % (form.get('id'),
form.get('name'))
else:
yield form.get('id')
elif form.get('name'):
yield form.get('name')
else:
yield '(unnamed form %s)' % index
############################################################
## Error filling
############################################################
class DefaultErrorCreator(object):
insert_before = True
block_inside = True
error_container_tag = 'div'
error_message_class = 'error-message'
error_block_class = 'error-block'
default_message = "Invalid"
def __init__(self, **kw):
for name, value in kw.items():
if not hasattr(self, name):
raise TypeError(
"Unexpected keyword argument: %s" % name)
setattr(self, name, value)
def __call__(self, el, is_block, message):
error_el = el.makeelement(self.error_container_tag)
if self.error_message_class:
error_el.set('class', self.error_message_class)
if is_block and self.error_block_class:
error_el.set('class', error_el.get('class', '')+' '+self.error_block_class)
if message is None or message == '':
message = self.default_message
if isinstance(message, ElementBase):
error_el.append(message)
else:
assert isinstance(message, basestring), (
"Bad message; should be a string or element: %r" % message)
error_el.text = message or self.default_message
if is_block and self.block_inside:
if self.insert_before:
error_el.tail = el.text
el.text = None
el.insert(0, error_el)
else:
el.append(error_el)
else:
parent = el.getparent()
pos = parent.index(el)
if self.insert_before:
parent.insert(pos, error_el)
else:
error_el.tail = el.tail
el.tail = None
parent.insert(pos+1, error_el)
default_error_creator = DefaultErrorCreator()
def insert_errors(
el,
errors,
form_id=None,
form_index=None,
error_class="error",
error_creator=default_error_creator,
):
el = _find_form(el, form_id=form_id, form_index=form_index)
for name, error in errors.items():
if error is None:
continue
for error_el, message in _find_elements_for_name(el, name, error):
assert isinstance(message, (basestring, type(None), ElementBase)), (
"Bad message: %r" % message)
_insert_error(error_el, message, error_class, error_creator)
def insert_errors_html(html, values, **kw):
result_type = type(html)
if isinstance(html, basestring):
doc = fromstring(html)
else:
doc = copy.deepcopy(html)
insert_errors(doc, values, **kw)
return _transform_result(result_type, doc)
def _insert_error(el, error, error_class, error_creator):
if _nons(el.tag) in defs.empty_tags or _nons(el.tag) == 'textarea':
is_block = False
else:
is_block = True
if _nons(el.tag) != 'form' and error_class:
_add_class(el, error_class)
if el.get('id'):
labels = _label_for_xpath(el, for_id=el.get('id'))
if labels:
for label in labels:
_add_class(label, error_class)
error_creator(el, is_block, error)
def _add_class(el, class_name):
if el.get('class'):
el.set('class', el.get('class')+' '+class_name)
else:
el.set('class', class_name)
def _find_elements_for_name(form, name, error):
if name is None:
# An error for the entire form
yield form, error
return
if name.startswith('#'):
# By id
el = form.get_element_by_id(name[1:])
if el is not None:
yield el, error
return
els = _name_xpath(form, name=name)
if not els:
# FIXME: should this raise an exception?
return
if not isinstance(error, (list, tuple)):
yield els[0], error
return
# FIXME: if error is longer than els, should it raise an error?
for el, err in zip(els, error):
if err is None:
continue
yield el, err
|
import collections
from typing import Callable, Dict, List, Set, Tuple, Union
import attr
import zigpy.profiles.zha
import zigpy.profiles.zll
import zigpy.zcl as zcl
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR
from homeassistant.components.climate import DOMAIN as CLIMATE
from homeassistant.components.cover import DOMAIN as COVER
from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER
from homeassistant.components.fan import DOMAIN as FAN
from homeassistant.components.light import DOMAIN as LIGHT
from homeassistant.components.lock import DOMAIN as LOCK
from homeassistant.components.sensor import DOMAIN as SENSOR
from homeassistant.components.switch import DOMAIN as SWITCH
# importing channels updates registries
from . import channels as zha_channels # noqa: F401 pylint: disable=unused-import
from .decorators import CALLABLE_T, DictRegistry, SetRegistry
from .typing import ChannelType
GROUP_ENTITY_DOMAINS = [LIGHT, SWITCH, FAN]
PHILLIPS_REMOTE_CLUSTER = 0xFC00
SMARTTHINGS_ACCELERATION_CLUSTER = 0xFC02
SMARTTHINGS_ARRIVAL_SENSOR_DEVICE_TYPE = 0x8000
SMARTTHINGS_HUMIDITY_CLUSTER = 0xFC45
REMOTE_DEVICE_TYPES = {
zigpy.profiles.zha.PROFILE_ID: [
zigpy.profiles.zha.DeviceType.COLOR_CONTROLLER,
zigpy.profiles.zha.DeviceType.COLOR_DIMMER_SWITCH,
zigpy.profiles.zha.DeviceType.COLOR_SCENE_CONTROLLER,
zigpy.profiles.zha.DeviceType.DIMMER_SWITCH,
zigpy.profiles.zha.DeviceType.LEVEL_CONTROL_SWITCH,
zigpy.profiles.zha.DeviceType.NON_COLOR_CONTROLLER,
zigpy.profiles.zha.DeviceType.NON_COLOR_SCENE_CONTROLLER,
zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH,
zigpy.profiles.zha.DeviceType.ON_OFF_LIGHT_SWITCH,
zigpy.profiles.zha.DeviceType.REMOTE_CONTROL,
zigpy.profiles.zha.DeviceType.SCENE_SELECTOR,
],
zigpy.profiles.zll.PROFILE_ID: [
zigpy.profiles.zll.DeviceType.COLOR_CONTROLLER,
zigpy.profiles.zll.DeviceType.COLOR_SCENE_CONTROLLER,
zigpy.profiles.zll.DeviceType.CONTROL_BRIDGE,
zigpy.profiles.zll.DeviceType.CONTROLLER,
zigpy.profiles.zll.DeviceType.SCENE_CONTROLLER,
],
}
REMOTE_DEVICE_TYPES = collections.defaultdict(list, REMOTE_DEVICE_TYPES)
SINGLE_INPUT_CLUSTER_DEVICE_CLASS = {
# this works for now but if we hit conflicts we can break it out to
# a different dict that is keyed by manufacturer
SMARTTHINGS_ACCELERATION_CLUSTER: BINARY_SENSOR,
SMARTTHINGS_HUMIDITY_CLUSTER: SENSOR,
zcl.clusters.closures.DoorLock.cluster_id: LOCK,
zcl.clusters.closures.WindowCovering.cluster_id: COVER,
zcl.clusters.general.AnalogInput.cluster_id: SENSOR,
zcl.clusters.general.MultistateInput.cluster_id: SENSOR,
zcl.clusters.general.OnOff.cluster_id: SWITCH,
zcl.clusters.general.PowerConfiguration.cluster_id: SENSOR,
zcl.clusters.homeautomation.ElectricalMeasurement.cluster_id: SENSOR,
zcl.clusters.hvac.Fan.cluster_id: FAN,
zcl.clusters.measurement.IlluminanceMeasurement.cluster_id: SENSOR,
zcl.clusters.measurement.OccupancySensing.cluster_id: BINARY_SENSOR,
zcl.clusters.measurement.PressureMeasurement.cluster_id: SENSOR,
zcl.clusters.measurement.RelativeHumidity.cluster_id: SENSOR,
zcl.clusters.measurement.TemperatureMeasurement.cluster_id: SENSOR,
zcl.clusters.security.IasZone.cluster_id: BINARY_SENSOR,
zcl.clusters.smartenergy.Metering.cluster_id: SENSOR,
}
SINGLE_OUTPUT_CLUSTER_DEVICE_CLASS = {
zcl.clusters.general.OnOff.cluster_id: BINARY_SENSOR
}
SWITCH_CLUSTERS = SetRegistry()
BINARY_SENSOR_CLUSTERS = SetRegistry()
BINARY_SENSOR_CLUSTERS.add(SMARTTHINGS_ACCELERATION_CLUSTER)
BINDABLE_CLUSTERS = SetRegistry()
CHANNEL_ONLY_CLUSTERS = SetRegistry()
CLIMATE_CLUSTERS = SetRegistry()
CUSTOM_CLUSTER_MAPPINGS = {}
DEVICE_CLASS = {
zigpy.profiles.zha.PROFILE_ID: {
SMARTTHINGS_ARRIVAL_SENSOR_DEVICE_TYPE: DEVICE_TRACKER,
zigpy.profiles.zha.DeviceType.THERMOSTAT: CLIMATE,
zigpy.profiles.zha.DeviceType.COLOR_DIMMABLE_LIGHT: LIGHT,
zigpy.profiles.zha.DeviceType.COLOR_TEMPERATURE_LIGHT: LIGHT,
zigpy.profiles.zha.DeviceType.DIMMABLE_BALLAST: LIGHT,
zigpy.profiles.zha.DeviceType.DIMMABLE_LIGHT: LIGHT,
zigpy.profiles.zha.DeviceType.DIMMABLE_PLUG_IN_UNIT: LIGHT,
zigpy.profiles.zha.DeviceType.EXTENDED_COLOR_LIGHT: LIGHT,
zigpy.profiles.zha.DeviceType.LEVEL_CONTROLLABLE_OUTPUT: COVER,
zigpy.profiles.zha.DeviceType.ON_OFF_BALLAST: SWITCH,
zigpy.profiles.zha.DeviceType.ON_OFF_LIGHT: LIGHT,
zigpy.profiles.zha.DeviceType.ON_OFF_PLUG_IN_UNIT: SWITCH,
zigpy.profiles.zha.DeviceType.SHADE: COVER,
zigpy.profiles.zha.DeviceType.SMART_PLUG: SWITCH,
},
zigpy.profiles.zll.PROFILE_ID: {
zigpy.profiles.zll.DeviceType.COLOR_LIGHT: LIGHT,
zigpy.profiles.zll.DeviceType.COLOR_TEMPERATURE_LIGHT: LIGHT,
zigpy.profiles.zll.DeviceType.DIMMABLE_LIGHT: LIGHT,
zigpy.profiles.zll.DeviceType.DIMMABLE_PLUGIN_UNIT: LIGHT,
zigpy.profiles.zll.DeviceType.EXTENDED_COLOR_LIGHT: LIGHT,
zigpy.profiles.zll.DeviceType.ON_OFF_LIGHT: LIGHT,
zigpy.profiles.zll.DeviceType.ON_OFF_PLUGIN_UNIT: SWITCH,
},
}
DEVICE_CLASS = collections.defaultdict(dict, DEVICE_CLASS)
DEVICE_TRACKER_CLUSTERS = SetRegistry()
LIGHT_CLUSTERS = SetRegistry()
OUTPUT_CHANNEL_ONLY_CLUSTERS = SetRegistry()
CLIENT_CHANNELS_REGISTRY = DictRegistry()
COMPONENT_CLUSTERS = {
BINARY_SENSOR: BINARY_SENSOR_CLUSTERS,
CLIMATE: CLIMATE_CLUSTERS,
DEVICE_TRACKER: DEVICE_TRACKER_CLUSTERS,
LIGHT: LIGHT_CLUSTERS,
SWITCH: SWITCH_CLUSTERS,
}
ZIGBEE_CHANNEL_REGISTRY = DictRegistry()
def set_or_callable(value):
"""Convert single str or None to a set. Pass through callables and sets."""
if value is None:
return frozenset()
if callable(value):
return value
if isinstance(value, (frozenset, set, list)):
return frozenset(value)
return frozenset([str(value)])
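# Illustrative normalization examples (inputs are assumptions):
#   set_or_callable(None)                 -> frozenset()
#   set_or_callable("on_off")             -> frozenset({"on_off"})
#   set_or_callable({"on_off", "level"})  -> frozenset({"on_off", "level"})
#   set_or_callable(lambda mfr: True)     -> the callable, passed through unchanged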
@attr.s(frozen=True)
class MatchRule:
"""Match a ZHA Entity to a channel name or generic id."""
channel_names: Union[Callable, Set[str], str] = attr.ib(
factory=frozenset, converter=set_or_callable
)
generic_ids: Union[Callable, Set[str], str] = attr.ib(
factory=frozenset, converter=set_or_callable
)
manufacturers: Union[Callable, Set[str], str] = attr.ib(
factory=frozenset, converter=set_or_callable
)
models: Union[Callable, Set[str], str] = attr.ib(
factory=frozenset, converter=set_or_callable
)
aux_channels: Union[Callable, Set[str], str] = attr.ib(
factory=frozenset, converter=set_or_callable
)
@property
def weight(self) -> int:
"""Return the weight of the matching rule.
        Most specific matches are preferred over less specific ones. Model matching
        rules take priority over manufacturer matching rules, and rules matching a
        single model/manufacturer rank higher than rules matching multiple
        models/manufacturers. Any model or manufacturer rule ranks higher than rules
        matching only channels.
        For channel name/generic id matching, however, rules matching multiple
        channels rank higher than rules matching a single channel.
"""
weight = 0
if self.models:
weight += 401 - (1 if callable(self.models) else len(self.models))
if self.manufacturers:
weight += 301 - (
1 if callable(self.manufacturers) else len(self.manufacturers)
)
weight += 10 * len(self.channel_names)
weight += 5 * len(self.generic_ids)
weight += 1 * len(self.aux_channels)
return weight
def claim_channels(self, channel_pool: List[ChannelType]) -> List[ChannelType]:
"""Return a list of channels this rule matches + aux channels."""
claimed = []
if isinstance(self.channel_names, frozenset):
claimed.extend([ch for ch in channel_pool if ch.name in self.channel_names])
if isinstance(self.generic_ids, frozenset):
claimed.extend(
[ch for ch in channel_pool if ch.generic_id in self.generic_ids]
)
if isinstance(self.aux_channels, frozenset):
claimed.extend([ch for ch in channel_pool if ch.name in self.aux_channels])
return claimed
def strict_matched(self, manufacturer: str, model: str, channels: List) -> bool:
"""Return True if this device matches the criteria."""
return all(self._matched(manufacturer, model, channels))
def loose_matched(self, manufacturer: str, model: str, channels: List) -> bool:
"""Return True if this device matches the criteria."""
return any(self._matched(manufacturer, model, channels))
def _matched(self, manufacturer: str, model: str, channels: List) -> list:
"""Return a list of field matches."""
if not any(attr.asdict(self).values()):
return [False]
matches = []
if self.channel_names:
channel_names = {ch.name for ch in channels}
matches.append(self.channel_names.issubset(channel_names))
if self.generic_ids:
all_generic_ids = {ch.generic_id for ch in channels}
matches.append(self.generic_ids.issubset(all_generic_ids))
if self.manufacturers:
if callable(self.manufacturers):
matches.append(self.manufacturers(manufacturer))
else:
matches.append(manufacturer in self.manufacturers)
if self.models:
if callable(self.models):
matches.append(self.models(model))
else:
matches.append(model in self.models)
return matches
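# Illustrative weight calculation (rule contents are assumptions): a rule with
# models={"model_x"} and channel_names={"on_off"} scores (401 - 1) + 10 = 410,
# while a rule with only channel_names={"on_off", "level"} scores 2 * 10 = 20,
# so the model-specific rule is tried first by get_entity().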
RegistryDictType = Dict[str, Dict[MatchRule, CALLABLE_T]]
GroupRegistryDictType = Dict[str, CALLABLE_T]
class ZHAEntityRegistry:
"""Channel to ZHA Entity mapping."""
def __init__(self):
"""Initialize Registry instance."""
self._strict_registry: RegistryDictType = collections.defaultdict(dict)
self._loose_registry: RegistryDictType = collections.defaultdict(dict)
self._group_registry: GroupRegistryDictType = {}
def get_entity(
self,
component: str,
manufacturer: str,
model: str,
channels: List[ChannelType],
default: CALLABLE_T = None,
) -> Tuple[CALLABLE_T, List[ChannelType]]:
"""Match a ZHA Channels to a ZHA Entity class."""
matches = self._strict_registry[component]
for match in sorted(matches, key=lambda x: x.weight, reverse=True):
if match.strict_matched(manufacturer, model, channels):
claimed = match.claim_channels(channels)
return self._strict_registry[component][match], claimed
return default, []
def get_group_entity(self, component: str) -> CALLABLE_T:
"""Match a ZHA group to a ZHA Entity class."""
return self._group_registry.get(component)
def strict_match(
self,
component: str,
channel_names: Union[Callable, Set[str], str] = None,
generic_ids: Union[Callable, Set[str], str] = None,
manufacturers: Union[Callable, Set[str], str] = None,
models: Union[Callable, Set[str], str] = None,
aux_channels: Union[Callable, Set[str], str] = None,
) -> Callable[[CALLABLE_T], CALLABLE_T]:
"""Decorate a strict match rule."""
rule = MatchRule(
channel_names, generic_ids, manufacturers, models, aux_channels
)
def decorator(zha_ent: CALLABLE_T) -> CALLABLE_T:
"""Register a strict match rule.
            All non-empty fields of a match rule must match.
"""
self._strict_registry[component][rule] = zha_ent
return zha_ent
return decorator
def loose_match(
self,
component: str,
channel_names: Union[Callable, Set[str], str] = None,
generic_ids: Union[Callable, Set[str], str] = None,
manufacturers: Union[Callable, Set[str], str] = None,
models: Union[Callable, Set[str], str] = None,
aux_channels: Union[Callable, Set[str], str] = None,
) -> Callable[[CALLABLE_T], CALLABLE_T]:
"""Decorate a loose match rule."""
rule = MatchRule(
channel_names, generic_ids, manufacturers, models, aux_channels
)
def decorator(zha_entity: CALLABLE_T) -> CALLABLE_T:
"""Register a loose match rule.
            At least one non-empty field of the match rule must match.
"""
self._loose_registry[component][rule] = zha_entity
return zha_entity
return decorator
def group_match(self, component: str) -> Callable[[CALLABLE_T], CALLABLE_T]:
"""Decorate a group match rule."""
def decorator(zha_ent: CALLABLE_T) -> CALLABLE_T:
"""Register a group match rule."""
self._group_registry[component] = zha_ent
return zha_ent
return decorator
ZHA_ENTITIES = ZHAEntityRegistry()
|
import os
import subprocess
import sys
import zipfile
def main(argv):
if len(argv) != 2:
sys.exit('Usage: %s <outfile>' % argv[0])
zip_file_path = argv[1]
version = subprocess.check_output(('pkb.py', '--version')).rstrip()
with zipfile.ZipFile(zip_file_path, 'w') as zip_file:
for dir_path, _, file_names in os.walk('perfkitbenchmarker'):
for file_name in file_names:
if not file_name.endswith('.pyc'):
zip_file.write(os.path.join(dir_path, file_name))
for file_name in ('AUTHORS', 'CHANGES.md', 'CONTRIBUTING.md', 'LICENSE',
'README.md', 'requirements.txt'):
zip_file.write(file_name)
zip_file.write('pkb.py', '__main__.py')
zip_file.writestr('perfkitbenchmarker/version.txt', version)
if __name__ == '__main__':
main(sys.argv)
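# Note (hedged, not part of the original script): because pkb.py is stored in
# the archive as __main__.py, the resulting zip can be executed directly with
# `python <archive>.zip`, where <archive> is whatever path was passed as
# <outfile>.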
|
from typing import List
import voluptuous as vol
from homeassistant.components.automation import AutomationActionType
from homeassistant.components.device_automation import (
TRIGGER_BASE_SCHEMA,
toggle_entity,
)
from homeassistant.components.homeassistant.triggers import (
numeric_state as numeric_state_trigger,
)
from homeassistant.const import (
CONF_ABOVE,
CONF_BELOW,
CONF_DEVICE_ID,
CONF_DOMAIN,
CONF_ENTITY_ID,
CONF_FOR,
CONF_PLATFORM,
CONF_TYPE,
PERCENTAGE,
)
from homeassistant.core import CALLBACK_TYPE, HomeAssistant
from homeassistant.helpers import config_validation as cv, entity_registry
from homeassistant.helpers.typing import ConfigType
from . import DOMAIN
TARGET_TRIGGER_SCHEMA = vol.All(
TRIGGER_BASE_SCHEMA.extend(
{
vol.Required(CONF_ENTITY_ID): cv.entity_id,
vol.Required(CONF_TYPE): "target_humidity_changed",
vol.Optional(CONF_BELOW): vol.Any(vol.Coerce(int)),
vol.Optional(CONF_ABOVE): vol.Any(vol.Coerce(int)),
vol.Optional(CONF_FOR): cv.positive_time_period_dict,
}
),
cv.has_at_least_one_key(CONF_BELOW, CONF_ABOVE),
)
TOGGLE_TRIGGER_SCHEMA = toggle_entity.TRIGGER_SCHEMA.extend(
{vol.Required(CONF_DOMAIN): DOMAIN}
)
TRIGGER_SCHEMA = vol.Any(TARGET_TRIGGER_SCHEMA, TOGGLE_TRIGGER_SCHEMA)
async def async_get_triggers(hass: HomeAssistant, device_id: str) -> List[dict]:
"""List device triggers for Humidifier devices."""
registry = await entity_registry.async_get_registry(hass)
triggers = await toggle_entity.async_get_triggers(hass, device_id, DOMAIN)
# Get all the integrations entities for this device
for entry in entity_registry.async_entries_for_device(registry, device_id):
if entry.domain != DOMAIN:
continue
triggers.append(
{
CONF_PLATFORM: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "target_humidity_changed",
}
)
return triggers
async def async_attach_trigger(
hass: HomeAssistant,
config: ConfigType,
action: AutomationActionType,
automation_info: dict,
) -> CALLBACK_TYPE:
"""Attach a trigger."""
trigger_type = config[CONF_TYPE]
if trigger_type == "target_humidity_changed":
numeric_state_config = {
numeric_state_trigger.CONF_PLATFORM: "numeric_state",
numeric_state_trigger.CONF_ENTITY_ID: config[CONF_ENTITY_ID],
numeric_state_trigger.CONF_VALUE_TEMPLATE: "{{ state.attributes.humidity }}",
}
if CONF_ABOVE in config:
numeric_state_config[CONF_ABOVE] = config[CONF_ABOVE]
if CONF_BELOW in config:
numeric_state_config[CONF_BELOW] = config[CONF_BELOW]
if CONF_FOR in config:
numeric_state_config[CONF_FOR] = config[CONF_FOR]
numeric_state_config = numeric_state_trigger.TRIGGER_SCHEMA(
numeric_state_config
)
return await numeric_state_trigger.async_attach_trigger(
hass, numeric_state_config, action, automation_info, platform_type="device"
)
return await toggle_entity.async_attach_trigger(
hass, config, action, automation_info
)
async def async_get_trigger_capabilities(hass: HomeAssistant, config):
"""List trigger capabilities."""
trigger_type = config[CONF_TYPE]
if trigger_type == "target_humidity_changed":
return {
"extra_fields": vol.Schema(
{
vol.Optional(
CONF_ABOVE, description={"suffix": PERCENTAGE}
): vol.Coerce(int),
vol.Optional(
CONF_BELOW, description={"suffix": PERCENTAGE}
): vol.Coerce(int),
vol.Optional(CONF_FOR): cv.positive_time_period_dict,
}
)
}
return await toggle_entity.async_get_trigger_capabilities(hass, config)
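# Illustrative trigger configuration accepted by TARGET_TRIGGER_SCHEMA (all
# values below are assumptions chosen for the example):
#
#   {
#       "platform": "device",
#       "device_id": "abcdef0123456789",
#       "domain": DOMAIN,
#       "entity_id": "humidifier.bedroom",
#       "type": "target_humidity_changed",
#       "above": 40,
#   }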
|
import logging
import asyncio
from typing import Union, List, Literal
from datetime import timedelta
from copy import copy
import contextlib
import discord
from redbot.core import Config, checks, commands
from redbot.core.utils import AsyncIter
from redbot.core.utils.chat_formatting import pagify, box
from redbot.core.utils.antispam import AntiSpam
from redbot.core.bot import Red
from redbot.core.i18n import Translator, cog_i18n, set_contextual_locales_from_guild
from redbot.core.utils.predicates import MessagePredicate
from redbot.core.utils.tunnel import Tunnel
_ = Translator("Reports", __file__)
log = logging.getLogger("red.reports")
@cog_i18n(_)
class Reports(commands.Cog):
"""Create user reports that server staff can respond to.
Users can open reports using `[p]report`. These are then sent
to a channel in the server for staff, and the report creator
gets a DM. Both can be used to communicate.
"""
default_guild_settings = {"output_channel": None, "active": False, "next_ticket": 1}
default_report = {"report": {}}
    # This can be made configurable later if it
# becomes an issue.
# Intervals should be a list of tuples in the form
# (period: timedelta, max_frequency: int)
# see redbot/core/utils/antispam.py for more details
intervals = [
(timedelta(seconds=5), 1),
(timedelta(minutes=5), 3),
(timedelta(hours=1), 10),
(timedelta(days=1), 24),
]
def __init__(self, bot: Red):
super().__init__()
self.bot = bot
self.config = Config.get_conf(self, 78631113035100160, force_registration=True)
self.config.register_guild(**self.default_guild_settings)
self.config.init_custom("REPORT", 2)
self.config.register_custom("REPORT", **self.default_report)
self.antispam = {}
self.user_cache = []
self.tunnel_store = {}
# (guild, ticket#):
# {'tun': Tunnel, 'msgs': List[int]}
async def red_delete_data_for_user(
self,
*,
requester: Literal["discord_deleted_user", "owner", "user", "user_strict"],
user_id: int,
):
if requester != "discord_deleted_user":
return
all_reports = await self.config.custom("REPORT").all()
steps = 0
paths = []
# this doesn't use async iter intentionally due to the nested iterations
for guild_id_str, tickets in all_reports.items():
for ticket_number, ticket in tickets.items():
steps += 1
if not steps % 100:
await asyncio.sleep(0) # yield context
if ticket.get("report", {}).get("user_id", 0) == user_id:
paths.append((guild_id_str, ticket_number))
async with self.config.custom("REPORT").all() as all_reports:
async for guild_id_str, ticket_number in AsyncIter(paths, steps=100):
r = all_reports[guild_id_str][ticket_number]["report"]
r["user_id"] = 0xDE1
# this might include EUD, and a report of a deleted user
# that's been unhandled for long enough for the
# user to be deleted and the bot receive a request like this...
r["report"] = "[REPORT DELETED DUE TO DISCORD REQUEST]"
@property
def tunnels(self):
return [x["tun"] for x in self.tunnel_store.values()]
@checks.admin_or_permissions(manage_guild=True)
@commands.guild_only()
@commands.group(name="reportset")
async def reportset(self, ctx: commands.Context):
"""Manage Reports."""
pass
@checks.admin_or_permissions(manage_guild=True)
@reportset.command(name="output")
async def reportset_output(self, ctx: commands.Context, channel: discord.TextChannel):
"""Set the channel where reports will be sent."""
await self.config.guild(ctx.guild).output_channel.set(channel.id)
await ctx.send(_("The report channel has been set."))
@checks.admin_or_permissions(manage_guild=True)
@reportset.command(name="toggle", aliases=["toggleactive"])
async def reportset_toggle(self, ctx: commands.Context):
"""Enable or Disable reporting for this server."""
active = await self.config.guild(ctx.guild).active()
active = not active
await self.config.guild(ctx.guild).active.set(active)
if active:
await ctx.send(_("Reporting is now enabled"))
else:
await ctx.send(_("Reporting is now disabled."))
async def internal_filter(self, m: discord.Member, mod=False, perms=None):
if perms and m.guild_permissions >= perms:
return True
if mod and await self.bot.is_mod(m):
return True
# The following line is for consistency with how perms are handled
# in Red, though I'm not sure it makes sense to use here.
if await self.bot.is_owner(m):
return True
async def discover_guild(
self,
author: discord.User,
*,
mod: bool = False,
permissions: Union[discord.Permissions, dict] = None,
prompt: str = "",
):
"""
        Discover which of the guilds shared between the bot and the provided
        user satisfies the conditions (mod or permissions, treated as an OR).
        prompt is the text shown to the user when asking them to pick a guild.
"""
shared_guilds = []
if permissions is None:
perms = discord.Permissions()
elif isinstance(permissions, discord.Permissions):
perms = permissions
else:
perms = discord.Permissions(**permissions)
async for guild in AsyncIter(self.bot.guilds, steps=100):
x = guild.get_member(author.id)
if x is not None:
if await self.internal_filter(x, mod, perms):
shared_guilds.append(guild)
if len(shared_guilds) == 0:
raise ValueError("No Qualifying Shared Guilds")
if len(shared_guilds) == 1:
return shared_guilds[0]
output = ""
guilds = sorted(shared_guilds, key=lambda g: g.name)
for i, guild in enumerate(guilds, 1):
output += "{}: {}\n".format(i, guild.name)
output += "\n{}".format(prompt)
for page in pagify(output, delims=["\n"]):
await author.send(box(page))
try:
message = await self.bot.wait_for(
"message",
check=MessagePredicate.same_context(channel=author.dm_channel, user=author),
timeout=45,
)
except asyncio.TimeoutError:
await author.send(_("You took too long to select. Try again later."))
return None
try:
message = int(message.content.strip())
guild = guilds[message - 1]
except (ValueError, IndexError):
await author.send(_("That wasn't a valid choice."))
return None
else:
return guild
async def send_report(self, msg: discord.Message, guild: discord.Guild):
author = guild.get_member(msg.author.id)
report = msg.clean_content
channel_id = await self.config.guild(guild).output_channel()
channel = guild.get_channel(channel_id)
if channel is None:
return None
files: List[discord.File] = await Tunnel.files_from_attach(msg)
ticket_number = await self.config.guild(guild).next_ticket()
await self.config.guild(guild).next_ticket.set(ticket_number + 1)
if await self.bot.embed_requested(channel, author):
em = discord.Embed(description=report)
em.set_author(
name=_("Report from {author}{maybe_nick}").format(
author=author, maybe_nick=(f" ({author.nick})" if author.nick else "")
),
icon_url=author.avatar_url,
)
em.set_footer(text=_("Report #{}").format(ticket_number))
send_content = None
else:
em = None
send_content = _("Report from {author.mention} (Ticket #{number})").format(
author=author, number=ticket_number
)
send_content += "\n" + report
try:
await Tunnel.message_forwarder(
destination=channel, content=send_content, embed=em, files=files
)
except (discord.Forbidden, discord.HTTPException):
return None
await self.config.custom("REPORT", guild.id, ticket_number).report.set(
{"user_id": author.id, "report": report}
)
return ticket_number
@commands.group(name="report", invoke_without_command=True)
async def report(self, ctx: commands.Context, *, _report: str = ""):
"""Send a report.
Use without arguments for interactive reporting, or do
`[p]report <text>` to use it non-interactively.
"""
author = ctx.author
guild = ctx.guild
if guild is None:
guild = await self.discover_guild(
author, prompt=_("Select a server to make a report in by number.")
)
if guild is None:
return
g_active = await self.config.guild(guild).active()
if not g_active:
return await author.send(_("Reporting has not been enabled for this server"))
if guild.id not in self.antispam:
self.antispam[guild.id] = {}
if author.id not in self.antispam[guild.id]:
self.antispam[guild.id][author.id] = AntiSpam(self.intervals)
if self.antispam[guild.id][author.id].spammy:
return await author.send(
_(
"You've sent too many reports recently. "
"Please contact a server admin if this is important matter, "
"or please wait and try again later."
)
)
if author.id in self.user_cache:
return await author.send(
_(
"Please finish making your prior report before trying to make an "
"additional one!"
)
)
self.user_cache.append(author.id)
if _report:
_m = copy(ctx.message)
_m.content = _report
_m.content = _m.clean_content
val = await self.send_report(_m, guild)
else:
try:
await author.send(
_(
"Please respond to this message with your Report."
"\nYour report should be a single message"
)
)
except discord.Forbidden:
return await ctx.send(_("This requires DMs enabled."))
try:
message = await self.bot.wait_for(
"message",
check=MessagePredicate.same_context(ctx, channel=author.dm_channel),
timeout=180,
)
except asyncio.TimeoutError:
return await author.send(_("You took too long. Try again later."))
else:
val = await self.send_report(message, guild)
with contextlib.suppress(discord.Forbidden, discord.HTTPException):
if val is None:
if await self.config.guild(ctx.guild).output_channel() is None:
await author.send(
_(
"This server has no reports channel set up. Please contact a server admin."
)
)
else:
await author.send(
_("There was an error sending your report, please contact a server admin.")
)
else:
await author.send(_("Your report was submitted. (Ticket #{})").format(val))
self.antispam[guild.id][author.id].stamp()
@report.after_invoke
async def report_cleanup(self, ctx: commands.Context):
"""
The logic is cleaner this way
"""
if ctx.author.id in self.user_cache:
self.user_cache.remove(ctx.author.id)
if ctx.guild and ctx.invoked_subcommand is None:
if ctx.channel.permissions_for(ctx.guild.me).manage_messages:
try:
await ctx.message.delete()
except discord.NotFound:
pass
@commands.Cog.listener()
async def on_raw_reaction_add(self, payload: discord.RawReactionActionEvent):
"""
oh dear....
"""
if not str(payload.emoji) == "\N{NEGATIVE SQUARED CROSS MARK}":
return
_id = payload.message_id
t = next(filter(lambda x: _id in x[1]["msgs"], self.tunnel_store.items()), None)
if t is None:
return
guild = t[0][0]
tun = t[1]["tun"]
if payload.user_id in [x.id for x in tun.members]:
await set_contextual_locales_from_guild(self.bot, guild)
await tun.react_close(
uid=payload.user_id, message=_("{closer} has closed the correspondence")
)
self.tunnel_store.pop(t[0], None)
@commands.Cog.listener()
async def on_message(self, message: discord.Message):
to_remove = []
for k, v in self.tunnel_store.items():
guild, ticket_number = k
if await self.bot.cog_disabled_in_guild(self, guild):
to_remove.append(k)
continue
await set_contextual_locales_from_guild(self.bot, guild)
topic = _("Re: ticket# {ticket_number} in {guild.name}").format(
ticket_number=ticket_number, guild=guild
)
            # Tunnels won't forward unintended messages; this is safe.
msgs = await v["tun"].communicate(message=message, topic=topic)
if msgs:
self.tunnel_store[k]["msgs"] = msgs
for key in to_remove:
if tun := self.tunnel_store.pop(key, None):
guild, ticket = key
await set_contextual_locales_from_guild(self.bot, guild)
await tun["tun"].close_because_disabled(
_(
"Correspondence about ticket# {ticket_number} in "
"{guild.name} has been ended due "
"to reports being disabled in that server."
).format(ticket_number=ticket, guild=guild)
)
@commands.guild_only()
@checks.mod_or_permissions(manage_roles=True)
@report.command(name="interact")
async def response(self, ctx, ticket_number: int):
"""Open a message tunnel.
This tunnel will forward things you say in this channel
to the ticket opener's direct messages.
Tunnels do not persist across bot restarts.
"""
guild = ctx.guild
rec = await self.config.custom("REPORT", guild.id, ticket_number).report()
try:
user = guild.get_member(rec.get("user_id"))
except KeyError:
return await ctx.send(_("That ticket doesn't seem to exist"))
if user is None:
return await ctx.send(_("That user isn't here anymore."))
tun = Tunnel(recipient=user, origin=ctx.channel, sender=ctx.author)
if tun is None:
return await ctx.send(
_(
"Either you or the user you are trying to reach already "
"has an open communication."
)
)
big_topic = _(
" Anything you say or upload here "
"(8MB file size limitation on uploads) "
"will be forwarded to them until the communication is closed.\n"
"You can close a communication at any point by reacting with "
"the \N{NEGATIVE SQUARED CROSS MARK} to the last message received.\n"
"Any message successfully forwarded will be marked with "
"\N{WHITE HEAVY CHECK MARK}.\n"
"Tunnels are not persistent across bot restarts."
)
topic = (
_(
"A moderator in the server `{guild.name}` has opened a 2-way communication about "
"ticket number {ticket_number}."
).format(guild=guild, ticket_number=ticket_number)
+ big_topic
)
try:
m = await tun.communicate(message=ctx.message, topic=topic, skip_message_content=True)
except discord.Forbidden:
await ctx.send(_("That user has DMs disabled."))
else:
self.tunnel_store[(guild, ticket_number)] = {"tun": tun, "msgs": m}
await ctx.send(
_(
"You have opened a 2-way communication about ticket number {ticket_number}."
).format(ticket_number=ticket_number)
+ big_topic
)
|
from .lexer import Token
###{standalone
class Indenter:
def __init__(self):
self.paren_level = None
self.indent_level = None
assert self.tab_len > 0
def handle_NL(self, token):
if self.paren_level > 0:
return
yield token
indent_str = token.rsplit('\n', 1)[1] # Tabs and spaces
indent = indent_str.count(' ') + indent_str.count('\t') * self.tab_len
if indent > self.indent_level[-1]:
self.indent_level.append(indent)
yield Token.new_borrow_pos(self.INDENT_type, indent_str, token)
else:
while indent < self.indent_level[-1]:
self.indent_level.pop()
yield Token.new_borrow_pos(self.DEDENT_type, indent_str, token)
assert indent == self.indent_level[-1], '%s != %s' % (indent, self.indent_level[-1])
def _process(self, stream):
for token in stream:
if token.type == self.NL_type:
for t in self.handle_NL(token):
yield t
else:
yield token
if token.type in self.OPEN_PAREN_types:
self.paren_level += 1
elif token.type in self.CLOSE_PAREN_types:
self.paren_level -= 1
assert self.paren_level >= 0
while len(self.indent_level) > 1:
self.indent_level.pop()
yield Token(self.DEDENT_type, '')
assert self.indent_level == [0], self.indent_level
def process(self, stream):
self.paren_level = 0
self.indent_level = [0]
return self._process(stream)
# XXX Hack for ContextualLexer. Maybe there's a more elegant solution?
@property
def always_accept(self):
return (self.NL_type,)
###}
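# A minimal sketch (not part of the original module) of how this base class is
# typically specialised; the token type names and tab width below are
# assumptions chosen for illustration:
#
#   class ExampleIndenter(Indenter):
#       NL_type = '_NEWLINE'
#       OPEN_PAREN_types = ['LPAR', 'LSQB', 'LBRACE']
#       CLOSE_PAREN_types = ['RPAR', 'RSQB', 'RBRACE']
#       INDENT_type = '_INDENT'
#       DEDENT_type = '_DEDENT'
#       tab_len = 8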
|
import time
import unittest
import object_storage_api_tests # noqa: importing for flags
import object_storage_interface
import validate_service
class MockObjectStorageService(object_storage_interface.ObjectStorageServiceBase): # noqa
def __init__(self):
self.bucket = None
self.objects = {}
def _CheckBucket(self, bucket):
"""Make sure that we are only passed one bucket name.
Args:
bucket: the name of a bucket.
Raises: ValueError, if this object has been passed a different
bucket name previously.
"""
if self.bucket is None:
self.bucket = bucket
elif self.bucket != bucket:
raise ValueError(
'MockObjectStorageService passed two bucket names: %s and %s' %
(self.bucket, bucket))
def ListObjects(self, bucket, prefix):
self._CheckBucket(bucket)
return [value
            for name, value in self.objects.items()
if name.startswith(prefix)]
def DeleteObjects(self, bucket, objects_to_delete, objects_deleted=None):
self._CheckBucket(bucket)
for name in objects_to_delete:
if name in self.objects:
del self.objects[name]
if objects_deleted is not None:
objects_deleted.append(name)
def WriteObjectFromBuffer(self, bucket, object, stream, size):
self._CheckBucket(bucket)
stream.seek(0)
self.objects[object] = stream.read(size)
return time.time(), 0.001
  def ReadObject(self, bucket, object):
    self._CheckBucket(bucket)
    # Look up the object; raises KeyError if it was never written.
    self.objects[object]
    return time.time(), 0.001
class TestScenarios(unittest.TestCase):
"""Test that the benchmark scenarios complete.
Specifically, given a correctly operating service
(MockObjectStorageService), verify that the benchmarking scenarios
run to completion without raising an exception.
"""
def setUp(self):
self.FLAGS = object_storage_api_tests.FLAGS
self.FLAGS([])
self.objects_written_file = self.FLAGS.objects_written_file
self.FLAGS.objects_written_file = '/tmp/objects-written'
def tearDown(self):
self.FLAGS.objects_written_file = self.objects_written_file
def testOneByteRW(self):
object_storage_api_tests.OneByteRWBenchmark(MockObjectStorageService())
def testListConsistency(self):
object_storage_api_tests.ListConsistencyBenchmark(
MockObjectStorageService())
def testSingleStreamThroughput(self):
object_storage_api_tests.SingleStreamThroughputBenchmark(
MockObjectStorageService())
def testCleanupBucket(self):
object_storage_api_tests.CleanupBucket(MockObjectStorageService())
def testMultiStreamWriteAndRead(self):
service = MockObjectStorageService()
# Have to sequence MultiStreamWrites and MultiStreamReads because
# MultiStreamReads will read from the objects_written_file that
# MultiStreamWrites generates.
object_storage_api_tests.MultiStreamWrites(service)
object_storage_api_tests.MultiStreamReads(service)
class TestValidateService(unittest.TestCase):
"""Validate the ValidateService script."""
def setUp(self):
self.FLAGS = object_storage_api_tests.FLAGS
self.FLAGS([])
self.objects_written_file = self.FLAGS.objects_written_file
self.FLAGS.objects_written_file = '/tmp/objects-written'
def testValidateService(self):
validate_service.ValidateService(MockObjectStorageService())
if __name__ == '__main__':
unittest.main()
|
from __future__ import unicode_literals
from lib.data.data import pystrs
from lib.fun.decorator import magic
from lib.fun.fun import range_compatible
def pid6_magic(*args):
    """Chinese ID card, last 6 digits (positions 13-18)."""
    # zero-pad single-digit values, e.g. 3 -> "03"
    posrule = lambda _: str(_) if _ >= 10 else "0" + str(_)
    # digits 13-14: day of birth (01-31)
    value1314 = " ".join(posrule(x) for x in range_compatible(1, 32))
    # digits 15-16: sequence number (01-99)
    value1516 = " ".join(posrule(x) for x in range_compatible(1, 100))
    # digit 18: checksum character, 0-9 or X
    post18 = ("0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "X")
    # digits 17-18: sex digit (17th digit, odd = male, even = female) plus checksum
    value1718 = ""
if pystrs.default_sex == pystrs.sex_range[0]:
rand = ("1", "3", "5", "7", "9")
for _ in rand:
for _p in post18:
value1718 += _ + _p + " "
elif pystrs.default_sex == pystrs.sex_range[1]:
rand = ("0", "2", "4", "6", "8")
for _ in rand:
for _p in post18:
value1718 += _ + _p + " "
elif pystrs.default_sex == pystrs.sex_range[2]:
rand = " ".join(str(_) for _ in range_compatible(0, 10))
for _ in rand.split(" "):
for _p in post18:
value1718 += _ + _p + " "
@magic
def pid6():
for v1314 in value1314.split(" "):
for v1516 in value1516.split(" "):
for v1718 in value1718.split(" "):
if v1718 != "":
yield "".join(v1314 + v1516 + v1718)
|
from flexx import flx
from flexxamples.demos.drawing import Drawing
from flexxamples.howtos.splitters import Split
from flexxamples.demos.twente import Twente
class MultiApp(flx.TabLayout):
def init(self):
Drawing(title='Drawing')
Split(title='Split')
Twente(title='Twente')
if __name__ == '__main__':
    # This example is set up as a desktop app
flx.launch(MultiApp)
flx.run()
|
import os
import posixpath
import re
from absl import flags
from perfkitbenchmarker import errors
from perfkitbenchmarker import linux_packages
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.linux_packages import amdblis
from perfkitbenchmarker.linux_packages import openblas
PACKAGE_NAME = 'hpcc'
HPCC_TAR = 'hpcc-1.5.0.tar.gz'
HPCC_URL = 'https://icl.cs.utk.edu/projectsfiles/hpcc/download/' + HPCC_TAR
PREPROVISIONED_DATA = {
HPCC_TAR: '0a6fef7ab9f3347e549fed65ebb98234feea9ee18aea0c8f59baefbe3cf7ffb8'
}
PACKAGE_DATA_URL = {
HPCC_TAR: HPCC_URL
}
HPCC_DIR = '%s/hpcc-1.5.0' % linux_packages.INSTALL_DIR
HPCC_VERSION = '1.5.0'
MAKE_FLAVOR_CBLAS = 'Linux_PII_CBLAS'
MAKE_FLAVOR_MKL = 'intel64'
MAKE_FLAVOR_OPEN_BLAS = 'OPEN_BLAS'
MAKE_FLAVOR_AMD_BLIS = 'AMD_BLIS'
HPCC_MAKEFILE_CBLAS = 'Make.%s' % MAKE_FLAVOR_CBLAS
HPCC_MAKEFILE_MKL = 'Make.%s' % MAKE_FLAVOR_MKL
HPCC_MAKEFILE_OPEN_BLAS = 'Make.%s' % MAKE_FLAVOR_OPEN_BLAS
HPCC_MAKEFILE_AMD_BLIS = 'Make.%s' % MAKE_FLAVOR_AMD_BLIS
HPCC_MAKEFILE_PATH_MKL = '%s/hpl/%s' % (HPCC_DIR, HPCC_MAKEFILE_MKL)
HPCC_MAKEFILE_PATH_OPEN_BLAS = '%s/hpl/%s' % (HPCC_DIR, HPCC_MAKEFILE_OPEN_BLAS)
HPCC_MAKEFILE_PATH_AMD_BLIS = '%s/hpl/%s' % (HPCC_DIR, HPCC_MAKEFILE_AMD_BLIS)
HPCC_MATH_LIBRARY_OPEN_BLAS = 'openblas'
HPCC_MATH_LIBRARY_AMD_BLIS = 'amdblis'
HPCC_MATH_LIBRARY_MKL = 'mkl'
# A dict mapping HPCC benchmarks to dicts mapping summary result names to units.
# The name of the summary result is added as a metric with that name and the
# specified units.
HPCC_METRIC_MAP = {
'MPI RandomAccess': {
'MPIRandomAccess_time': 'seconds',
'MPIRandomAccess_CheckTime': 'seconds',
'MPIRandomAccess_ExeUpdates': 'updates',
'MPIRandomAccess_GUPs': 'GUP/s',
},
'StarRandomAccess': {
'StarRandomAccess_GUPs': 'GUP/s',
},
'SingleRandomAccess': {
'SingleRandomAccess_GUPs': 'GUP/s',
},
'MPI RandomAccess LCG': {
'MPIRandomAccess_LCG_time': 'seconds',
'MPIRandomAccess_LCG_CheckTime': 'seconds',
'MPIRandomAccess_LCG_ExeUpdates': 'updates',
'MPIRandomAccess_LCG_GUPs': 'GUP/s',
},
'StarRandomAccess LCG': {
'StarRandomAccess_LCG_GUPs': 'GUP/s',
},
'SingleRandomAccess LCG': {
'SingleRandomAccess_LCG_GUPs': 'GUP/s',
},
'PTRANS': {
'PTRANS_GBs': 'GB/s',
'PTRANS_time': 'seconds',
},
'StarDGEMM': {
'StarDGEMM_Gflops': 'Gflop/s',
},
'SingleDGEMM': {
'SingleDGEMM_Gflops': 'Gflop/s',
},
'StarSTREAM': {
'StarSTREAM_Copy': 'GB/s',
'StarSTREAM_Scale': 'GB/s',
'StarSTREAM_Add': 'GB/s',
'StarSTREAM_Triad': 'GB/s',
},
'SingleSTREAM': {
'SingleSTREAM_Copy': 'GB/s',
'SingleSTREAM_Scale': 'GB/s',
'SingleSTREAM_Add': 'GB/s',
'SingleSTREAM_Triad': 'GB/s',
},
'MPIFFT': {
'MPIFFT_Gflops': 'Gflop/s',
'MPIFFT_time0': 'seconds',
'MPIFFT_time1': 'seconds',
'MPIFFT_time2': 'seconds',
'MPIFFT_time3': 'seconds',
'MPIFFT_time4': 'seconds',
'MPIFFT_time5': 'seconds',
'MPIFFT_time6': 'seconds',
},
'StarFFT': {
'StarFFT_Gflops': 'Gflop/s',
},
'SingleFFT': {
'SingleFFT_Gflops': 'Gflop/s',
},
'Latency/Bandwidth': {
'MaxPingPongLatency_usec': 'usec',
'RandomlyOrderedRingLatency_usec': 'usec',
'MinPingPongBandwidth_GBytes': 'GB',
'NaturallyOrderedRingBandwidth_GBytes': 'GB',
'RandomlyOrderedRingBandwidth_GBytes': 'GB',
'MinPingPongLatency_usec': 'usec',
'AvgPingPongLatency_usec': 'usec',
'MaxPingPongBandwidth_GBytes': 'GB',
'AvgPingPongBandwidth_GBytes': 'GB',
'NaturallyOrderedRingLatency_usec': 'usec',
},
'HPL': {
'HPL_Tflops': 'Tflop/s',
'HPL_time': 'seconds',
},
}
# A dict mapping HPCC benchmarks to sets of summary result names that should be
# added to the metadata for a benchmark.
HPCC_METADATA_MAP = {
'MPI RandomAccess': {
'MPIRandomAccess_N',
'MPIRandomAccess_Errors',
'MPIRandomAccess_ErrorsFraction',
'MPIRandomAccess_TimeBound',
'MPIRandomAccess_Algorithm',
},
'StarRandomAccess': {'RandomAccess_N'},
'SingleRandomAccess': {'RandomAccess_N'},
'MPI RandomAccess LCG': {
'MPIRandomAccess_LCG_N',
'MPIRandomAccess_LCG_Errors',
'MPIRandomAccess_LCG_ErrorsFraction',
'MPIRandomAccess_LCG_TimeBound',
'MPIRandomAccess_LCG_Algorithm',
},
'StarRandomAccess LCG': {'RandomAccess_LCG_N'},
'SingleRandomAccess LCG': {'RandomAccess_LCG_N'},
'PTRANS': {
'PTRANS_residual',
'PTRANS_n',
'PTRANS_nb',
'PTRANS_nprow',
'PTRANS_npcol',
},
'StarDGEMM': {'DGEMM_N'},
'SingleDGEMM': {'DGEMM_N'},
'StarSTREAM': {
'STREAM_Threads',
'STREAM_VectorSize',
},
'SingleSTREAM': {
'STREAM_Threads',
'STREAM_VectorSize',
},
'MPIFFT': {
'MPIFFT_N',
'MPIFFT_maxErr',
'MPIFFT_Procs',
},
'StarFFT': {'FFT_N'},
'SingleFFT': {'FFT_N'},
'Latency/Bandwidth': {},
'HPL': {
'HPL_N',
'HPL_NB',
'HPL_nprow',
'HPL_npcol',
'HPL_depth',
'HPL_nbdiv',
'HPL_nbmin',
'HPL_ctop',
},
}
# The names of the benchmarks.
HPCC_BENCHMARKS = sorted(HPCC_METRIC_MAP)
flags.DEFINE_enum(
'hpcc_math_library', HPCC_MATH_LIBRARY_OPEN_BLAS, [
HPCC_MATH_LIBRARY_OPEN_BLAS, HPCC_MATH_LIBRARY_MKL,
HPCC_MATH_LIBRARY_AMD_BLIS
], 'The math library to use when compiling hpcc: openblas, mkl, or '
'amdblis. The default is openblas.')
flags.DEFINE_list(
'hpcc_benchmarks', [], 'A list of benchmarks in HPCC to run. If none are '
'specified (the default), then all of the benchmarks are run. In 1.5.0, '
'the benchmarks may include the following: %s' % ', '.join(HPCC_BENCHMARKS))
flags.register_validator(
'hpcc_benchmarks',
lambda hpcc_benchmarks: set(hpcc_benchmarks).issubset(set(HPCC_BENCHMARKS)))
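# Example (illustrative): to run only HPL and PTRANS, PerfKit Benchmarker can
# be invoked with --hpcc_benchmarks=HPL,PTRANS.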
FLAGS = flags.FLAGS
def _LimitBenchmarksToRun(vm, selected_hpcc_benchmarks):
"""Limits the benchmarks to run.
This function copies hpcc.c to the local machine, comments out code that runs
benchmarks not listed in selected_hpcc_benchmarks, and then copies hpcc.c back
to the remote machine.
Args:
vm: The machine where hpcc.c was installed.
selected_hpcc_benchmarks: A set of benchmarks to run.
"""
remote_hpcc_path = posixpath.join(HPCC_DIR, 'src', 'hpcc.c')
local_hpcc_path = os.path.join(vm_util.GetTempDir(), 'hpcc.c')
vm.PullFile(local_hpcc_path, remote_hpcc_path)
with open(local_hpcc_path) as f:
lines = f.readlines()
# Process the main file, commenting out benchmarks that should not be run.
commenting = False
with open(local_hpcc_path, 'w') as f:
for line in lines:
# Decide whether to continue commenting out code for each benchmark. This
# is determined by searching for the comment that starts each benchmark.
match = re.search(r'\/\*\s+(.*?)\s+\*\/', line)
if match and match.group(1) in HPCC_BENCHMARKS:
commenting = match.group(1) not in selected_hpcc_benchmarks
# Start writing once the per-benchmark code is complete. This happens at
# the hpcc_end: label.
if re.search('hpcc_end:', line):
commenting = False
f.write('// %s' % line if commenting else line)
vm.PushFile(local_hpcc_path, remote_hpcc_path)
def _Install(vm):
"""Installs the HPCC package on the VM."""
vm.Install('wget')
vm.Install('openmpi')
vm.InstallPreprovisionedPackageData(PACKAGE_NAME, PREPROVISIONED_DATA.keys(),
linux_packages.INSTALL_DIR)
vm.RemoteCommand('cd %s && tar xvfz %s' %
(linux_packages.INSTALL_DIR, HPCC_TAR))
if FLAGS.hpcc_benchmarks:
_LimitBenchmarksToRun(vm, set(FLAGS.hpcc_benchmarks))
if FLAGS.hpcc_math_library == HPCC_MATH_LIBRARY_OPEN_BLAS:
_CompileHpccOpenblas(vm)
elif FLAGS.hpcc_math_library == HPCC_MATH_LIBRARY_MKL:
_CompileHpccMKL(vm)
elif FLAGS.hpcc_math_library == HPCC_MATH_LIBRARY_AMD_BLIS:
_CompileHpccAmdBlis(vm)
else:
raise errors.Setup.InvalidFlagConfigurationError(
'Unexpected hpcc_math_library option encountered.')
def _CompileHpccOpenblas(vm):
"""Compile HPCC with OpenBlas."""
vm.Install('openblas')
vm.RemoteCommand(
'cp %s/hpl/setup/%s %s' %
(HPCC_DIR, HPCC_MAKEFILE_CBLAS, HPCC_MAKEFILE_PATH_OPEN_BLAS))
sed_cmd = ('sed -i -e "/^MP/d" -e "s|gcc|mpicc|" -e "s|g77|mpicc|" '
'-e "s|\\$(HOME)/netlib/ARCHIVES/Linux_PII|%s|" '
'-e "s|libcblas.*|libopenblas.a|" '
'-e "s|-funroll-loops|-funroll-loops -std=c99|" '
'-e "s|\\-lm|\\-lgfortran \\-lm|" %s' %
(re.escape(openblas.OPENBLAS_DIR), HPCC_MAKEFILE_PATH_OPEN_BLAS))
vm.RemoteCommand(sed_cmd)
vm.RemoteCommand('cd %s; make arch=OPEN_BLAS' % HPCC_DIR)
def _CompileHpccAmdBlis(vm):
"""Compile HPCC with AMD BLIS."""
vm.Install('amdblis')
vm.RemoteCommand('cp %s/hpl/setup/%s %s' %
(HPCC_DIR, HPCC_MAKEFILE_CBLAS, HPCC_MAKEFILE_PATH_AMD_BLIS))
sed_cmd = ('sed -i -e "/^MP/d" -e "s|gcc|mpicc|" -e "s|g77|mpicc|" '
'-e "s|\\$(HOME)/netlib/ARCHIVES/Linux_PII|%s|" '
'-e "s|libcblas.*|lib/zen/libblis.a|" '
'-e "s|-funroll-loops|-funroll-loops -std=c99|" '
'-e "s|\\-lm|\\-lgfortran \\-lm|" %s' %
(re.escape(amdblis.AMDBLIS_DIR), HPCC_MAKEFILE_PATH_AMD_BLIS))
vm.RemoteCommand(sed_cmd)
vm.RemoteCommand('cd %s; make arch=AMD_BLIS' % HPCC_DIR)
def _CompileHpccMKL(vm):
"""Compiling HPCC with Intel MKL.
The following link provides instructions of using intel MKL in hpcc_benchmark.
https://software.intel.com/en-us/articles/performance-tools-for-software-developers-use-of-intel-mkl-in-hpcc-benchmark
TODO(user):The free version MKL pacakage does not have
'interfaces/fftw2x_cdft' which is the MPI FFTW 2.x interfaces to the
Intel MKL Cluster FFT. Such that we have to at first install OpenBlas and
build hpcc binary using OpenBlas. Need to investigate how to build hpcc
binary without 'interfaces/fftw2x_cdft'.
Args:
vm: VirtualMachine object. The VM to install hpcc.
"""
_CompileHpccOpenblas(vm)
vm.RemoteCommand('cd %s; rm hpcc' % HPCC_DIR)
vm.Install('mkl')
vm.RemoteCommand('cp %s/hpl/setup/%s %s' % (HPCC_DIR, HPCC_MAKEFILE_CBLAS,
HPCC_MAKEFILE_PATH_MKL))
mkl_lalib = ('-Wl,--start-group $(LAdir)/libfftw2xc_double_gnu.a '
'$(LAdir)/libfftw2xf_double_gnu.a '
'$(LAdir)/libmkl_intel_lp64.a '
'$(LAdir)/libmkl_intel_thread.a '
'$(LAdir)/libmkl_core.a '
'$(LAdir)/libmkl_blas95_lp64.a '
'-Wl,--end-group')
mkl_ccflags = (' -Wl,--no-as-needed -ldl -lmpi -liomp5 -lpthread -lm '
'-DUSING_FFTW -DMKL_INT=long -DLONG_IS_64BITS')
sed_cmd_mkl = (
'sed -i -e "/^MP/d" -e "s|gcc|mpicc|" -e "s|g77|mpicc|" '
'-e "s|\\$(HOME)/netlib/ARCHIVES/Linux_PII|'
'/opt/intel/mkl/lib/intel64|" '
'-e "s|\\$(LAdir)/libcblas.*|%s|" '
'-e "s|\\-lm|\\-lgfortran \\-lm|" '
'-e "/CCFLAGS / s|$|%s|" %s' %
(re.escape(mkl_lalib), re.escape(mkl_ccflags), HPCC_MAKEFILE_PATH_MKL))
vm.RemoteCommand(sed_cmd_mkl)
vm.RemoteCommand('source /opt/intel/compilers_and_libraries/linux/bin/'
'compilervars.sh -arch intel64 -platform linux && '
'cd %s; make arch=intel64' % HPCC_DIR)
def YumInstall(vm):
"""Installs the HPCC package on the VM."""
_Install(vm)
def AptInstall(vm):
"""Installs the HPCC package on the VM."""
_Install(vm)
|
import pytest
import homeassistant.components.automation as automation
from homeassistant.components.vacuum import DOMAIN
from homeassistant.helpers import device_registry
from homeassistant.setup import async_setup_component
from tests.common import (
MockConfigEntry,
assert_lists_same,
async_get_device_automations,
async_mock_service,
mock_device_registry,
mock_registry,
)
@pytest.fixture
def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
async def test_get_actions(hass, device_reg, entity_reg):
"""Test we get the expected actions from a vacuum."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
expected_actions = [
{
"domain": DOMAIN,
"type": "clean",
"device_id": device_entry.id,
"entity_id": "vacuum.test_5678",
},
{
"domain": DOMAIN,
"type": "dock",
"device_id": device_entry.id,
"entity_id": "vacuum.test_5678",
},
]
actions = await async_get_device_automations(hass, "action", device_entry.id)
assert_lists_same(actions, expected_actions)
async def test_action(hass):
"""Test for turn_on and turn_off actions."""
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event_dock"},
"action": {
"domain": DOMAIN,
"device_id": "abcdefgh",
"entity_id": "vacuum.entity",
"type": "dock",
},
},
{
"trigger": {"platform": "event", "event_type": "test_event_clean"},
"action": {
"domain": DOMAIN,
"device_id": "abcdefgh",
"entity_id": "vacuum.entity",
"type": "clean",
},
},
]
},
)
dock_calls = async_mock_service(hass, "vacuum", "return_to_base")
clean_calls = async_mock_service(hass, "vacuum", "start")
hass.bus.async_fire("test_event_dock")
await hass.async_block_till_done()
assert len(dock_calls) == 1
assert len(clean_calls) == 0
hass.bus.async_fire("test_event_clean")
await hass.async_block_till_done()
assert len(dock_calls) == 1
assert len(clean_calls) == 1
|
import logging
import threading
from flask import jsonify
from flask import request
from flask_cors import CORS
from kalliope._version import version_str
from kalliope.core.ConfigurationManager import SettingLoader
from kalliope.core.RestAPI.utils import requires_auth
from kalliope.core.RestAPI.views.neurons_view import NeuronsView
from kalliope.core.RestAPI.views.settings_views import SettingsView
from kalliope.core.RestAPI.views.synapses_views import SynapsesView
from kalliope.core.Utils.FileManager import FileManager
from gevent.pywsgi import WSGIServer
logging.basicConfig()
logger = logging.getLogger("kalliope")
UPLOAD_FOLDER = '/tmp/kalliope/tmp_uploaded_audio'
ALLOWED_EXTENSIONS = {'wav'}
class FlaskAPI(threading.Thread):
def __init__(self, app, port=5000, brain=None, allowed_cors_origin=False):
"""
:param app: Flask API
:param port: Port to listen
:param brain: Brain object
:type brain: Brain
"""
super(FlaskAPI, self).__init__()
self.app = app
self.port = port
self.brain = brain
self.allowed_cors_origin = allowed_cors_origin
# get current settings
sl = SettingLoader()
self.settings = sl.settings
# configure the upload folder
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
# create the temp folder
FileManager.create_directory(UPLOAD_FOLDER)
        # Flask configuration: disable the default Flask behaviour of encoding JSON to ASCII
self.app.url_map.strict_slashes = False
self.app.config['JSON_AS_ASCII'] = False
if self.allowed_cors_origin is not False:
CORS(app, resources={r"/*": {"origins": allowed_cors_origin}}, supports_credentials=True)
# Add routing rules
self.app.add_url_rule('/', view_func=self.get_main_page, methods=['GET'])
self.app.add_url_rule('/shutdown/', view_func=self.shutdown_server, methods=['POST'])
        # Register blueprints
self.synapses_blueprint = SynapsesView('synapses',
__name__,
app=self.app,
brain=self.brain,
settings=self.settings)
self.app.register_blueprint(self.synapses_blueprint)
self.settings_blueprint = SettingsView('settings',
__name__,
app=self.app,
brain=self.brain,
settings=self.settings)
self.app.register_blueprint(self.settings_blueprint)
self.neurons_blueprint = NeuronsView('neurons',
__name__,
app=self.app,
brain=self.brain,
settings=self.settings)
self.app.register_blueprint(self.neurons_blueprint)
def run(self):
        http_server = WSGIServer(('', self.port), self.app)
http_server.serve_forever()
@requires_auth
def get_main_page(self):
logger.debug("[FlaskAPI] get_main_page")
data = {
"Kalliope version": "%s" % version_str
}
return jsonify(data), 200
@requires_auth
def shutdown_server(self):
func = request.environ.get('werkzeug.server.shutdown')
if func is None:
raise RuntimeError('Not running with the Werkzeug Server')
func()
return "Shutting down..."
|
from os import path as path
import numpy as np
from ...utils import _check_option, get_subjects_dir, _check_fname
from ...surface import (complete_surface_info, read_surface, read_curvature,
_read_patch)
class Surface(object):
"""Container for a brain surface.
It is used for storing vertices, faces and morphometric data
(curvature) of a hemisphere mesh.
Parameters
----------
subject_id : string
Name of subject
hemi : {'lh', 'rh'}
Which hemisphere to load
surf : string
Name of the surface to load (eg. inflated, orig ...).
subjects_dir : str | None
If not None, this directory will be used as the subjects directory
instead of the value set using the SUBJECTS_DIR environment variable.
offset : float | None
If 0.0, the surface will be offset such that the medial
wall is aligned with the origin. If None, no offset will
be applied. If != 0.0, an additional offset will be used.
units : str
Can be 'm' or 'mm' (default).
Attributes
----------
    bin_curv : numpy.ndarray
        Binarized curvature values (0 or 1) indicating the sign of curvature.
coords : numpy.ndarray
nvtx x 3 array of vertex (x, y, z) coordinates.
curv : numpy.ndarray
        Vector representation of surface morphometry (curvature) values as
loaded from a file.
grey_curv : numpy.ndarray
Normalized morphometry (curvature) data, used in order to get
a gray cortex.
faces : numpy.ndarray
        nfaces x 3 array defining mesh triangles.
hemi : {'lh', 'rh'}
Which hemisphere to load.
nn : numpy.ndarray
Vertex normals for a triangulated surface.
offset : float | None
If float, align inside edge of each hemisphere to center + offset.
If None, do not change coordinates (default).
subject_id : string
Name of subject.
surf : string
Name of the surface to load (eg. inflated, orig ...).
units : str
Can be 'm' or 'mm' (default).
"""
def __init__(self, subject_id, hemi, surf, subjects_dir=None, offset=None,
units='mm'):
hemis = ('lh', 'rh')
if hemi not in hemis:
raise ValueError('hemi should be either "lh" or "rh",' +
'given value {0}'.format(hemi))
if offset is not None and ((not isinstance(offset, float)) and
(not isinstance(offset, int))):
            raise ValueError('offset should be either float or int, given ' +
'type {0}'.format(type(offset).__name__))
self.units = _check_option('units', units, ('mm', 'm'))
self.subject_id = subject_id
self.hemi = hemi
self.surf = surf
self.offset = offset
self.bin_curv = None
self.coords = None
self.curv = None
self.faces = None
self.grey_curv = None
self.nn = None
self.labels = dict()
subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
self.data_path = path.join(subjects_dir, subject_id)
def load_geometry(self):
"""Load geometry of the surface.
Parameters
----------
None
Returns
-------
None
"""
if self.surf == 'flat': # special case
fname = path.join(self.data_path, 'surf',
'%s.%s' % (self.hemi, 'cortex.patch.flat'))
_check_fname(fname, overwrite='read', must_exist=True,
name='flatmap surface file')
coords, faces, orig_faces = _read_patch(fname)
else:
coords, faces = read_surface(
path.join(self.data_path, 'surf',
'%s.%s' % (self.hemi, self.surf)))
orig_faces = faces
if self.units == 'm':
coords /= 1000.
if self.offset is not None:
if self.hemi == 'lh':
coords[:, 0] -= (np.max(coords[:, 0]) + self.offset)
else:
coords[:, 0] -= (np.min(coords[:, 0]) + self.offset)
surf = dict(rr=coords, tris=faces)
complete_surface_info(surf, copy=False, verbose=False)
nn = surf['nn']
self.coords = coords
self.faces = faces
self.orig_faces = orig_faces
self.nn = nn
def __len__(self):
"""Return number of vertices."""
return len(self.coords)
@property
def x(self):
return self.coords[:, 0]
@property
def y(self):
return self.coords[:, 1]
@property
def z(self):
return self.coords[:, 2]
def load_curvature(self):
"""Load in curvature values from the ?h.curv file."""
curv_path = path.join(self.data_path, 'surf', '%s.curv' % self.hemi)
self.curv = read_curvature(curv_path, binary=False)
self.bin_curv = np.array(self.curv > 0, np.int64)
# morphometry (curvature) normalization in order to get gray cortex
# TODO: delete self.grey_curv after cortex parameter
# will be fully supported
color = (self.curv > 0).astype(float)
color = 0.5 - (color - 0.5) / 3
color = color[:, np.newaxis] * [1, 1, 1]
self.grey_curv = color
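# Illustrative usage (assumes a FreeSurfer subject named 'sample' is available
# via SUBJECTS_DIR or the subjects_dir argument):
#
#     surf = Surface('sample', 'lh', 'inflated')
#     surf.load_geometry()
#     surf.load_curvature()
#     n_vertices = len(surf)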
|
import unittest
from trashcli.empty import EmptyCmd
from mock import Mock, call
class TestTrashEmptyCmd(unittest.TestCase):
def setUp(self):
self.empty_all_trashdirs = Mock()
self.empty_trashdir = Mock()
self.cmd = EmptyCmd(None, None, None, None, None, None, None, None, None)
self.cmd.empty_all_trashdirs = self.empty_all_trashdirs
self.cmd.empty_trashdir = self.empty_trashdir
    def test_default_behaviour_is_empty_all_trashdirs(self):
self.cmd.run('trash-empty')
assert [call()] == self.empty_all_trashdirs.mock_calls
assert [] == self.empty_trashdir.mock_calls
    def test_empty_specific_trashdir_when_trash_dir_option_given(self):
self.cmd.run('trash-empty', '--trash-dir', 'specific')
assert [] == self.empty_all_trashdirs.mock_calls
assert [call('specific')] == self.empty_trashdir.mock_calls
|
import json
import shlex
import subprocess
import sys
import pkg_resources
import requests
RELEASES_URL = "https://api.github.com/repos/{repo}/releases"
def run_command(cmd):
"""
Run a command line (with no shell).
Returns a tuple:
bool: true if the command succeeded.
str: the output of the command.
"""
proc = subprocess.run(
shlex.split(cmd),
shell=False,
check=False,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
)
output = proc.stdout.decode("utf-8")
succeeded = proc.returncode == 0
return succeeded, output
def does_tag_exist(tag_name):
"""
Does `tag_name` exist as a tag in git?
"""
return run_command(f"git rev-parse --verify {tag_name}")[0]
def check_ok(resp):
"""
Check that the Requests response object was successful.
Raise an exception if not.
"""
if not resp:
print(f"text: {resp.text!r}")
resp.raise_for_status()
def github_paginated(session, url):
"""
Get all the results from a paginated GitHub url.
"""
while True:
resp = session.get(url)
check_ok(resp)
yield from resp.json()
next_link = resp.links.get("next", None)
if not next_link:
break
url = next_link["url"]
def get_releases(session, repo):
"""
Get all the releases from a name/project repo.
Returns:
A dict mapping tag names to release dictionaries.
"""
url = RELEASES_URL.format(repo=repo)
releases = { r['tag_name']: r for r in github_paginated(session, url) }
return releases
def release_for_relnote(relnote):
"""
Turn a release note dict into the data needed by GitHub for a release.
"""
tag = f"coverage-{relnote['version']}"
return {
"tag_name": tag,
"name": tag,
"body": relnote["text"],
"draft": False,
"prerelease": relnote["prerelease"],
}
def create_release(session, repo, relnote):
"""
Create a new GitHub release.
"""
print(f"Creating {relnote['version']}")
data = release_for_relnote(relnote)
resp = session.post(RELEASES_URL.format(repo=repo), json=data)
check_ok(resp)
def update_release(session, url, relnote):
"""
Update an existing GitHub release.
"""
print(f"Updating {relnote['version']}")
data = release_for_relnote(relnote)
resp = session.patch(url, json=data)
check_ok(resp)
def update_github_releases(json_filename, repo):
"""
Read the json file, and create or update releases in GitHub.
"""
gh_session = requests.Session()
releases = get_releases(gh_session, repo)
if 0: # if you need to delete all the releases!
for release in releases.values():
print(release["tag_name"])
resp = gh_session.delete(release["url"])
check_ok(resp)
return
with open(json_filename) as jf:
relnotes = json.load(jf)
relnotes.sort(key=lambda rel: pkg_resources.parse_version(rel["version"]))
for relnote in relnotes:
tag = "coverage-" + relnote["version"]
if not does_tag_exist(tag):
continue
exists = tag in releases
if not exists:
create_release(gh_session, repo, relnote)
else:
release = releases[tag]
if release["body"] != relnote["text"]:
url = release["url"]
update_release(gh_session, url, relnote)
if __name__ == "__main__":
update_github_releases(*sys.argv[1:]) # pylint: disable=no-value-for-parameter
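# Illustrative invocation (file and repo names are assumptions): the script
# takes a JSON file of release notes and a GitHub "owner/repo" string, e.g.
#
#     python github_releases.py relnotes.json owner/repo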
|
import os
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('baz')
def test_hostname(host):
assert 'instance-2' == host.check_output('hostname -s')
def test_etc_molecule_directory(host):
f = host.file('/etc/molecule')
assert f.is_directory
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o755
def test_etc_molecule_ansible_hostname_file(host):
f = host.file('/etc/molecule/instance-2')
assert f.is_file
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o644
|
import arrow
from sqlalchemy import func
from lemur import database
from lemur.common.utils import truthiness
from lemur.endpoints.models import Endpoint, Policy, Cipher
from lemur.extensions import metrics
def get_all():
"""
Get all endpoints that are currently in Lemur.
:rtype : List
:return:
"""
query = database.session_query(Endpoint)
return database.find_all(query, Endpoint, {}).all()
def get(endpoint_id):
"""
    Retrieves an endpoint given its ID.
:param endpoint_id:
:return:
"""
return database.get(Endpoint, endpoint_id)
def get_by_name(name):
"""
    Retrieves an endpoint given its name.
:param name:
:return:
"""
return database.get(Endpoint, name, field="name")
def get_by_dnsname(dnsname):
"""
    Retrieves an endpoint given its dnsname.
:param dnsname:
:return:
"""
return database.get(Endpoint, dnsname, field="dnsname")
def get_by_dnsname_and_port(dnsname, port):
"""
    Retrieves an endpoint by its dnsname and port.
:param dnsname:
:param port:
:return:
"""
return (
Endpoint.query.filter(Endpoint.dnsname == dnsname)
.filter(Endpoint.port == port)
.scalar()
)
def get_by_source(source_label):
"""
Retrieves all endpoints for a given source.
:param source_label:
:return:
"""
return Endpoint.query.filter(Endpoint.source.label == source_label).all() # noqa
def get_all_pending_rotation():
"""
Retrieves all endpoints which have certificates deployed
that have been replaced.
:return:
"""
return Endpoint.query.filter(Endpoint.replaced.any()).all()
def create(**kwargs):
"""
Creates a new endpoint.
:param kwargs:
:return:
"""
endpoint = Endpoint(**kwargs)
database.create(endpoint)
metrics.send(
"endpoint_added", "counter", 1, metric_tags={"source": endpoint.source.label}
)
return endpoint
def get_or_create_policy(**kwargs):
policy = database.get(Policy, kwargs["name"], field="name")
if not policy:
policy = Policy(**kwargs)
database.create(policy)
return policy
def get_or_create_cipher(**kwargs):
cipher = database.get(Cipher, kwargs["name"], field="name")
if not cipher:
cipher = Cipher(**kwargs)
database.create(cipher)
return cipher
def update(endpoint_id, **kwargs):
endpoint = database.get(Endpoint, endpoint_id)
endpoint.policy = kwargs["policy"]
endpoint.certificate = kwargs["certificate"]
endpoint.source = kwargs["source"]
endpoint.last_updated = arrow.utcnow()
metrics.send(
"endpoint_updated", "counter", 1, metric_tags={"source": endpoint.source.label}
)
database.update(endpoint)
return endpoint
def render(args):
"""
    Helper that renders the REST API responses.
:param args:
:return:
"""
query = database.session_query(Endpoint)
filt = args.pop("filter")
if filt:
terms = filt.split(";")
if "active" in filt: # this is really weird but strcmp seems to not work here??
query = query.filter(Endpoint.active == truthiness(terms[1]))
elif "port" in filt:
if terms[1] != "null": # ng-table adds 'null' if a number is removed
query = query.filter(Endpoint.port == terms[1])
elif "ciphers" in filt:
query = query.filter(Cipher.name == terms[1])
else:
query = database.filter(query, Endpoint, terms)
return database.sort_and_page(query, Endpoint, args)
def stats(**kwargs):
"""
Helper that defines some useful statistics about endpoints.
:param kwargs:
:return:
"""
attr = getattr(Endpoint, kwargs.get("metric"))
query = database.db.session.query(attr, func.count(attr))
items = query.group_by(attr).all()
keys = []
values = []
for key, count in items:
keys.append(key)
values.append(count)
return {"labels": keys, "values": values}
|
import os
import os.path
import sys
import shutil
import contextlib
import enum
import argparse
from typing import Iterator, Optional
from PyQt5.QtCore import QStandardPaths
from PyQt5.QtWidgets import QApplication
from qutebrowser.utils import log, debug, message, utils
# The cached locations
_locations = {}
class _Location(enum.Enum):
"""A key for _locations."""
config = enum.auto()
auto_config = enum.auto()
data = enum.auto()
system_data = enum.auto()
cache = enum.auto()
download = enum.auto()
runtime = enum.auto()
config_py = enum.auto()
APPNAME = 'qutebrowser'
class EmptyValueError(Exception):
"""Error raised when QStandardPaths returns an empty value."""
@contextlib.contextmanager
def _unset_organization() -> Iterator[None]:
"""Temporarily unset QApplication.organizationName().
This is primarily needed in config.py.
"""
qapp = QApplication.instance()
if qapp is not None:
orgname = qapp.organizationName()
qapp.setOrganizationName(None) # type: ignore[arg-type]
try:
yield
finally:
if qapp is not None:
qapp.setOrganizationName(orgname)
def _init_config(args: Optional[argparse.Namespace]) -> None:
"""Initialize the location for configs."""
typ = QStandardPaths.ConfigLocation
path = _from_args(typ, args)
if path is None:
if utils.is_windows:
app_data_path = _writable_location(
QStandardPaths.AppDataLocation)
path = os.path.join(app_data_path, 'config')
else:
path = _writable_location(typ)
_create(path)
_locations[_Location.config] = path
_locations[_Location.auto_config] = path
# Override the normal (non-auto) config on macOS
if utils.is_mac:
path = _from_args(typ, args)
if path is None: # pragma: no branch
path = os.path.expanduser('~/.' + APPNAME)
_create(path)
_locations[_Location.config] = path
config_py_file = os.path.join(_locations[_Location.config], 'config.py')
if getattr(args, 'config_py', None) is not None:
assert args is not None
config_py_file = os.path.abspath(args.config_py)
_locations[_Location.config_py] = config_py_file
def config(auto: bool = False) -> str:
"""Get the location for the config directory.
If auto=True is given, get the location for the autoconfig.yml directory,
which is different on macOS.
"""
if auto:
return _locations[_Location.auto_config]
return _locations[_Location.config]
def config_py() -> str:
"""Get the location for config.py.
Usually, config.py is in standarddir.config(), but this can be overridden
with the --config-py argument.
"""
return _locations[_Location.config_py]
def _init_data(args: Optional[argparse.Namespace]) -> None:
"""Initialize the location for data."""
typ = QStandardPaths.DataLocation
path = _from_args(typ, args)
if path is None:
if utils.is_windows:
app_data_path = _writable_location(QStandardPaths.AppDataLocation)
path = os.path.join(app_data_path, 'data')
elif sys.platform.startswith('haiku'):
# HaikuOS returns an empty value for AppDataLocation
config_path = _writable_location(QStandardPaths.ConfigLocation)
path = os.path.join(config_path, 'data')
else:
path = _writable_location(typ)
_create(path)
_locations[_Location.data] = path
# system_data
_locations.pop(_Location.system_data, None) # Remove old state
if utils.is_linux:
path = '/usr/share/' + APPNAME
if os.path.exists(path):
_locations[_Location.system_data] = path
def data(system: bool = False) -> str:
"""Get the data directory.
If system=True is given, gets the system-wide (probably non-writable) data
directory.
"""
if system:
try:
return _locations[_Location.system_data]
except KeyError:
pass
return _locations[_Location.data]
def _init_cache(args: Optional[argparse.Namespace]) -> None:
"""Initialize the location for the cache."""
typ = QStandardPaths.CacheLocation
path = _from_args(typ, args)
if path is None:
if utils.is_windows:
# Local, not Roaming!
data_path = _writable_location(QStandardPaths.DataLocation)
path = os.path.join(data_path, 'cache')
else:
path = _writable_location(typ)
_create(path)
_locations[_Location.cache] = path
def cache() -> str:
return _locations[_Location.cache]
def _init_download(args: Optional[argparse.Namespace]) -> None:
"""Initialize the location for downloads.
Note this is only the default directory as found by Qt.
Therefore, we also don't create it.
"""
typ = QStandardPaths.DownloadLocation
path = _from_args(typ, args)
if path is None:
path = _writable_location(typ)
_locations[_Location.download] = path
def download() -> str:
return _locations[_Location.download]
def _init_runtime(args: Optional[argparse.Namespace]) -> None:
"""Initialize location for runtime data."""
if utils.is_mac or utils.is_windows:
# RuntimeLocation is a weird path on macOS and Windows.
typ = QStandardPaths.TempLocation
else:
typ = QStandardPaths.RuntimeLocation
path = _from_args(typ, args)
if path is None:
try:
path = _writable_location(typ)
except EmptyValueError:
# Fall back to TempLocation when RuntimeLocation is misconfigured
if typ == QStandardPaths.TempLocation:
raise
path = _writable_location( # pragma: no cover
QStandardPaths.TempLocation)
# This is generic, but per-user.
# _writable_location makes sure we have a qutebrowser-specific subdir.
#
# For TempLocation:
# "The returned value might be application-specific, shared among
# other applications for this user, or even system-wide."
#
# Unfortunately this path could get too long for sockets (which have a
# maximum length of 104 chars), so we don't add the username here...
_create(path)
_locations[_Location.runtime] = path
def runtime() -> str:
return _locations[_Location.runtime]
def _writable_location(typ: QStandardPaths.StandardLocation) -> str:
"""Wrapper around QStandardPaths.writableLocation.
Arguments:
typ: A QStandardPaths::StandardLocation member.
"""
typ_str = debug.qenum_key(QStandardPaths, typ)
# Types we are sure we handle correctly below.
assert typ in [
QStandardPaths.ConfigLocation, QStandardPaths.DataLocation,
QStandardPaths.CacheLocation, QStandardPaths.DownloadLocation,
QStandardPaths.RuntimeLocation, QStandardPaths.TempLocation,
QStandardPaths.AppDataLocation], typ_str
with _unset_organization():
path = QStandardPaths.writableLocation(typ)
log.misc.debug("writable location for {}: {}".format(typ_str, path))
if not path:
raise EmptyValueError("QStandardPaths returned an empty value!")
# Qt seems to use '/' as path separator even on Windows...
path = path.replace('/', os.sep)
# Add the application name to the given path if needed.
# This is in order for this to work without a QApplication (and thus
    # QStandardPaths not knowing the application name).
if (typ != QStandardPaths.DownloadLocation and
path.split(os.sep)[-1] != APPNAME):
path = os.path.join(path, APPNAME)
return path
def _from_args(
typ: QStandardPaths.StandardLocation,
args: Optional[argparse.Namespace]
) -> Optional[str]:
"""Get the standard directory from an argparse namespace.
Return:
The overridden path, or None if there is no override.
"""
basedir_suffix = {
QStandardPaths.ConfigLocation: 'config',
QStandardPaths.DataLocation: 'data',
QStandardPaths.CacheLocation: 'cache',
QStandardPaths.DownloadLocation: 'download',
QStandardPaths.RuntimeLocation: 'runtime',
}
if getattr(args, 'basedir', None) is None:
return None
assert args is not None
try:
suffix = basedir_suffix[typ]
except KeyError: # pragma: no cover
return None
return os.path.abspath(os.path.join(args.basedir, suffix))
def _create(path: str) -> None:
"""Create the `path` directory.
From the XDG basedir spec:
If, when attempting to write a file, the destination directory is
non-existent an attempt should be made to create it with permission
0700. If the destination directory exists already the permissions
should not be changed.
"""
if APPNAME == 'qute_test' and path.startswith('/home'): # pragma: no cover
raise Exception("Trying to create directory inside /home during "
"tests, this should not happen.")
os.makedirs(path, 0o700, exist_ok=True)
def _init_dirs(args: Optional[argparse.Namespace] = None) -> None:
"""Create and cache standard directory locations.
Mainly in a separate function because we need to call it in tests.
"""
_init_config(args)
_init_data(args)
_init_cache(args)
_init_download(args)
_init_runtime(args)
def init(args: Optional[argparse.Namespace]) -> None:
"""Initialize all standard dirs."""
if args is not None:
# args can be None during tests
log.init.debug("Base directory: {}".format(args.basedir))
_init_dirs(args)
_init_cachedir_tag()
if args is not None and getattr(args, 'basedir', None) is None:
if utils.is_mac: # pragma: no cover
_move_macos()
elif utils.is_windows: # pragma: no cover
_move_windows()
def _move_macos() -> None:
"""Move most config files to new location on macOS."""
old_config = config(auto=True) # ~/Library/Preferences/qutebrowser
new_config = config() # ~/.qutebrowser
for f in os.listdir(old_config):
if f not in ['qsettings', 'autoconfig.yml']:
_move_data(os.path.join(old_config, f),
os.path.join(new_config, f))
def _move_windows() -> None:
"""Move the whole qutebrowser directory from Local to Roaming AppData."""
# %APPDATA%\Local\qutebrowser
old_appdata_dir = _writable_location(QStandardPaths.DataLocation)
# %APPDATA%\Roaming\qutebrowser
new_appdata_dir = _writable_location(QStandardPaths.AppDataLocation)
# data subfolder
old_data = os.path.join(old_appdata_dir, 'data')
new_data = os.path.join(new_appdata_dir, 'data')
ok = _move_data(old_data, new_data)
if not ok: # pragma: no cover
return
# config files
new_config_dir = os.path.join(new_appdata_dir, 'config')
_create(new_config_dir)
for f in os.listdir(old_appdata_dir):
if f != 'cache':
_move_data(os.path.join(old_appdata_dir, f),
os.path.join(new_config_dir, f))
def _init_cachedir_tag() -> None:
"""Create CACHEDIR.TAG if it doesn't exist.
See http://www.brynosaurus.com/cachedir/spec.html
"""
cachedir_tag = os.path.join(cache(), 'CACHEDIR.TAG')
if not os.path.exists(cachedir_tag):
try:
with open(cachedir_tag, 'w', encoding='utf-8') as f:
f.write("Signature: 8a477f597d28d172789f06886806bc55\n")
f.write("# This file is a cache directory tag created by "
"qutebrowser.\n")
f.write("# For information about cache directory tags, see:\n")
f.write("# http://www.brynosaurus.com/"
"cachedir/\n")
except OSError:
log.init.exception("Failed to create CACHEDIR.TAG")
def _move_data(old: str, new: str) -> bool:
"""Migrate data from an old to a new directory.
If the old directory does not exist, the migration is skipped.
If the new directory already exists, an error is shown.
Return: True if moving succeeded, False otherwise.
"""
if not os.path.exists(old):
return False
log.init.debug("Migrating data from {} to {}".format(old, new))
if os.path.exists(new):
if not os.path.isdir(new) or os.listdir(new):
message.error("Failed to move data from {} as {} is non-empty!"
.format(old, new))
return False
os.rmdir(new)
try:
shutil.move(old, new)
except OSError as e:
message.error("Failed to move data from {} to {}: {}".format(
old, new, e))
return False
return True
|
import numpy as np
from tensornetwork.block_sparse.utils import intersect, unique
from typing import (List, Optional, Type, Any, Union, Callable)
#TODO (mganahl): clean up implementation of identity charges
class BaseCharge:
"""
Base class for charges of BlockSparseTensor. All user defined charges
should be derived from this class.
  Attributes:
    * unique_charges: np.ndarray of shape `(n, m)`, with `n` the number of
      unique charges and `m` the number of charge types.
    * charge_labels: np.ndarray of integer dtype. Used for identifying
      charges with integer labels. `unique_charges[charge_labels, :]`
      is the np.ndarray of actual charges.
    * charge_types: A list of `type` objects. Stores the different charge
      types, one for each column in `unique_charges`.
  """
class Iterator:
def __init__(self, charges: np.ndarray):
self.n = 0
self.charges = charges
def __next__(self):
if self.n < self.charges.shape[0]:
out = self.charges[self.n, :]
self.n += 1
return out
raise StopIteration
def __init__(self,
charges: Union[List, np.ndarray],
charge_labels: Optional[np.ndarray] = None,
charge_types: Optional[List[Type["BaseCharge"]]] = None,
charge_dtype: Optional[Type[np.number]] = np.int16) -> None:
charges = np.asarray(charges)
if charges.ndim == 1:
charges = charges[:, None]
if (charge_types is not None) and (len(charge_types) != charges.shape[1]):
raise ValueError(
"`len(charge_types) = {}` does not match `charges.shape[1]={}`"
.format(len(charge_types), charges.shape[1]))
self.num_symmetries = charges.shape[1]
if charges.shape[1] < 3:
self.label_dtype = np.int16
else:
self.label_dtype = np.int32
if charge_types is None:
charge_types = [type(self)] * self.num_symmetries
self.charge_types = charge_types
if charge_labels is None:
self._unique_charges = None
self._charge_labels = None
self._charges = charges.astype(charge_dtype)
else:
self._charge_labels = np.asarray(charge_labels, dtype=self.label_dtype)
self._unique_charges = charges.astype(charge_dtype)
self._charges = None
@property
def unique_charges(self):
if self._unique_charges is None:
self._unique_charges, self._charge_labels = unique(
self.charges, return_inverse=True)
self._charges = None
return self._unique_charges
@property
def charge_labels(self):
if self._charge_labels is None:
self._unique_charges, self._charge_labels = unique(
self.charges, return_inverse=True)
self._charges = None
return self._charge_labels
@property
def charges(self):
if self._charges is not None:
return self._charges
return self._unique_charges[self._charge_labels, :]
@staticmethod
def fuse(charge1, charge2):
raise NotImplementedError("`fuse` has to be implemented in derived classes")
@staticmethod
def dual_charges(charges):
raise NotImplementedError(
"`dual_charges` has to be implemented in derived classes")
@staticmethod
def identity_charge():
raise NotImplementedError(
"`identity_charge` has to be implemented in derived classes")
@classmethod
def random(cls, dimension: int, minval: int, maxval: int):
raise NotImplementedError(
"`random` has to be implemented in derived classes")
@property
def dim(self):
if self._charge_labels is not None:
return len(self._charge_labels)
return self._charges.shape[0]
@property
def num_unique(self) -> int:
"""
    Return the number of different charges in `BaseCharge`.
"""
return self.unique_charges.shape[0]
def copy(self):
"""
Return a copy of `BaseCharge`.
"""
if self._unique_charges is not None:
charges = self._unique_charges.copy()
labels = self._charge_labels.copy()
else:
charges = self._charges.copy()
labels = None
obj = self.__new__(type(self))
obj.__init__(
charges=charges,
charge_labels=labels,
charge_types=self.charge_types,
charge_dtype=self.dtype)
return obj
@property
def dtype(self):
if self._unique_charges is not None:
return self._unique_charges.dtype
return self._charges.dtype
def __repr__(self):
return 'BaseCharge object:' + '\n charge types: ' + self.names + \
'\n unique charges:' + str(self.charges.T).replace('\n', '\n\t\t ')\
+ '\n'
def __iter__(self):
return self.Iterator(self.charges)
def __len__(self):
if self._charges is not None:
return self.charges.shape[0]
return self._charge_labels.shape[0]
def __eq__(self, target_charges: Union[np.ndarray,
"BaseCharge"]) -> np.ndarray:
if isinstance(target_charges, type(self)):
if len(target_charges) == 0:
raise ValueError('input to __eq__ cannot be an empty charge')
targets = target_charges.charges
else:
if target_charges.ndim == 1:
target_charges = target_charges[:, None]
if target_charges.shape[0] == 0:
raise ValueError('input to __eq__ cannot be an empty np.ndarray')
if target_charges.shape[1] != self.num_symmetries:
raise ValueError("shape of `target_charges = {}` is incompatible with "
"`self.num_symmetries = {}".format(
target_charges.shape, self.num_symmetries))
targets = target_charges
return np.logical_and.reduce(
self.charges[:, :, None] == targets.T[None, :, :], axis=1)
def identity_charges(self, dim: int = 1) -> "BaseCharge":
"""
    Returns a `BaseCharge` holding `dim` copies of the identity charge.
Returns:
BaseCharge: The identity charge.
"""
charges = np.concatenate(
[
np.asarray([ct.identity_charge() for ct in self.charge_types],
dtype=self.dtype)[None, :]
] * dim,
axis=0)
obj = self.__new__(type(self))
obj.__init__(
charges=charges, charge_labels=None, charge_types=self.charge_types)
return obj
def __add__(self, other: "BaseCharge") -> "BaseCharge":
"""
Fuse `self` with `other`.
Args:
other: A `BaseCharge` object.
Returns:
BaseCharge: The result of fusing `self` with `other`.
"""
    # fuse the charges of `self` and `other` under their kronecker addition
fused_charges = fuse_ndarray_charges(self.charges, other.charges,
self.charge_types)
obj = self.__new__(type(self))
obj.__init__(fused_charges, charge_types=self.charge_types)
return obj
def dual(self, take_dual: Optional[bool] = False) -> "BaseCharge":
"""
Return the charges of `BaseCharge`, possibly conjugated.
Args:
take_dual: If `True` return the dual charges. If `False` return
regular charges.
Returns:
BaseCharge
"""
if take_dual:
if self._unique_charges is not None:
unique_dual_charges = np.stack([
self.charge_types[n].dual_charges(self._unique_charges[:, n])
for n in range(len(self.charge_types))
],
axis=1)
obj = self.__new__(type(self))
obj.__init__(
unique_dual_charges,
charge_labels=self.charge_labels,
charge_types=self.charge_types)
return obj
dual_charges = np.stack([
self.charge_types[n].dual_charges(self._charges[:, n])
for n in range(len(self.charge_types))
],
axis=1)
obj = self.__new__(type(self))
obj.__init__(
dual_charges, charge_labels=None, charge_types=self.charge_types)
return obj
return self
def __matmul__(self, other):
#some checks
if len(self) != len(other):
raise ValueError(
'__matmul__ requires charges to have the same number of elements')
charges = np.concatenate([self.charges, other.charges], axis=1)
charge_types = self.charge_types + other.charge_types
return BaseCharge(
charges=charges, charge_labels=None, charge_types=charge_types)
def __mul__(self, number: bool) -> "BaseCharge":
if not isinstance(number, (bool, np.bool_)):
raise ValueError(
"can only multiply by `True` or `False`, found {}".format(number))
return self.dual(number)
def intersect(self, other, assume_unique=False, return_indices=False) -> Any:
"""
Compute the intersection of `self` with `other`. See also np.intersect1d.
Args:
other: A BaseCharge object.
assume_unique: If `True` assume that elements are unique.
return_indices: If `True`, return index-labels.
Returns:
If `return_indices=True`:
BaseCharge
np.ndarray: The indices of the first occurrences of the
common values in `self`.
np.ndarray: The indices of the first occurrences of the
common values in `other`.
If `return_indices=False`:
BaseCharge
"""
if isinstance(other, type(self)):
out = intersect(
self.charges,
other.charges,
assume_unique=assume_unique,
axis=0,
return_indices=return_indices)
else:
if other.ndim == 1:
other = other[:, None]
out = intersect(
self.charges,
np.asarray(other),
axis=0,
assume_unique=assume_unique,
return_indices=return_indices)
obj = self.__new__(type(self))
if return_indices:
obj.__init__(
charges=out[0],
charge_labels=np.arange(out[0].shape[0], dtype=self.label_dtype),
charge_types=self.charge_types,
)
return obj, out[1], out[2]
obj.__init__(
charges=out,
charge_labels=np.arange(out.shape[0], dtype=self.label_dtype),
charge_types=self.charge_types,
)
return obj
def unique(self, #pylint: disable=inconsistent-return-statements
return_index: bool = False,
return_inverse: bool = False,
return_counts: bool = False) -> Any:
"""
Compute the unique charges in `BaseCharge`.
    See np.unique for a more detailed explanation. This function
    does the same but instead of an np.ndarray, it returns the unique
    elements (not necessarily sorted in standard order) in a `BaseCharge`
object.
Args:
return_index: If `True`, also return the indices of `self.charges`
(along the specified axis,
if provided, or in the flattened array) that result in the unique array.
return_inverse: If `True`, also return the indices of the unique array
(for the specified
axis, if provided) that can be used to reconstruct `self.charges`.
return_counts: If `True`, also return the number of times each unique
item appears in `self.charges`.
Returns:
BaseCharge: The sorted unique values.
np.ndarray: The indices of the first occurrences of the unique values
in the original array. Only provided if `return_index` is True.
np.ndarray: The indices to reconstruct the original array from the
unique array. Only provided if `return_inverse` is True.
np.ndarray: The number of times each of the unique values comes up in the
original array. Only provided if `return_counts` is True.
"""
obj = self.__new__(type(self))
if self._charges is not None:
tmp = unique(
self._charges,
return_index=return_index,
return_inverse=return_inverse,
return_counts=return_counts)
if any([return_index, return_inverse, return_counts]):
unique_charges = tmp[0]
obj.__init__(
charges=unique_charges,
charge_labels=np.arange(
unique_charges.shape[0], dtype=self.label_dtype),
charge_types=self.charge_types)
tmp[0] = obj
else:
obj.__init__(
charges=tmp,
charge_labels=np.arange(tmp.shape[0], dtype=self.label_dtype),
charge_types=self.charge_types)
tmp = obj
return tmp
if self._unique_charges is not None:
if not return_index:
obj.__init__(
charges=self._unique_charges,
charge_labels=np.arange(
self._unique_charges.shape[0], dtype=self.label_dtype),
charge_types=self.charge_types)
out = [obj]
if return_inverse:
out.append(self._charge_labels)
if return_counts:
_, cnts = unique(self._charge_labels, return_counts=True)
out.append(cnts)
if len(out) > 1:
return out
return out[0]
tmp = unique(
self._charge_labels,
return_index=return_index,
return_inverse=return_inverse,
return_counts=return_counts)
unique_charges = self._unique_charges[tmp[0], :]
obj.__init__(
charges=unique_charges,
charge_labels=np.arange(
unique_charges.shape[0], dtype=self.label_dtype),
charge_types=self.charge_types)
tmp[0] = obj
return tmp
def reduce(self,
target_charges: Union[int, np.ndarray],
return_locations: bool = False,
strides: Optional[int] = 1) -> Any:
"""
Reduce the dimension of a
charge to keep only the charge values that intersect target_charges
Args:
target_charges: array of unique charges to keep.
return_locations: If `True`, also return the locations of
target values within `BaseCharge`.
strides: An optional stride value.
Returns:
BaseCharge: charge of reduced dimension.
np.ndarray: If `return_locations = True`; the index locations
of target values.
"""
if isinstance(target_charges, (np.integer, int)):
target_charges = np.asarray([target_charges], dtype=self.dtype)
if target_charges.ndim == 1:
target_charges = target_charges[:, None]
target_charges = np.asarray(target_charges, dtype=self.dtype)
# find intersection of index charges and target charges
reduced_charges, label_to_unique, _ = intersect(
self.unique_charges, target_charges, axis=0, return_indices=True)
num_unique = len(label_to_unique)
# construct the map to the reduced charges
map_to_reduced = np.full(self.dim, fill_value=-1, dtype=self.label_dtype)
map_to_reduced[label_to_unique] = np.arange(
num_unique, dtype=self.label_dtype)
    # map the existing charge labels onto the reduced labels
reduced_ind_labels = map_to_reduced[self.charge_labels]
reduced_locs = reduced_ind_labels >= 0
new_ind_labels = reduced_ind_labels[reduced_locs].astype(self.label_dtype)
obj = self.__new__(type(self))
obj.__init__(reduced_charges, new_ind_labels, self.charge_types)
if return_locations:
return obj, strides * np.flatnonzero(reduced_locs).astype(np.uint32)
return obj
def __getitem__(self, n: Union[List[int], np.ndarray, int]) -> "BaseCharge":
"""
Return the charge-element at position `n`, wrapped into a `BaseCharge`
object.
Args:
n: An integer or `np.ndarray`.
Returns:
BaseCharge: The charges at `n`.
"""
if isinstance(n, (np.integer, int)):
n = np.asarray([n])
n = np.asarray(n)
obj = self.__new__(type(self))
if self._unique_charges is not None:
labels = self.charge_labels[n]
unique_labels, new_labels = unique(labels, return_inverse=True)
unique_charges = self.unique_charges[unique_labels, :]
obj.__init__(unique_charges, new_labels, self.charge_types)
return obj
obj.__init__(
self._charges[n, :], charge_labels=None, charge_types=self.charge_types)
return obj
@property
def names(self):
return repr([ct.__new__(ct).__class__.__name__ for ct in self.charge_types])
class U1Charge(BaseCharge):
"""Charge Class for the U1 symmetry group."""
@staticmethod
def fuse(charge1: np.ndarray, charge2: np.ndarray) -> np.ndarray:
return np.add.outer(charge1, charge2).ravel()
@staticmethod
def dual_charges(charges: np.ndarray) -> np.ndarray:
return charges * charges.dtype.type(-1)
@staticmethod
def identity_charge() -> np.ndarray:
return np.int16(0)
@classmethod
def random(cls, dimension: int, minval: int, maxval: int) -> BaseCharge:
charges = np.random.randint(minval, maxval + 1, dimension, dtype=np.int16)
return cls(charges=charges)
class Z2Charge(BaseCharge):
"""Charge Class for the Z2 symmetry group."""
def __init__(self,
charges: Union[List, np.ndarray],
charge_labels: Optional[np.ndarray] = None,
charge_types: Optional[List[Type["BaseCharge"]]] = None,
charge_dtype: Optional[Type[np.number]] = np.int16) -> None:
#do some checks before calling the base class constructor
unique_charges = unique(np.ravel(charges))
if not np.all(np.isin(unique_charges, [0, 1])):
raise ValueError("Z2 charges can only be 0 or 1, found {}".format(unique))
super().__init__(
charges,
charge_labels,
charge_types=[type(self)],
charge_dtype=charge_dtype)
@staticmethod
def fuse(charge1: np.ndarray, charge2: np.ndarray) -> np.ndarray:
#pylint: disable=no-member
return np.bitwise_xor.outer(charge1, charge2).ravel()
@staticmethod
def dual_charges(charges: np.ndarray) -> np.ndarray:
return charges
@staticmethod
def identity_charge() -> np.ndarray:
return np.int16(0)
@classmethod
def random(cls,
dimension: int,
minval: int = 0,
maxval: int = 1) -> BaseCharge:
if minval != 0 or maxval != 1:
raise ValueError("Z2 charges can only take values 0 or 1")
charges = np.random.randint(0, 2, dimension, dtype=np.int16)
return cls(charges=charges)
def ZNCharge(n: int) -> Callable:
"""Contstructor for charge classes of the ZN symmetry groups.
Args:
n: The module of the symmetry group.
Returns:
A charge class of your given ZN symmetry group.
"""
if n < 2:
raise ValueError(f"n must be >= 2, found {n}")
class ModularCharge(BaseCharge):
def __init__(self,
charges: Union[List, np.ndarray],
charge_labels: Optional[np.ndarray] = None,
charge_types: Optional[List[Type["BaseCharge"]]] = None,
charge_dtype: Optional[Type[np.number]] = np.int16) -> None:
unique_charges = unique(np.ravel(charges))
if not np.all(np.isin(unique_charges, list(range(n)))):
raise ValueError(f"Z{n} charges must be in range({n}), found: {unique}")
super().__init__(
charges,
charge_labels,
charge_types=[type(self)],
charge_dtype=charge_dtype)
@staticmethod
def fuse(charge1: np.ndarray, charge2: np.ndarray) -> np.ndarray:
return np.add.outer(charge1, charge2).ravel() % n
@staticmethod
def dual_charges(charges: np.ndarray) -> np.ndarray:
return (n - charges) % n
@staticmethod
def identity_charge() -> np.ndarray:
return np.int16(0)
@classmethod
def random(cls,
dimension: int,
minval: int = 0,
maxval: int = n - 1) -> BaseCharge:
if maxval >= n:
raise ValueError(f"maxval must be less than n={n}, got {maxval}")
if minval < 0:
raise ValueError(f"minval must be greater than 0, found {minval}")
# No need for the mod due to the checks above.
charges = np.random.randint(minval, maxval + 1, dimension, dtype=np.int16)
return cls(charges=charges)
return ModularCharge
def fuse_ndarray_charges(charges_A: np.ndarray, charges_B: np.ndarray,
charge_types: List[Type[BaseCharge]]) -> np.ndarray:
"""
Fuse the quantum numbers of two indices under their kronecker addition.
Args:
    charges_A (np.ndarray): n-by-D1 dimensional array of integers encoding
      charges, with n the number of symmetries and D1 the index dimension.
    charges_B (np.ndarray): n-by-D2 dimensional array of charges.
charge_types: A list of types of the charges.
Returns:
np.ndarray: n-by-(D1 * D2) dimensional array of the fused charges.
"""
comb_charges = [0] * len(charge_types)
for n, ct in enumerate(charge_types):
comb_charges[n] = ct.fuse(charges_A[:, n], charges_B[:, n])[:, None]
return np.concatenate(comb_charges, axis=1)
def fuse_charges(charges: List[BaseCharge], flows: List[bool]) -> BaseCharge:
"""
Fuse all `charges` into a new charge.
Charges are fused from "right to left",
in accordance with row-major order.
Args:
charges: A list of charges to be fused.
flows: A list of flows, one for each element in `charges`.
Returns:
BaseCharge: The result of fusing `charges`.
"""
if len(charges) != len(flows):
raise ValueError(
"`charges` and `flows` are of unequal lengths {} != {}".format(
len(charges), len(flows)))
fused_charges = charges[0] * flows[0]
for n in range(1, len(charges)):
fused_charges = fused_charges + charges[n] * flows[n]
return fused_charges
def charge_equal(c1: BaseCharge, c2: BaseCharge) -> bool:
"""
Compare two BaseCharges `c1` and `c2`.
Return `True` if they are equal, else `False`.
"""
  if c1.dim != c2.dim:
    return False
  res = True
if c1._unique_charges is not None and c2._unique_charges is not None:
if c1._unique_charges.shape != c2._unique_charges.shape:
res = False
elif not np.all(c1._unique_charges == c2._unique_charges):
res = False
elif not np.all(c1.charge_labels == c2.charge_labels):
res = False
return res
if c1._charges is not None and c2._charges is not None:
if c1._charges.shape != c2._charges.shape:
res = False
elif not np.all(c1._charges == c2._charges):
res = False
return res
if c1.charges.shape != c2.charges.shape:
res = False
elif not np.all(c1.charges == c2.charges):
res = False
return res
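# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the library API above): a minimal demo of
# the charge classes defined in this module. The charge values are arbitrary
# examples chosen only for illustration.
if __name__ == "__main__":
  q1 = U1Charge(np.array([-1, 0, 1]))
  q2 = U1Charge(np.array([0, 1]))
  # Fuse two U(1) indices (both flows False); the result holds all kronecker
  # sums of the two charge arrays.
  fused = fuse_charges([q1, q2], [False, False])
  print(fused.charges)
  # The ZNCharge factory builds modular charge classes, here Z3.
  Z3 = ZNCharge(3)
  z = Z3(np.array([0, 1, 2]))
  print(charge_equal(z, z))  # True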
|
import pytest
from mock import create_autospec, sentinel
from pymongo.database import Database
from pymongo.errors import PyMongoError, OperationFailure
from arctic import auth
def test_authenticate():
db = create_autospec(Database)
db.authenticate.return_value = sentinel.ret
assert auth.authenticate(db, sentinel.user, sentinel.password) == sentinel.ret
def test_authenticate_fails():
db = create_autospec(Database)
error = "command SON([('saslStart', 1), ('mechanism', 'SCRAM-SHA-1'), ('payload', Binary('n,,n=foo,r=OTI3MzA3MTEzMTIx', 0)), ('autoAuthorize', 1)]) on namespace admin.$cmd failed: Authentication failed."
db.authenticate.side_effect = OperationFailure(error)
assert auth.authenticate(db, sentinel.user, sentinel.password) is False
def test_authenticate_fails_exception():
db = create_autospec(Database)
db.authenticate.side_effect = PyMongoError("error")
with pytest.raises(PyMongoError):
assert auth.authenticate(db, sentinel.user, sentinel.password) is False
|
import unittest
import numpy as np
from chainer.backends import cuda
from chainer import testing
from chainer.testing import attr
from chainercv.experimental.links.model.fcis import ProposalTargetCreator
from chainercv.utils import generate_random_bbox
from chainercv.utils import mask_to_bbox
class TestProposalTargetCreator(unittest.TestCase):
n_sample = 128
n_class = 21
pos_ratio = 0.25
mask_size = 21
def setUp(self):
n_roi = 1024
n_mask = 10
img_size = (392, 512)
self.roi = generate_random_bbox(n_roi, img_size, 16, 250)
self.mask = np.random.uniform(
size=(n_mask, img_size[0], img_size[1])) > 0.5
self.label = np.random.randint(
0, self.n_class - 1, size=(n_mask,), dtype=np.int32)
self.proposal_target_creator = ProposalTargetCreator(
n_sample=self.n_sample,
pos_ratio=self.pos_ratio)
def check_proposal_target_creator(
self, roi, mask, label, proposal_target_creator):
xp = cuda.get_array_module(roi)
bbox = mask_to_bbox(mask)
sample_roi, gt_roi_mask, gt_roi_label, gt_roi_loc =\
proposal_target_creator(
roi, mask, label, bbox, mask_size=self.mask_size)
# Test types
self.assertIsInstance(sample_roi, xp.ndarray)
self.assertIsInstance(gt_roi_loc, xp.ndarray)
self.assertIsInstance(gt_roi_mask, xp.ndarray)
self.assertIsInstance(gt_roi_label, xp.ndarray)
sample_roi = cuda.to_cpu(sample_roi)
gt_roi_loc = cuda.to_cpu(gt_roi_loc)
gt_roi_mask = cuda.to_cpu(gt_roi_mask)
gt_roi_label = cuda.to_cpu(gt_roi_label)
# Test shapes
self.assertEqual(sample_roi.shape, (self.n_sample, 4))
self.assertEqual(gt_roi_loc.shape, (self.n_sample, 4))
self.assertEqual(
gt_roi_mask.shape, (self.n_sample, self.mask_size, self.mask_size))
self.assertEqual(gt_roi_label.shape, (self.n_sample,))
# Test foreground and background labels
np.testing.assert_equal(np.sum(gt_roi_label >= 0), self.n_sample)
n_pos = np.sum(gt_roi_label >= 1)
n_neg = np.sum(gt_roi_label == 0)
self.assertLessEqual(n_pos, self.n_sample * self.pos_ratio)
self.assertLessEqual(n_neg, self.n_sample - n_pos)
def test_proposal_target_creator_cpu(self):
self.check_proposal_target_creator(
self.roi, self.mask, self.label,
self.proposal_target_creator)
@attr.gpu
def test_proposal_target_creator_gpu(self):
self.check_proposal_target_creator(
cuda.to_gpu(self.roi),
cuda.to_gpu(self.mask),
cuda.to_gpu(self.label),
self.proposal_target_creator)
testing.run_module(__name__, __file__)
|
import unittest
from trashcli.put import TrashPutReporter
class TestTrashPutReporter(unittest.TestCase):
def test_it_should_record_failures(self):
reporter = TrashPutReporter(self)
assert False == reporter.some_file_has_not_be_trashed
reporter.unable_to_trash_file('a file')
assert True == reporter.some_file_has_not_be_trashed
assert 'cannot trash non existent \'a file\'' == self.warning_msg
def warning(self, msg):
self.warning_msg = msg
|
import unittest
import os.path
import matplotlib.pyplot as plt
import numpy as np
from mpl_toolkits.basemap import Basemap
class TestMatplotlib(unittest.TestCase):
def test_plot(self):
plt.plot(np.linspace(0,1,50), np.random.rand(50))
plt.savefig("plot1.png")
self.assertTrue(os.path.isfile("plot1.png"))
def test_basemap(self):
Basemap(width=100,height=100,projection='aeqd',
lat_0=40,lon_0=-105)
|
from cms.plugin_rendering import ContentRenderer
from cms.models.placeholdermodel import Placeholder
from cms.templatetags.cms_tags import RenderPlaceholder as DefaultRenderPlaceholder
from django import template
from django.contrib.auth.models import AnonymousUser
from django.http.request import HttpRequest
from django.utils.html import strip_tags
from sekizai.context_processors import sekizai
register = template.Library()
class EmulateHttpRequest(HttpRequest):
"""
    Use this class to emulate an HttpRequest object.
"""
def __init__(self, language_code=None):
super().__init__()
self.environ = {}
self.method = 'GET'
if language_code:
self.LANGUAGE_CODE = language_code
self.user = AnonymousUser()
self.current_page = None
class RenderPlaceholder(DefaultRenderPlaceholder):
"""
Modified templatetag render_placeholder to be used for rendering the search index templates.
"""
def _get_value(self, context, editable=True, **kwargs):
renderer = ContentRenderer(context['request'])
placeholder = kwargs.get('placeholder')
if not placeholder:
return ''
if isinstance(placeholder, str):
placeholder = Placeholder.objects.get(slot=placeholder)
content = renderer.render_placeholder(
placeholder=placeholder,
context=context,
language=kwargs.get('language'),
editable=editable,
use_cache=False,
width=kwargs.get('width'),
)
return strip_tags(content).replace('\n', '').replace('\t', '')
def get_value(self, context, **kwargs):
context.update(sekizai())
try:
language_code = context['product']._current_language
except (KeyError, AttributeError):
language_code = None
context['request'] = EmulateHttpRequest(language_code)
return self._get_value(context, **kwargs)
register.tag('render_placeholder', RenderPlaceholder)
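# Hedged usage note (the template below is an illustration; the library's load
# name depends on where this module lives and is an assumption):
#
#   {% load <this_template_library> %}
#   {% render_placeholder object.placeholder %}
#
# The placeholder may be passed as a Placeholder instance or as a slot name;
# the rendered markup is stripped down to plain text suitable for indexing.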
|
from datetime import timedelta
import logging
from typing import Optional
from aio_geojson_nsw_rfs_incidents import NswRuralFireServiceIncidentsFeedManager
import voluptuous as vol
from homeassistant.components.geo_location import PLATFORM_SCHEMA, GeolocationEvent
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_LOCATION,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_RADIUS,
CONF_SCAN_INTERVAL,
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP,
LENGTH_KILOMETERS,
)
from homeassistant.core import callback
from homeassistant.helpers import aiohttp_client, config_validation as cv
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
_LOGGER = logging.getLogger(__name__)
ATTR_CATEGORY = "category"
ATTR_COUNCIL_AREA = "council_area"
ATTR_EXTERNAL_ID = "external_id"
ATTR_FIRE = "fire"
ATTR_PUBLICATION_DATE = "publication_date"
ATTR_RESPONSIBLE_AGENCY = "responsible_agency"
ATTR_SIZE = "size"
ATTR_STATUS = "status"
ATTR_TYPE = "type"
CONF_CATEGORIES = "categories"
DEFAULT_RADIUS_IN_KM = 20.0
SCAN_INTERVAL = timedelta(minutes=5)
SIGNAL_DELETE_ENTITY = "nsw_rural_fire_service_feed_delete_{}"
SIGNAL_UPDATE_ENTITY = "nsw_rural_fire_service_feed_update_{}"
SOURCE = "nsw_rural_fire_service_feed"
VALID_CATEGORIES = ["Advice", "Emergency Warning", "Not Applicable", "Watch and Act"]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_CATEGORIES, default=[]): vol.All(
cv.ensure_list, [vol.In(VALID_CATEGORIES)]
),
vol.Optional(CONF_LATITUDE): cv.latitude,
vol.Optional(CONF_LONGITUDE): cv.longitude,
vol.Optional(CONF_RADIUS, default=DEFAULT_RADIUS_IN_KM): vol.Coerce(float),
}
)
async def async_setup_platform(
hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None
):
"""Set up the NSW Rural Fire Service Feed platform."""
scan_interval = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL)
coordinates = (
config.get(CONF_LATITUDE, hass.config.latitude),
config.get(CONF_LONGITUDE, hass.config.longitude),
)
radius_in_km = config[CONF_RADIUS]
categories = config.get(CONF_CATEGORIES)
# Initialize the entity manager.
manager = NswRuralFireServiceFeedEntityManager(
hass, async_add_entities, scan_interval, coordinates, radius_in_km, categories
)
async def start_feed_manager(event):
"""Start feed manager."""
await manager.async_init()
async def stop_feed_manager(event):
"""Stop feed manager."""
await manager.async_stop()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, start_feed_manager)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_feed_manager)
hass.async_create_task(manager.async_update())
class NswRuralFireServiceFeedEntityManager:
"""Feed Entity Manager for NSW Rural Fire Service GeoJSON feed."""
def __init__(
self,
hass,
async_add_entities,
scan_interval,
coordinates,
radius_in_km,
categories,
):
"""Initialize the Feed Entity Manager."""
self._hass = hass
websession = aiohttp_client.async_get_clientsession(hass)
self._feed_manager = NswRuralFireServiceIncidentsFeedManager(
websession,
self._generate_entity,
self._update_entity,
self._remove_entity,
coordinates,
filter_radius=radius_in_km,
filter_categories=categories,
)
self._async_add_entities = async_add_entities
self._scan_interval = scan_interval
self._track_time_remove_callback = None
async def async_init(self):
"""Schedule initial and regular updates based on configured time interval."""
async def update(event_time):
"""Update."""
await self.async_update()
# Trigger updates at regular intervals.
self._track_time_remove_callback = async_track_time_interval(
self._hass, update, self._scan_interval
)
_LOGGER.debug("Feed entity manager initialized")
async def async_update(self):
"""Refresh data."""
await self._feed_manager.update()
_LOGGER.debug("Feed entity manager updated")
async def async_stop(self):
"""Stop this feed entity manager from refreshing."""
if self._track_time_remove_callback:
self._track_time_remove_callback()
_LOGGER.debug("Feed entity manager stopped")
def get_entry(self, external_id):
"""Get feed entry by external id."""
return self._feed_manager.feed_entries.get(external_id)
async def _generate_entity(self, external_id):
"""Generate new entity."""
new_entity = NswRuralFireServiceLocationEvent(self, external_id)
# Add new entities to HA.
self._async_add_entities([new_entity], True)
async def _update_entity(self, external_id):
"""Update entity."""
async_dispatcher_send(self._hass, SIGNAL_UPDATE_ENTITY.format(external_id))
async def _remove_entity(self, external_id):
"""Remove entity."""
async_dispatcher_send(self._hass, SIGNAL_DELETE_ENTITY.format(external_id))
class NswRuralFireServiceLocationEvent(GeolocationEvent):
"""This represents an external event with NSW Rural Fire Service data."""
def __init__(self, feed_manager, external_id):
"""Initialize entity with data from feed entry."""
self._feed_manager = feed_manager
self._external_id = external_id
self._name = None
self._distance = None
self._latitude = None
self._longitude = None
self._attribution = None
self._category = None
self._publication_date = None
self._location = None
self._council_area = None
self._status = None
self._type = None
self._fire = None
self._size = None
self._responsible_agency = None
self._remove_signal_delete = None
self._remove_signal_update = None
async def async_added_to_hass(self):
"""Call when entity is added to hass."""
self._remove_signal_delete = async_dispatcher_connect(
self.hass,
SIGNAL_DELETE_ENTITY.format(self._external_id),
self._delete_callback,
)
self._remove_signal_update = async_dispatcher_connect(
self.hass,
SIGNAL_UPDATE_ENTITY.format(self._external_id),
self._update_callback,
)
async def async_will_remove_from_hass(self) -> None:
"""Call when entity will be removed from hass."""
self._remove_signal_delete()
self._remove_signal_update()
@callback
def _delete_callback(self):
"""Remove this entity."""
self.hass.async_create_task(self.async_remove())
@callback
def _update_callback(self):
"""Call update method."""
self.async_schedule_update_ha_state(True)
@property
def should_poll(self):
"""No polling needed for NSW Rural Fire Service location events."""
return False
async def async_update(self):
"""Update this entity from the data held in the feed manager."""
_LOGGER.debug("Updating %s", self._external_id)
feed_entry = self._feed_manager.get_entry(self._external_id)
if feed_entry:
self._update_from_feed(feed_entry)
def _update_from_feed(self, feed_entry):
"""Update the internal state from the provided feed entry."""
self._name = feed_entry.title
self._distance = feed_entry.distance_to_home
self._latitude = feed_entry.coordinates[0]
self._longitude = feed_entry.coordinates[1]
self._attribution = feed_entry.attribution
self._category = feed_entry.category
self._publication_date = feed_entry.publication_date
self._location = feed_entry.location
self._council_area = feed_entry.council_area
self._status = feed_entry.status
self._type = feed_entry.type
self._fire = feed_entry.fire
self._size = feed_entry.size
self._responsible_agency = feed_entry.responsible_agency
@property
def icon(self):
"""Return the icon to use in the frontend."""
if self._fire:
return "mdi:fire"
return "mdi:alarm-light"
@property
def source(self) -> str:
"""Return source value of this external event."""
return SOURCE
@property
def name(self) -> Optional[str]:
"""Return the name of the entity."""
return self._name
@property
def distance(self) -> Optional[float]:
"""Return distance value of this external event."""
return self._distance
@property
def latitude(self) -> Optional[float]:
"""Return latitude value of this external event."""
return self._latitude
@property
def longitude(self) -> Optional[float]:
"""Return longitude value of this external event."""
return self._longitude
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return LENGTH_KILOMETERS
@property
def device_state_attributes(self):
"""Return the device state attributes."""
attributes = {}
for key, value in (
(ATTR_EXTERNAL_ID, self._external_id),
(ATTR_CATEGORY, self._category),
(ATTR_LOCATION, self._location),
(ATTR_ATTRIBUTION, self._attribution),
(ATTR_PUBLICATION_DATE, self._publication_date),
(ATTR_COUNCIL_AREA, self._council_area),
(ATTR_STATUS, self._status),
(ATTR_TYPE, self._type),
(ATTR_FIRE, self._fire),
(ATTR_SIZE, self._size),
(ATTR_RESPONSIBLE_AGENCY, self._responsible_agency),
):
if value or isinstance(value, bool):
attributes[key] = value
return attributes
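# Hedged configuration sketch (the platform key mirrors SOURCE above; the
# remaining values are illustrative assumptions, not defaults beyond those
# declared in PLATFORM_SCHEMA):
#
# geo_location:
#   - platform: nsw_rural_fire_service_feed
#     radius: 50
#     categories:
#       - "Emergency Warning"
#       - "Watch and Act"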
|
import pkgutil
from scattertext.Common import SEMIOTIC_SQUARE_HTML_PATH
class ClickableTerms:
@staticmethod
def get_clickable_lexicon(lexicon, plot_interface='plotInterface'):
out = []
for term in lexicon:
clickable_term = ClickableTerms.get_clickable_term(term, plot_interface)
out.append(clickable_term)
return ',\n'.join(out)
@staticmethod
def get_clickable_term(term, plot_interface='plotInterface', other_plot_interface=None):
onclick_js = ClickableTerms._get_onclick_js(term.replace("'", "\\'"), plot_interface, other_plot_interface)
onmouseover_js = (
"{plot_interface}.showToolTipForTerm({plot_interface}.data, {plot_interface}.svg, '%s',"
% (term.replace("'", "\\'"))
+ "{plot_interface}.termDict['%s'])" % (term.replace("'", "\\'"))
)
onmouseout_js = "{plot_interface}.tooltip.transition().style('opacity', 0)"
template = ('<span onclick="' + onclick_js + '" onmouseover="' + onmouseover_js + '" onmouseout="' +
onmouseout_js + '">{term}</span>')
clickable_term = template.format(term=term, plot_interface=plot_interface)
return clickable_term
@staticmethod
def _get_onclick_js(term, plot_interface, other_plot_interface = None):
if other_plot_interface:
return "{other_plot_interface}.drawCategoryAssociation(" \
"{plot_interface}.termDict['{term}'].ci); return false;"\
.format(other_plot_interface=other_plot_interface, plot_interface=plot_interface,
term=term.replace("'", "\\'"))
return "{plot_interface}.displayTermContexts({plot_interface}.data, {plot_interface}.gatherTermContexts(" \
"{plot_interface}.termDict['%s']));" % (term.replace("'", "\'"))
def get_halo_td_style():
return '''
<style>
td {
border-collapse: collapse;
box-sizing: border-box;
color: rgb(0, 0, 0);
font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;
font-size: 12px;
height: auto ;
line-height: normal;
text-align: right;
text-size-adjust:100% ;
-webkit-border-horizontal-spacing: 0px;
-webkit-border-vertical-spacing:0px;
}
</style>'''
class HTMLSemioticSquareViz(object):
def __init__(self, semiotic_square):
'''
Parameters
----------
semiotic_square : SemioticSquare
'''
self.semiotic_square_ = semiotic_square
def get_html(self, num_terms=10):
return self._get_style() + self._get_table(num_terms)
def _get_style(self):
return get_halo_td_style()
def _get_table(self, num_terms):
lexicons = self.semiotic_square_.get_lexicons(num_terms=num_terms)
template = self._get_template()
formatters = {category: self._lexicon_to_html(lexicon)
for category, lexicon in lexicons.items()}
formatters.update(self.semiotic_square_.get_labels())
for k, v in formatters.items():
template = template.replace('{' + k + '}', v)
return template
def _lexicon_to_html(self, lexicon):
return ClickableTerms.get_clickable_lexicon(lexicon)
def _get_template(self):
return pkgutil.get_data('scattertext', SEMIOTIC_SQUARE_HTML_PATH).decode('utf-8')
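# Hedged usage sketch (building the SemioticSquare itself happens elsewhere in
# scattertext; `semiotic_square` below is assumed to be such an object):
#
#   viz = HTMLSemioticSquareViz(semiotic_square)
#   table_html = viz.get_html(num_terms=10)  # <style> block plus clickable table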
|
import pytest
from zigpy.config import CONF_DEVICE, CONF_DEVICE_PATH
from homeassistant.components.zha.core.const import (
CONF_BAUDRATE,
CONF_RADIO_TYPE,
CONF_USB_PATH,
DOMAIN,
)
from homeassistant.const import MAJOR_VERSION, MINOR_VERSION
from homeassistant.setup import async_setup_component
from tests.async_mock import AsyncMock, patch
from tests.common import MockConfigEntry
DATA_RADIO_TYPE = "deconz"
DATA_PORT_PATH = "/dev/serial/by-id/FTDI_USB__-__Serial_Cable_12345678-if00-port0"
@pytest.fixture
def config_entry_v1(hass):
"""Config entry version 1 fixture."""
return MockConfigEntry(
domain=DOMAIN,
data={CONF_RADIO_TYPE: DATA_RADIO_TYPE, CONF_USB_PATH: DATA_PORT_PATH},
version=1,
)
@pytest.mark.parametrize("config", ({}, {DOMAIN: {}}))
@patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True))
async def test_migration_from_v1_no_baudrate(hass, config_entry_v1, config):
"""Test migration of config entry from v1."""
config_entry_v1.add_to_hass(hass)
assert await async_setup_component(hass, DOMAIN, config)
assert config_entry_v1.data[CONF_RADIO_TYPE] == DATA_RADIO_TYPE
assert CONF_DEVICE in config_entry_v1.data
assert config_entry_v1.data[CONF_DEVICE][CONF_DEVICE_PATH] == DATA_PORT_PATH
assert CONF_BAUDRATE not in config_entry_v1.data[CONF_DEVICE]
assert CONF_USB_PATH not in config_entry_v1.data
assert config_entry_v1.version == 2
@patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True))
async def test_migration_from_v1_with_baudrate(hass, config_entry_v1):
"""Test migration of config entry from v1 with baudrate in config."""
config_entry_v1.add_to_hass(hass)
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_BAUDRATE: 115200}})
assert config_entry_v1.data[CONF_RADIO_TYPE] == DATA_RADIO_TYPE
assert CONF_DEVICE in config_entry_v1.data
assert config_entry_v1.data[CONF_DEVICE][CONF_DEVICE_PATH] == DATA_PORT_PATH
assert CONF_USB_PATH not in config_entry_v1.data
assert CONF_BAUDRATE in config_entry_v1.data[CONF_DEVICE]
assert config_entry_v1.data[CONF_DEVICE][CONF_BAUDRATE] == 115200
assert config_entry_v1.version == 2
@patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True))
async def test_migration_from_v1_wrong_baudrate(hass, config_entry_v1):
"""Test migration of config entry from v1 with wrong baudrate."""
config_entry_v1.add_to_hass(hass)
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_BAUDRATE: 115222}})
assert config_entry_v1.data[CONF_RADIO_TYPE] == DATA_RADIO_TYPE
assert CONF_DEVICE in config_entry_v1.data
assert config_entry_v1.data[CONF_DEVICE][CONF_DEVICE_PATH] == DATA_PORT_PATH
assert CONF_USB_PATH not in config_entry_v1.data
assert CONF_BAUDRATE not in config_entry_v1.data[CONF_DEVICE]
assert config_entry_v1.version == 2
@pytest.mark.skipif(
MAJOR_VERSION != 0 or (MAJOR_VERSION == 0 and MINOR_VERSION >= 112),
reason="Not applicaable for this version",
)
@pytest.mark.parametrize(
"zha_config",
(
{},
{CONF_USB_PATH: "str"},
{CONF_RADIO_TYPE: "ezsp"},
{CONF_RADIO_TYPE: "ezsp", CONF_USB_PATH: "str"},
),
)
async def test_config_deprecation(hass, zha_config):
    """Test config option deprecation."""
await async_setup_component(hass, "persistent_notification", {})
with patch(
"homeassistant.components.zha.async_setup", return_value=True
) as setup_mock:
assert await async_setup_component(hass, DOMAIN, {DOMAIN: zha_config})
assert setup_mock.call_count == 1
|
import asyncio
from Plugwise_Smile.Smile import Smile
from homeassistant.components.plugwise import DOMAIN
from homeassistant.components.plugwise.gateway import async_unload_entry
from homeassistant.config_entries import (
ENTRY_STATE_SETUP_ERROR,
ENTRY_STATE_SETUP_RETRY,
)
from tests.common import AsyncMock
from tests.components.plugwise.common import async_init_integration
async def test_smile_unauthorized(hass, mock_smile_unauth):
"""Test failing unauthorization by Smile."""
entry = await async_init_integration(hass, mock_smile_unauth)
assert entry.state == ENTRY_STATE_SETUP_ERROR
async def test_smile_error(hass, mock_smile_error):
"""Test server error handling by Smile."""
entry = await async_init_integration(hass, mock_smile_error)
assert entry.state == ENTRY_STATE_SETUP_RETRY
async def test_smile_notconnect(hass, mock_smile_notconnect):
"""Connection failure error handling by Smile."""
mock_smile_notconnect.connect.return_value = False
entry = await async_init_integration(hass, mock_smile_notconnect)
assert entry.state == ENTRY_STATE_SETUP_RETRY
async def test_smile_timeout(hass, mock_smile_notconnect):
"""Timeout error handling by Smile."""
mock_smile_notconnect.connect.side_effect = asyncio.TimeoutError
entry = await async_init_integration(hass, mock_smile_notconnect)
assert entry.state == ENTRY_STATE_SETUP_RETRY
async def test_smile_adam_xmlerror(hass, mock_smile_adam):
"""Detect malformed XML by Smile in Adam environment."""
mock_smile_adam.full_update_device.side_effect = Smile.XMLDataMissingError
entry = await async_init_integration(hass, mock_smile_adam)
assert entry.state == ENTRY_STATE_SETUP_RETRY
async def test_unload_entry(hass, mock_smile_adam):
"""Test being able to unload an entry."""
entry = await async_init_integration(hass, mock_smile_adam)
mock_smile_adam.async_reset = AsyncMock(return_value=True)
assert await async_unload_entry(hass, entry)
assert not hass.data[DOMAIN]
|
import re
from absl import flags
from perfkitbenchmarker import flag_util
from perfkitbenchmarker import regex_util
NVIDIA_DRIVER_LOCATION_BASE = 'https://us.download.nvidia.com/tesla'
NVIDIA_TESLA_K80 = 'k80'
NVIDIA_TESLA_P4 = 'p4'
NVIDIA_TESLA_P100 = 'p100'
NVIDIA_TESLA_V100 = 'v100'
NVIDIA_TESLA_T4 = 't4'
NVIDIA_TESLA_A100 = 'a100'
"""Default GPU clocks and autoboost configurations.
Base_clock is the default clock speeds when setting the GPU clocks. Max_clock
is currently unused. The clock speeds are in the format of
[memory_clock in MHz, graphics_clock in MHz].
"""
GPU_DEFAULTS = {
NVIDIA_TESLA_K80: {
'base_clock': [2505, 562],
'max_clock': [2505, 875],
'autoboost_enabled': True,
},
NVIDIA_TESLA_P4: {
'base_clock': [3003, 885],
'max_clock': [3003, 1531],
'autoboost_enabled': None,
},
NVIDIA_TESLA_P100: {
'base_clock': [715, 1189],
'max_clock': [715, 1328],
'autoboost_enabled': None,
},
NVIDIA_TESLA_V100: {
'base_clock': [877, 1312],
'max_clock': [877, 1530],
'autoboost_enabled': None,
},
NVIDIA_TESLA_T4: {
'base_clock': [5001, 585],
'max_clock': [5001, 1590],
'autoboost_enabled': None,
},
NVIDIA_TESLA_A100: {
'base_clock': [1215, 1410],
'max_clock': [1215, 1410],
'autoboost_enabled': None,
},
}
EXTRACT_CLOCK_SPEEDS_REGEX = r'(\d*).*,\s*(\d*)'
flag_util.DEFINE_integerlist('gpu_clock_speeds',
None,
'desired gpu clock speeds in the form '
'[memory clock, graphics clock]')
flags.DEFINE_boolean('gpu_autoboost_enabled', None,
'whether gpu autoboost is enabled')
flags.DEFINE_string('nvidia_driver_version', None,
'The version of nvidia driver to install. '
'For example, "418.67" or "418.87.01"')
flags.DEFINE_string('nvidia_driver_x_library_path', '/usr/lib',
'X library path for nvidia driver installation')
flags.DEFINE_string('nvidia_driver_x_module_path', '/usr/lib/xorg/modules',
'X module path for nvidia driver installation')
flags.DEFINE_boolean('nvidia_driver_persistence_mode', None,
'whether to enable persistence mode on the NVIDIA GPU')
FLAGS = flags.FLAGS
class UnsupportedClockSpeedError(Exception):
pass
class NvidiaSmiParseOutputError(Exception):
pass
class HeterogeneousGpuTypesError(Exception):
pass
class UnsupportedGpuTypeError(Exception):
pass
def CheckNvidiaGpuExists(vm):
"""Returns whether NVIDIA GPU exists or not on the vm.
Args:
vm: The virtual machine to check.
Returns:
True or False depending on whether NVIDIA GPU exists.
"""
vm.Install('pciutils')
output, _ = vm.RemoteCommand('sudo lspci', should_log=True)
regex = re.compile(r'3D controller: NVIDIA Corporation')
return regex.search(output) is not None
def CheckNvidiaSmiExists(vm):
"""Returns whether nvidia-smi is installed or not on a VM.
Args:
    vm: The virtual machine to check.
Returns:
True or False depending on whether nvidia-smi command exists.
"""
resp, _ = vm.RemoteHostCommand('command -v nvidia-smi',
ignore_failure=True,
suppress_warning=True)
return bool(resp.rstrip())
def GetDriverVersion(vm):
"""Returns the NVIDIA driver version as a string.
Args:
vm: Virtual machine to query.
Returns:
String containing NVIDIA driver version installed.
Raises:
NvidiaSmiParseOutputError: If nvidia-smi output cannot be parsed.
"""
stdout, _ = vm.RemoteCommand('nvidia-smi', should_log=True)
regex = r'Driver Version\:\s+(\S+)'
match = re.search(regex, stdout)
if match:
return str(match.group(1))
raise NvidiaSmiParseOutputError('Unable to parse driver version from {}'
.format(stdout))
def GetGpuType(vm):
"""Return the type of NVIDIA gpu(s) installed on the vm.
Args:
vm: Virtual machine to query.
Returns:
Type of gpus installed on the vm as a string.
Raises:
NvidiaSmiParseOutputError: If nvidia-smi output cannot be parsed.
HeterogeneousGpuTypesError: If more than one gpu type is detected.
    UnsupportedGpuTypeError: If gpu type is not supported.
Example:
If 'nvidia-smi -L' returns:
GPU 0: Tesla V100-SXM2-16GB (UUID: GPU-1a046bb9-e456-45d3-5a35-52da392d09a5)
GPU 1: Tesla V100-SXM2-16GB (UUID: GPU-56cf4732-054c-4e40-9680-0ec27e97d21c)
GPU 2: Tesla V100-SXM2-16GB (UUID: GPU-4c7685ad-4b3a-8adc-ce20-f3a945127a8a)
GPU 3: Tesla V100-SXM2-16GB (UUID: GPU-0b034e63-22be-454b-b395-382e2d324728)
GPU 4: Tesla V100-SXM2-16GB (UUID: GPU-b0861159-4727-ef2f-ff66-73a765f4ecb6)
GPU 5: Tesla V100-SXM2-16GB (UUID: GPU-16ccaf51-1d1f-babe-9f3d-377e900bf37e)
GPU 6: Tesla V100-SXM2-16GB (UUID: GPU-6eba1fa6-de10-80e9-ec5f-4b8beeff7e12)
GPU 7: Tesla V100-SXM2-16GB (UUID: GPU-cba5a243-219c-df12-013e-1dbc98a8b0de)
GetGpuType() will return:
['V100-SXM2-16GB', 'V100-SXM2-16GB', 'V100-SXM2-16GB', 'V100-SXM2-16GB',
'V100-SXM2-16GB', 'V100-SXM2-16GB', 'V100-SXM2-16GB', 'V100-SXM2-16GB']
"""
stdout, _ = vm.RemoteCommand('nvidia-smi -L', should_log=True)
try:
gpu_types = []
for line in stdout.splitlines():
if not line:
continue
splitted = line.split()
if splitted[2] == 'Tesla':
gpu_types.append(splitted[3])
else:
gpu_types.append(splitted[2])
  except Exception:
raise NvidiaSmiParseOutputError('Unable to parse gpu type from {}'
.format(stdout))
if any(gpu_type != gpu_types[0] for gpu_type in gpu_types):
raise HeterogeneousGpuTypesError(
'PKB only supports one type of gpu per VM')
if 'K80' in gpu_types[0]:
return NVIDIA_TESLA_K80
if 'P4' in gpu_types[0]:
return NVIDIA_TESLA_P4
if 'P100' in gpu_types[0]:
return NVIDIA_TESLA_P100
if 'V100' in gpu_types[0]:
return NVIDIA_TESLA_V100
if 'T4' in gpu_types[0]:
return NVIDIA_TESLA_T4
if 'A100' in gpu_types[0]:
return NVIDIA_TESLA_A100
  raise UnsupportedGpuTypeError(
      'Gpu type {0} is not supported by PKB'.format(gpu_types[0]))
def QueryNumberOfGpus(vm):
"""Returns the number of NVIDIA GPUs on the system.
Args:
vm: Virtual machine to query.
Returns:
Integer indicating the number of NVIDIA GPUs present on the vm.
"""
stdout, _ = vm.RemoteCommand('sudo nvidia-smi --query-gpu=count --id=0 '
'--format=csv', should_log=True)
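  # The CSV reply has a header row and a value row (e.g. "count\n8"), so the
  # second whitespace-separated token is the GPU count.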
return int(stdout.split()[1])
def GetPeerToPeerTopology(vm):
"""Returns a string specifying which GPUs can access each other via p2p.
Args:
vm: Virtual machine to operate on.
Example:
If p2p topology from nvidia-smi topo -p2p r looks like this:
0 1 2 3
0 X OK NS NS
1 OK X NS NS
2 NS NS X OK
3 NS NS OK X
GetTopology will return 'Y Y N N;Y Y N N;N N Y Y;N N Y Y'
"""
stdout, _ = vm.RemoteCommand('nvidia-smi topo -p2p r', should_log=True)
lines = [line.split() for line in stdout.splitlines()]
num_gpus = len(lines[0])
results = []
for idx, line in enumerate(lines[1:]):
if idx >= num_gpus:
break
results.append(' '.join(line[1:]))
# Delimit each GPU result with semicolons,
# and simplify the result character set to 'Y' and 'N'.
return (';'.join(results)
.replace('X', 'Y') # replace X (self) with Y
.replace('OK', 'Y') # replace OK with Y
.replace('NS', 'N')) # replace NS (not supported) with N
def SetAndConfirmGpuClocks(vm):
"""Sets and confirms the GPU clock speed and autoboost policy.
  The clock values are provided either by the --gpu_clock_speeds and
  --gpu_autoboost_enabled flags, or from gpu-specific defaults. If a device is
  queried and its clock speed does not align with what it was just set to, an
  exception will be raised.
Args:
vm: The virtual machine to operate on.
Raises:
UnsupportedClockSpeedError: If a GPU did not accept the
provided clock speeds.
"""
gpu_type = GetGpuType(vm)
gpu_clock_speeds = GPU_DEFAULTS[gpu_type]['base_clock']
autoboost_enabled = GPU_DEFAULTS[gpu_type]['autoboost_enabled']
if FLAGS.gpu_clock_speeds is not None:
gpu_clock_speeds = FLAGS.gpu_clock_speeds
if FLAGS.gpu_autoboost_enabled is not None:
autoboost_enabled = FLAGS.gpu_autoboost_enabled
desired_memory_clock = gpu_clock_speeds[0]
desired_graphics_clock = gpu_clock_speeds[1]
EnablePersistenceMode(vm)
SetGpuClockSpeed(vm, desired_memory_clock, desired_graphics_clock)
SetAutoboostDefaultPolicy(vm, autoboost_enabled)
num_gpus = QueryNumberOfGpus(vm)
for i in range(num_gpus):
if QueryGpuClockSpeed(vm, i) != (desired_memory_clock,
desired_graphics_clock):
raise UnsupportedClockSpeedError(
'Unrecoverable error setting GPU #{} clock speed to {},{}'.format(
i, desired_memory_clock, desired_graphics_clock))
def SetGpuClockSpeed(vm, memory_clock_speed, graphics_clock_speed):
"""Sets autoboost and memory and graphics clocks to the specified frequency.
Args:
vm: Virtual machine to operate on.
memory_clock_speed: Desired speed of the memory clock, in MHz.
graphics_clock_speed: Desired speed of the graphics clock, in MHz.
"""
num_gpus = QueryNumberOfGpus(vm)
for device_id in range(num_gpus):
current_clock_speeds = QueryGpuClockSpeed(vm, device_id)
if current_clock_speeds != (memory_clock_speed, graphics_clock_speed):
vm.RemoteCommand('sudo nvidia-smi -ac {},{} --id={}'.format(
memory_clock_speed,
graphics_clock_speed,
device_id
))
def QueryGpuClockSpeed(vm, device_id):
"""Returns the value of the memory and graphics clock.
All clock values are in MHz.
Args:
vm: Virtual machine to operate on.
device_id: Id of GPU device to query.
Returns:
Tuple of clock speeds in MHz in the form (memory clock, graphics clock).
"""
query = ('sudo nvidia-smi --query-gpu=clocks.applications.memory,'
'clocks.applications.graphics --format=csv --id={0}'
.format(device_id))
stdout, _ = vm.RemoteCommand(query, should_log=True)
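  # Skip the CSV header; the second line holds the applied clocks, e.g.
  # "2505 MHz, 875 MHz", from which the two integers are extracted below.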
clock_speeds = stdout.splitlines()[1]
matches = regex_util.ExtractAllMatches(EXTRACT_CLOCK_SPEEDS_REGEX,
clock_speeds)[0]
return (int(matches[0]), int(matches[1]))
def EnablePersistenceMode(vm):
"""Enables persistence mode on the NVIDIA driver.
Args:
vm: Virtual machine to operate on.
"""
vm.RemoteCommand('sudo nvidia-smi -pm 1')
def SetAutoboostDefaultPolicy(vm, autoboost_enabled):
"""Sets the autoboost policy to the specified value.
For each GPU on the VM, this function will set the autoboost policy
to the value specified by autoboost_enabled.
Args:
vm: Virtual machine to operate on.
autoboost_enabled: Bool or None. Value (if any) to set autoboost policy to
"""
if autoboost_enabled is None:
return
num_gpus = QueryNumberOfGpus(vm)
for device_id in range(num_gpus):
current_state = QueryAutoboostPolicy(vm, device_id)
if current_state['autoboost_default'] != autoboost_enabled:
vm.RemoteCommand('sudo nvidia-smi --auto-boost-default={0} --id={1}'
.format(1 if autoboost_enabled else 0, device_id))
def QueryAutoboostPolicy(vm, device_id):
"""Returns the state of autoboost and autoboost_default.
Args:
vm: Virtual machine to operate on.
device_id: Id of GPU device to query.
Returns:
Dict containing values for autoboost and autoboost_default.
Values can be True (autoboost on), False (autoboost off),
and None (autoboost not supported).
Raises:
NvidiaSmiParseOutputError: If output from nvidia-smi can not be parsed.
"""
autoboost_regex = r'Auto Boost\s*:\s*(\S+)'
autoboost_default_regex = r'Auto Boost Default\s*:\s*(\S+)'
query = 'sudo nvidia-smi -q -d CLOCK --id={0}'.format(device_id)
stdout, _ = vm.RemoteCommand(query, should_log=True)
autoboost_match = re.search(autoboost_regex, stdout)
autoboost_default_match = re.search(autoboost_default_regex, stdout)
nvidia_smi_output_string_to_value = {
'On': True,
'Off': False,
'N/A': None,
}
if (autoboost_match is None) or (autoboost_default_match is None):
raise NvidiaSmiParseOutputError('Unable to parse Auto Boost policy from {}'
.format(stdout))
return {
'autoboost': nvidia_smi_output_string_to_value[
autoboost_match.group(1)],
'autoboost_default': nvidia_smi_output_string_to_value[
autoboost_default_match.group(1)]
}
def GetMetadata(vm):
"""Returns gpu-specific metadata as a dict.
Args:
vm: Virtual machine to operate on.
Returns:
A dict of gpu-specific metadata.
"""
clock_speeds = QueryGpuClockSpeed(vm, 0)
autoboost_policy = QueryAutoboostPolicy(vm, 0)
return {
'gpu_memory_clock': clock_speeds[0],
'gpu_graphics_clock': clock_speeds[1],
'gpu_autoboost': autoboost_policy['autoboost'],
'gpu_autoboost_default': autoboost_policy['autoboost_default'],
'nvidia_driver_version': GetDriverVersion(vm),
'gpu_type': GetGpuType(vm),
'num_gpus': QueryNumberOfGpus(vm),
'peer_to_peer_gpu_topology': GetPeerToPeerTopology(vm),
}
def DoPostInstallActions(vm):
"""Perform post NVIDIA driver install action on the vm.
Args:
vm: The virtual machine to operate on.
"""
SetAndConfirmGpuClocks(vm)
def Install(vm):
"""Install NVIDIA GPU driver on the vm.
Args:
vm: The virtual machine to install NVIDIA driver on.
"""
version_to_install = FLAGS.nvidia_driver_version
if not version_to_install:
return
location = ('{base}/{version}/NVIDIA-Linux-x86_64-{version}.run'
.format(base=NVIDIA_DRIVER_LOCATION_BASE,
version=version_to_install))
vm.Install('wget')
tokens = re.split('/', location)
filename = tokens[-1]
vm.RemoteCommand('wget {location} && chmod 755 {filename} '
.format(location=location, filename=filename),
should_log=True)
vm.RemoteCommand('sudo ./{filename} -q -x-module-path={x_module_path} '
'--ui=none -x-library-path={x_library_path} '
'--no-install-compat32-libs'
.format(filename=filename,
x_module_path=FLAGS.nvidia_driver_x_module_path,
x_library_path=FLAGS.nvidia_driver_x_library_path),
should_log=True)
if FLAGS.nvidia_driver_persistence_mode:
EnablePersistenceMode(vm)
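# ---------------------------------------------------------------------------
# Hedged usage sketch (the `vm` object and the calling benchmark belong to the
# wider PKB framework and are assumptions, shown only for orientation):
#
#   if CheckNvidiaGpuExists(vm):
#     Install(vm)               # no-op unless --nvidia_driver_version is set
#     DoPostInstallActions(vm)  # pins clocks and autoboost policy
#     metadata.update(GetMetadata(vm))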
|
import eventlet
from kombu import Connection
eventlet.monkey_patch()
def send_many(n):
#: Create connection
    #: If hostname, userid, password and virtual_host are not specified,
    #: the values below are the defaults, but they are listed here so they
    #: can be easily changed.
with Connection('amqp://guest:guest@localhost:5672//') as connection:
#: SimpleQueue mimics the interface of the Python Queue module.
#: First argument can either be a queue name or a kombu.Queue object.
#: If a name, then the queue will be declared with the name as the
#: queue name, exchange name and routing key.
with connection.SimpleQueue('kombu_demo') as queue:
def send_message(i):
queue.put({'hello': f'world{i}'})
pool = eventlet.GreenPool(10)
for i in range(n):
pool.spawn(send_message, i)
pool.waitall()
if __name__ == '__main__':
send_many(10)
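# Hedged companion sketch (not part of the original example): the same
# SimpleQueue interface can drain the messages produced above, e.g.
#
#   with Connection('amqp://guest:guest@localhost:5672//') as connection:
#       with connection.SimpleQueue('kombu_demo') as queue:
#           message = queue.get(block=True, timeout=1)
#           print(message.payload)  # e.g. {'hello': 'world0'}
#           message.ack()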
|
from datetime import timedelta
from homeassistant.components.streamlabswater import DOMAIN as STREAMLABSWATER_DOMAIN
from homeassistant.const import VOLUME_GALLONS
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
DEPENDENCIES = ["streamlabswater"]
WATER_ICON = "mdi:water"
MIN_TIME_BETWEEN_USAGE_UPDATES = timedelta(seconds=60)
NAME_DAILY_USAGE = "Daily Water"
NAME_MONTHLY_USAGE = "Monthly Water"
NAME_YEARLY_USAGE = "Yearly Water"
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up water usage sensors."""
client = hass.data[STREAMLABSWATER_DOMAIN]["client"]
location_id = hass.data[STREAMLABSWATER_DOMAIN]["location_id"]
location_name = hass.data[STREAMLABSWATER_DOMAIN]["location_name"]
streamlabs_usage_data = StreamlabsUsageData(location_id, client)
streamlabs_usage_data.update()
add_devices(
[
StreamLabsDailyUsage(location_name, streamlabs_usage_data),
StreamLabsMonthlyUsage(location_name, streamlabs_usage_data),
StreamLabsYearlyUsage(location_name, streamlabs_usage_data),
]
)
class StreamlabsUsageData:
"""Track and query usage data."""
def __init__(self, location_id, client):
"""Initialize the usage data."""
self._location_id = location_id
self._client = client
self._today = None
self._this_month = None
self._this_year = None
@Throttle(MIN_TIME_BETWEEN_USAGE_UPDATES)
def update(self):
"""Query and store usage data."""
water_usage = self._client.get_water_usage_summary(self._location_id)
self._today = round(water_usage["today"], 1)
self._this_month = round(water_usage["thisMonth"], 1)
self._this_year = round(water_usage["thisYear"], 1)
def get_daily_usage(self):
"""Return the day's usage."""
return self._today
def get_monthly_usage(self):
"""Return the month's usage."""
return self._this_month
def get_yearly_usage(self):
"""Return the year's usage."""
return self._this_year
class StreamLabsDailyUsage(Entity):
"""Monitors the daily water usage."""
def __init__(self, location_name, streamlabs_usage_data):
"""Initialize the daily water usage device."""
self._location_name = location_name
self._streamlabs_usage_data = streamlabs_usage_data
self._state = None
@property
def name(self):
"""Return the name for daily usage."""
return f"{self._location_name} {NAME_DAILY_USAGE}"
@property
def icon(self):
"""Return the daily usage icon."""
return WATER_ICON
@property
def state(self):
"""Return the current daily usage."""
return self._streamlabs_usage_data.get_daily_usage()
@property
def unit_of_measurement(self):
"""Return gallons as the unit measurement for water."""
return VOLUME_GALLONS
def update(self):
"""Retrieve the latest daily usage."""
self._streamlabs_usage_data.update()
class StreamLabsMonthlyUsage(StreamLabsDailyUsage):
"""Monitors the monthly water usage."""
@property
def name(self):
"""Return the name for monthly usage."""
return f"{self._location_name} {NAME_MONTHLY_USAGE}"
@property
def state(self):
"""Return the current monthly usage."""
return self._streamlabs_usage_data.get_monthly_usage()
class StreamLabsYearlyUsage(StreamLabsDailyUsage):
"""Monitors the yearly water usage."""
@property
def name(self):
"""Return the name for yearly usage."""
return f"{self._location_name} {NAME_YEARLY_USAGE}"
@property
def state(self):
"""Return the current yearly usage."""
return self._streamlabs_usage_data.get_yearly_usage()
|
import sys
__author__ = "github.com/casperdcl"
__all__ = ['tqdm_pandas']
def tqdm_pandas(tclass, *targs, **tkwargs):
"""
Registers the given `tqdm` instance with
`pandas.core.groupby.DataFrameGroupBy.progress_apply`.
It will even close() the `tqdm` instance upon completion.
Parameters
----------
tclass : tqdm class you want to use (eg, tqdm, tqdm_notebook, etc)
targs and tkwargs : arguments for the tqdm instance
Examples
--------
>>> import pandas as pd
>>> import numpy as np
>>> from tqdm import tqdm, tqdm_pandas
>>>
>>> df = pd.DataFrame(np.random.randint(0, 100, (100000, 6)))
>>> tqdm_pandas(tqdm, leave=True) # can use tqdm_gui, optional kwargs, etc
>>> # Now you can use `progress_apply` instead of `apply`
>>> df.groupby(0).progress_apply(lambda x: x**2)
References
----------
https://stackoverflow.com/questions/18603270/
progress-indicator-during-pandas-operations-python
"""
from tqdm import TqdmDeprecationWarning
if isinstance(tclass, type) or (getattr(tclass, '__name__', '').startswith(
'tqdm_')): # delayed adapter case
TqdmDeprecationWarning("""\
Please use `tqdm.pandas(...)` instead of `tqdm_pandas(tqdm, ...)`.
""", fp_write=getattr(tkwargs.get('file', None), 'write', sys.stderr.write))
tclass.pandas(*targs, **tkwargs)
else:
TqdmDeprecationWarning("""\
Please use `tqdm.pandas(...)` instead of `tqdm_pandas(tqdm(...))`.
""", fp_write=getattr(tclass.fp, 'write', sys.stderr.write))
type(tclass).pandas(deprecated_t=tclass)
|
import numpy as np
import pytest
import itertools
from tensornetwork.block_sparse.charge import (U1Charge, charge_equal,
fuse_ndarray_charges, BaseCharge)
from tensornetwork.block_sparse.utils import (fuse_ndarrays, _get_strides,
fuse_stride_arrays, unique)
from tensornetwork.block_sparse.index import Index
from tensornetwork.block_sparse.blocksparse_utils import (
compute_sparse_lookup, compute_fused_charge_degeneracies,
compute_unique_fused_charges, compute_num_nonzero, reduce_charges,
_find_diagonal_sparse_blocks, _find_transposed_diagonal_sparse_blocks,
get_flat_meta_data, _to_string)
np_dtypes = [np.float64, np.complex128]
np_tensordot_dtypes = [np.float64, np.complex128]
def fuse_many_ndarray_charges(charges, charge_types):
res = fuse_ndarray_charges(charges[0], charges[1], charge_types)
for n in range(2, len(charges)):
res = fuse_ndarray_charges(res, charges[n], charge_types)
return res
def test_flat_meta_data():
i1 = Index([
U1Charge.random(dimension=20, minval=-2, maxval=2),
U1Charge.random(dimension=20, minval=-2, maxval=2)
],
flow=[True, False])
i2 = Index([
U1Charge.random(dimension=20, minval=-2, maxval=2),
U1Charge.random(dimension=20, minval=-2, maxval=2)
],
flow=[False, True])
expected_charges = [
i1._charges[0], i1._charges[1], i2._charges[0], i2._charges[1]
]
expected_flows = [True, False, False, True]
charges, flows = get_flat_meta_data([i1, i2])
np.testing.assert_allclose(flows, expected_flows)
for n, c in enumerate(charges):
assert charge_equal(c, expected_charges[n])
def test_compute_sparse_lookup():
q1 = np.array([-2, 0, -5, 7])
q2 = np.array([-3, 1, -2, 6, 2, -2])
expected_unique = np.array(
[-11, -8, -7, -6, -4, -3, -2, -1, 0, 1, 2, 3, 5, 6, 9, 10])
expected_labels_to_unique = np.array([7, 8, 9])
expected_lookup = np.array([9, 8, 8, 7, 9])
charges = [U1Charge(q1), U1Charge(q2)]
targets = U1Charge(np.array([-1, 0, 1]))
flows = [False, True]
lookup, unique_, labels = compute_sparse_lookup(charges, flows, targets)
np.testing.assert_allclose(lookup, expected_lookup)
np.testing.assert_allclose(expected_unique, np.squeeze(unique_.charges))
np.testing.assert_allclose(labels, expected_labels_to_unique)
@pytest.mark.parametrize('flow', [True, False])
def test_compute_sparse_lookup_non_ordered(flow):
  np_flow = int(-(int(flow) - 0.5) * 2)
charge_labels = np.array([0, 0, 1, 5, 5, 0, 2, 3, 2, 3, 4, 0, 3, 3, 1, 5])
unique_charges = np.array([-1, 0, 1, -5, 7, 2])
np_targets = np.array([-1, 0, 2])
charges = [U1Charge(unique_charges, charge_labels=charge_labels)]
inds = np.nonzero(
np.isin((np_flow * unique_charges)[charge_labels], np_targets))[0]
targets = U1Charge(np_targets)
lookup, unique_, labels = compute_sparse_lookup(charges, [flow], targets)
np.testing.assert_allclose(labels, np.sort(labels))
np.testing.assert_allclose(
np.squeeze(unique_.charges[lookup, :]),
(np_flow * unique_charges)[charge_labels][inds])
def test_compute_fused_charge_degeneracies():
np.random.seed(10)
qs = [np.random.randint(-3, 3, 100) for _ in range(3)]
charges = [U1Charge(q) for q in qs]
flows = [False, True, False]
np_flows = [1, -1, 1]
unique_, degens = compute_fused_charge_degeneracies(charges, flows)
fused = fuse_ndarrays([qs[n] * np_flows[n] for n in range(3)])
exp_unique, exp_degens = unique(fused, return_counts=True)
np.testing.assert_allclose(np.squeeze(unique_.charges), exp_unique)
np.testing.assert_allclose(degens, exp_degens)
def test_compute_unique_fused_charges():
np.random.seed(10)
qs = [np.random.randint(-3, 3, 100) for _ in range(3)]
charges = [U1Charge(q) for q in qs]
flows = [False, True, False]
np_flows = [1, -1, 1]
unique_ = compute_unique_fused_charges(charges, flows)
fused = fuse_ndarrays([qs[n] * np_flows[n] for n in range(3)])
exp_unique = unique(fused)
np.testing.assert_allclose(np.squeeze(unique_.charges), exp_unique)
@pytest.mark.parametrize('num_charges', [1, 2, 3])
def test_compute_num_nonzero(num_charges):
np.random.seed(12)
D = 40
qs = [np.random.randint(-3, 3, (D, num_charges)) for _ in range(3)]
charges = [BaseCharge(q, charge_types=[U1Charge] * num_charges) for q in qs]
flows = [False, True, False]
np_flows = [1, -1, 1]
fused = fuse_many_ndarray_charges([qs[n] * np_flows[n] for n in range(3)],
[U1Charge] * num_charges)
nz1 = compute_num_nonzero(charges, flows)
#pylint: disable=no-member
nz2 = len(
np.nonzero(
np.logical_and.reduce(
fused == np.zeros((1, num_charges), dtype=np.int16), axis=1))[0])
assert nz1 == nz2
def test_reduce_charges():
left_charges = np.asarray([-2, 0, 1, 0, 0]).astype(np.int16)
right_charges = np.asarray([-1, 0, 2, 1]).astype(np.int16)
target_charge = np.zeros((1, 1), dtype=np.int16)
fused_charges = fuse_ndarrays([left_charges, right_charges])
dense_positions = reduce_charges(
[U1Charge(left_charges), U1Charge(right_charges)], [False, False],
target_charge,
return_locations=True)
np.testing.assert_allclose(dense_positions[0].charges, 0)
np.testing.assert_allclose(
dense_positions[1],
np.nonzero(fused_charges == target_charge[0, 0])[0])
def test_reduce_charges_2():
left_charges = np.asarray([[-2, 0, 1, 0, 0], [-3, 0, 2, 1,
0]]).astype(np.int16).T
right_charges = np.asarray([[-1, 0, 2, 1], [-2, 2, 7, 0]]).astype(np.int16).T
target_charge = np.zeros((1, 2), dtype=np.int16)
fused_charges = fuse_ndarray_charges(left_charges, right_charges,
[U1Charge, U1Charge])
dense_positions = reduce_charges([
BaseCharge(left_charges, charge_types=[U1Charge, U1Charge]),
BaseCharge(right_charges, charge_types=[U1Charge, U1Charge])
], [False, False],
target_charge,
return_locations=True)
np.testing.assert_allclose(dense_positions[0].charges, 0)
#pylint: disable=no-member
np.testing.assert_allclose(
dense_positions[1],
np.nonzero(np.logical_and.reduce(fused_charges == target_charge,
axis=1))[0])
@pytest.mark.parametrize('num_charges', [1, 2, 3])
def test_reduce_charges_non_trivial(num_charges):
np.random.seed(10)
left_charges = np.random.randint(-5, 6, (200, num_charges), dtype=np.int16)
right_charges = np.random.randint(-5, 6, (200, num_charges), dtype=np.int16)
target_charge = np.random.randint(-2, 3, (3, num_charges), dtype=np.int16)
charge_types = [U1Charge] * num_charges
fused_charges = fuse_ndarray_charges(left_charges, right_charges,
charge_types)
dense_positions = reduce_charges([
BaseCharge(left_charges, charge_types=charge_types),
BaseCharge(right_charges, charge_types=charge_types)
], [False, False],
target_charge,
return_locations=True)
assert np.all(
np.isin(
np.squeeze(dense_positions[0].charges), np.squeeze(target_charge)))
tmp = []
#pylint: disable=unsubscriptable-object
for n in range(target_charge.shape[0]):
#pylint: disable=no-member
tmp.append(
np.logical_and.reduce(
fused_charges == target_charge[n, :][None, :], axis=1))
#pylint: disable=no-member
mask = np.logical_or.reduce(tmp)
np.testing.assert_allclose(dense_positions[1], np.nonzero(mask)[0])
@pytest.mark.parametrize('num_legs', [2, 3, 4])
@pytest.mark.parametrize('num_charges', [1, 2, 3])
def test_find_diagonal_sparse_blocks(num_legs, num_charges):
np.random.seed(10)
np_charges = [
np.random.randint(-5, 5, (60, num_charges), dtype=np.int16)
for _ in range(num_legs)
]
fused = np.stack([
fuse_ndarrays([np_charges[n][:, c]
for n in range(num_legs)])
for c in range(num_charges)
],
axis=1)
left_charges = np.stack([
fuse_ndarrays([np_charges[n][:, c]
for n in range(num_legs // 2)])
for c in range(num_charges)
],
axis=1)
right_charges = np.stack([
fuse_ndarrays(
[np_charges[n][:, c]
for n in range(num_legs // 2, num_legs)])
for c in range(num_charges)
],
axis=1)
#pylint: disable=no-member
nz = np.nonzero(
np.logical_and.reduce(fused == np.zeros((1, num_charges)), axis=1))[0]
linear_locs = np.arange(len(nz))
# pylint: disable=no-member
left_inds, _ = np.divmod(nz, right_charges.shape[0])
left = left_charges[left_inds, :]
unique_left = unique(left)
blocks = []
for n in range(unique_left.shape[0]):
ul = unique_left[n, :][None, :]
#pylint: disable=no-member
blocks.append(linear_locs[np.nonzero(
np.logical_and.reduce(left == ul, axis=1))[0]])
charges = [
BaseCharge(left_charges, charge_types=[U1Charge] * num_charges),
BaseCharge(right_charges, charge_types=[U1Charge] * num_charges)
]
bs, cs, ss = _find_diagonal_sparse_blocks(charges, [False, False], 1)
np.testing.assert_allclose(cs.charges, unique_left)
for b1, b2 in zip(blocks, bs):
assert np.all(b1 == b2)
assert np.sum(np.prod(ss, axis=0)) == np.sum([len(b) for b in bs])
np.testing.assert_allclose(unique_left, cs.charges)
orders = []
bonddims = []
for dim, nl in zip([60, 30, 20], [2, 3, 4]):
o = list(itertools.permutations(np.arange(nl)))
orders.extend(o)
bonddims.extend([dim] * len(o))
@pytest.mark.parametrize('order,D', zip(orders, bonddims))
@pytest.mark.parametrize('num_charges', [1, 2, 3])
def test_find_transposed_diagonal_sparse_blocks(num_charges, order, D):
order = list(order)
num_legs = len(order)
np.random.seed(10)
np_charges = [
np.random.randint(-5, 5, (D, num_charges), dtype=np.int16)
for _ in range(num_legs)
]
tr_charge_list = []
charge_list = []
for c in range(num_charges):
tr_charge_list.append(
fuse_ndarrays([np_charges[order[n]][:, c] for n in range(num_legs)]))
charge_list.append(
fuse_ndarrays([np_charges[n][:, c] for n in range(num_legs)]))
tr_fused = np.stack(tr_charge_list, axis=1)
fused = np.stack(charge_list, axis=1)
dims = [c.shape[0] for c in np_charges]
strides = _get_strides(dims)
transposed_linear_positions = fuse_stride_arrays(dims,
[strides[o] for o in order])
left_charges = np.stack([
fuse_ndarrays([np_charges[order[n]][:, c]
for n in range(num_legs // 2)])
for c in range(num_charges)
],
axis=1)
right_charges = np.stack([
fuse_ndarrays(
[np_charges[order[n]][:, c]
for n in range(num_legs // 2, num_legs)])
for c in range(num_charges)
],
axis=1)
#pylint: disable=no-member
mask = np.logical_and.reduce(fused == np.zeros((1, num_charges)), axis=1)
nz = np.nonzero(mask)[0]
dense_to_sparse = np.empty(len(mask), dtype=np.int64)
dense_to_sparse[mask] = np.arange(len(nz))
#pylint: disable=no-member
tr_mask = np.logical_and.reduce(
tr_fused == np.zeros((1, num_charges)), axis=1)
tr_nz = np.nonzero(tr_mask)[0]
tr_linear_locs = transposed_linear_positions[tr_nz]
# pylint: disable=no-member
left_inds, _ = np.divmod(tr_nz, right_charges.shape[0])
left = left_charges[left_inds, :]
unique_left = unique(left)
blocks = []
for n in range(unique_left.shape[0]):
ul = unique_left[n, :][None, :]
#pylint: disable=no-member
blocks.append(dense_to_sparse[tr_linear_locs[np.nonzero(
np.logical_and.reduce(left == ul, axis=1))[0]]])
charges = [
BaseCharge(c, charge_types=[U1Charge] * num_charges) for c in np_charges
]
flows = [False] * num_legs
bs, cs, ss = _find_transposed_diagonal_sparse_blocks(
charges, flows, tr_partition=num_legs // 2, order=order)
np.testing.assert_allclose(cs.charges, unique_left)
for b1, b2 in zip(blocks, bs):
assert np.all(b1 == b2)
assert np.sum(np.prod(ss, axis=0)) == np.sum([len(b) for b in bs])
np.testing.assert_allclose(unique_left, cs.charges)
def test_to_string():
R = 5
D = 100
np.random.seed(10)
cs = [U1Charge.random(D, -5, 5) for _ in range(R)]
flows = np.random.choice([True, False], size=R, replace=True)
tr_partition = 3
order = list(np.random.choice(np.arange(R), size=R, replace=False))
actual = _to_string(cs, flows, tr_partition, order)
expected = ''.join([str(c.charges.tostring()) for c in cs] + [
str(np.array(flows).tostring()),
str(tr_partition),
str(np.array(order, dtype=np.int16).tostring())
])
assert actual == expected
|
from __future__ import print_function
import argparse
import base64
import os
_stash = globals()['_stash']
try:
import pyaes
except ImportError:
print('Installing Required packages...')
_stash('pip install pyaes')
import pyaes
class Crypt(object):
def __init__(self, in_filename, out_filename=None):
self.in_filename = in_filename
self.out_filename = out_filename
def aes_encrypt(self, key=None, chunksize=64 * 1024):
self.out_filename = self.out_filename or self.in_filename + '.enc'
if key is None:
key = base64.b64encode(os.urandom(32))[:32]
aes = pyaes.AESModeOfOperationCTR(key)
with open(self.in_filename, 'rb') as infile:
with open(self.out_filename, 'wb') as outfile:
pyaes.encrypt_stream(aes, infile, outfile)
return key
def aes_decrypt(self, key, chunksize=64 * 1024):
self.out_filename = self.out_filename or os.path.splitext(self.in_filename)[0]
aes = pyaes.AESModeOfOperationCTR(key)
with open(self.in_filename, 'rb') as infile:
with open(self.out_filename, 'wb') as outfile:
pyaes.decrypt_stream(aes, infile, outfile)
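# Example (sketch) of programmatic use, assuming 'notes.txt' exists; the key returned
# by aes_encrypt must be kept in order to decrypt later:
#   key = Crypt('notes.txt').aes_encrypt()                    # writes notes.txt.enc
#   Crypt('notes.txt.enc', 'notes_copy.txt').aes_decrypt(key)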
if __name__ == '__main__':
ap = argparse.ArgumentParser()
ap.add_argument(
'-k',
'--key',
action='store',
default=None,
help='Encrypt/Decrypt Key.',
)
ap.add_argument(
'-d',
'--decrypt',
action='store_true',
default=False,
help='Flag to decrypt.',
)
#ap.add_argument('-t','--type',action='store',choices={'aes','rsa'},default='aes')
ap.add_argument('infile', action='store', help='File to encrypt/decrypt.')
ap.add_argument('outfile', action='store', nargs='?', help='Output file.')
args = ap.parse_args()
crypt = Crypt(args.infile, args.outfile)
    if args.decrypt:
        if args.key is None:
            ap.error('a key is required to decrypt')
        crypt.aes_decrypt(args.key.encode())
    else:
        # pyaes expects the key as bytes, so encode a user-supplied key
        nk = crypt.aes_encrypt(args.key.encode() if args.key else None)
        if args.key is None:
            print("Key: %s" % nk.decode())
|
from unittest import TestCase
from scattertext.FeatureOuput import FeatureLister
from scattertext.test.test_termDocMatrixFactory import build_hamlet_jz_corpus_with_meta
class TestFeatureList(TestCase):
def test_main(self):
tdm = build_hamlet_jz_corpus_with_meta()
features = FeatureLister(tdm._mX,
tdm._metadata_idx_store,
tdm.get_num_docs()).output()
expected = [{'cat4': 2, 'cat3': 1}, {'cat4': 2}, {'cat5': 1, 'cat3': 2},
{'cat6': 2, 'cat9': 1},
{'cat4': 2, 'cat3': 1}, {'cat2': 1, 'cat1': 2},
{'cat2': 2, 'cat5': 1},
{'cat4': 1, 'cat3': 2}]
expected = [{'cat1': 2}, {'cat1': 2}, {'cat1': 2}, {'cat1': 2}, {'cat1': 2}, {'cat1': 2}, {'cat1': 2}, {'cat1': 2}]
self.assertEqual(features,
expected)
|
import sys, os, re, json, argparse, time, pytz
import console
from datetime import datetime, timedelta
from difflib import unified_diff, ndiff
#_____________________________________________________
def argue():
parser = argparse.ArgumentParser()
parser.add_argument('-v', '--verbose', action='store_true')
parser.add_argument('lhs')
parser.add_argument('rhs')
args = parser.parse_args()
if args.verbose:
json.dump(vars(args),sys.stderr,indent=4)
return args
#_____________________________________________________
def sn(x):
return '%s\n'%x
#_____________________________________________________
def modified(f):
lmt = os.path.getmtime(f)
est = pytz.timezone('Australia/Sydney')
gmt = pytz.timezone('GMT')
tzf = '%Y-%m-%d %H:%M:%S'
gdt = datetime.utcfromtimestamp(lmt)
gdt = gmt.localize(gdt)
adt = est.normalize(gdt.astimezone(est))
return adt.strftime(tzf)
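# Example: an mtime of 2020-01-01 00:00:00 UTC is reported as '2020-01-01 11:00:00',
# since Sydney is on AEDT (UTC+11) at that date.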
#_____________________________________________________
def diff(lhs,rhs):
if not os.path.isfile(lhs):
sys.stderr.write('%s not a file\n'%lhs)
sys.exit(1)
if os.path.isdir(rhs):
rhs = '%s/%s'%(rhs,os.path.basename(lhs))
if not os.path.isfile(rhs):
sys.stderr.write('%s not a file\n'%rhs)
sys.exit(1)
flhs = open(lhs).readlines()
frhs = open(rhs).readlines()
diffs = unified_diff(
flhs,
frhs,
fromfile=lhs,
tofile=rhs,
fromfiledate=modified(lhs),
tofiledate=modified(rhs)
)
for line in diffs:
if line.startswith('+'):
console.set_color(0,1,0)
if line.startswith('-'):
console.set_color(0,0,1)
sys.stdout.write(line)
console.set_color(1,1,1)
return
#_____________________________________________________
def main():
console.clear()
args = argue()
diff(
args.lhs.rstrip('/'),
args.rhs.rstrip('/')
)
return
#_____________________________________________________
if __name__ == '__main__': main()
|
import boto3
from flask import current_app
from flask_mail import Message
from lemur.extensions import smtp_mail
from lemur.exceptions import InvalidConfiguration
from lemur.plugins.bases import ExpirationNotificationPlugin
from lemur.plugins import lemur_email as email
from lemur.plugins.lemur_email.templates.config import env
from lemur.plugins.utils import get_plugin_option
def render_html(template_name, options, certificates):
"""
Renders the html for our email notification.
    :param template_name: name of the template to render (without the .html suffix)
    :param options: plugin options passed through to the template
    :param certificates: certificate data to include in the notification
    :return: rendered html string
"""
message = {"options": options, "certificates": certificates}
template = env.get_template("{}.html".format(template_name))
return template.render(
dict(message=message, hostname=current_app.config.get("LEMUR_HOSTNAME"))
)
def send_via_smtp(subject, body, targets):
"""
Attempts to deliver email notification via SMTP.
    :param subject: email subject
    :param body: html body of the email
    :param targets: list of recipient email addresses
    :return:
"""
msg = Message(
subject, recipients=targets, sender=current_app.config.get("LEMUR_EMAIL")
)
msg.body = "" # kinda a weird api for sending html emails
msg.html = body
smtp_mail.send(msg)
def send_via_ses(subject, body, targets):
"""
    Attempts to deliver email notification via the AWS SES service.
    :param subject: email subject
    :param body: html body of the email
    :param targets: list of recipient email addresses
    :return:
"""
ses_region = current_app.config.get("LEMUR_SES_REGION", "us-east-1")
client = boto3.client("ses", region_name=ses_region)
source_arn = current_app.config.get("LEMUR_SES_SOURCE_ARN")
args = {
"Source": current_app.config.get("LEMUR_EMAIL"),
"Destination": {"ToAddresses": targets},
"Message": {
"Subject": {"Data": subject, "Charset": "UTF-8"},
"Body": {"Html": {"Data": body, "Charset": "UTF-8"}},
},
}
if source_arn:
args["SourceArn"] = source_arn
client.send_email(**args)
class EmailNotificationPlugin(ExpirationNotificationPlugin):
title = "Email"
slug = "email-notification"
description = "Sends expiration email notifications"
version = email.VERSION
author = "Kevin Glisson"
author_url = "https://github.com/netflix/lemur"
additional_options = [
{
"name": "recipients",
"type": "str",
"required": True,
"validation": r"^([\w+-.%]+@[\w-.]+\.[A-Za-z]{2,4},?)+$",
"helpMessage": "Comma delimited list of email addresses",
}
]
def __init__(self, *args, **kwargs):
"""Initialize the plugin with the appropriate details."""
sender = current_app.config.get("LEMUR_EMAIL_SENDER", "ses").lower()
if sender not in ["ses", "smtp"]:
            raise InvalidConfiguration("Email sender type {0} is not recognized.".format(sender))
@staticmethod
def send(notification_type, message, targets, options, **kwargs):
if not targets:
return
readable_notification_type = ' '.join(map(lambda x: x.capitalize(), notification_type.split('_')))
subject = f"Lemur: {readable_notification_type} Notification"
body = render_html(notification_type, options, message)
s_type = current_app.config.get("LEMUR_EMAIL_SENDER", "ses").lower()
if s_type == "ses":
send_via_ses(subject, body, targets)
elif s_type == "smtp":
send_via_smtp(subject, body, targets)
@staticmethod
def get_recipients(options, additional_recipients, **kwargs):
notification_recipients = get_plugin_option("recipients", options)
if notification_recipients:
notification_recipients = notification_recipients.split(",")
return list(set(notification_recipients + additional_recipients))
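# Example (sketch), assuming get_plugin_option returns the stored option value:
# with the "recipients" option set to "[email protected],[email protected]" and
# additional_recipients == ["[email protected]"], get_recipients returns the
# de-duplicated union of all three addresses (order not guaranteed, since a set is used).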
|
import sys
ALIASES = {'-h': 'help', '--help': 'help',
'--version': 'version',
}
class CLI:
""" Command line interface class. Commands are simply defined as methods.
"""
def __init__(self, args=None):
if args is None:
return
command = args[0] if args else 'help'
command = ALIASES.get(command, command)
if command not in self.get_command_names():
raise RuntimeError('Invalid command %r' % command)
func = getattr(self, 'cmd_' + command)
func(*args[1:])
def get_command_names(self):
commands = [d[4:] for d in dir(self) if d.startswith('cmd_')]
commands.sort()
return commands
def get_global_help(self):
lines = []
lines.append('Flexx command line interface')
lines.append(' python -m flexx <command> [args]')
lines.append('')
for command in self.get_command_names():
doc = getattr(self, 'cmd_' + command).__doc__
if doc:
summary = doc.strip().splitlines()[0]
lines.append('%s %s' % (command.ljust(15), summary))
return '\n'.join(lines)
def cmd_help(self, command=None):
""" show information on how to use this command.
"""
if command:
if command not in self.get_command_names():
raise RuntimeError('Invalid command %r' % command)
doc = getattr(self, 'cmd_' + command).__doc__
if doc:
lines = doc.strip().splitlines()
doc = '\n'.join([lines[0]] + [line[8:] for line in lines[1:]])
print('%s - %s' % (command, doc))
else:
print('%s - no docs' % command)
else:
print(self.get_global_help())
def cmd_version(self):
""" print the version number
"""
import sys
try:
import flexx
except ImportError:
sys.path.insert(0, '.')
import flexx
print(flexx.__version__)
def cmd_info(self, port=None):
""" show info on flexx server process corresponding to given port,
e.g. flexx info 8080
The kind of info that is provided is not standardized/documented yet.
"""
if port is None:
return self.cmd_help('info')
port = int(port)
try:
print(http_fetch('http://localhost:%i/flexx/cmd/info' % port))
except FetchError:
print('There appears to be no local server at port %i' % port)
def cmd_stop(self, port=None):
""" stop the flexx server process corresponding to the given port.
"""
if port is None:
return self.cmd_help('stop')
port = int(port)
try:
print(http_fetch('http://localhost:%i/flexx/cmd/stop' % port))
print('stopped server at %i' % port)
except FetchError:
print('There appears to be no local server at port %i' % port)
def cmd_log(self, port=None, level='info'):
""" Start listening to log messages from a server process - STUB
flexx log port level
"""
if port is None:
return self.cmd_help('log')
print('not yet implemented')
#print(http_fetch('http://localhost:%i/flexx/cmd/log' % int(port)))
class FetchError(Exception):
pass
def http_fetch(url):
""" Perform an HTTP request.
"""
from tornado.httpclient import HTTPClient
http_client = HTTPClient()
try:
response = http_client.fetch(url)
except Exception as err:
raise FetchError('http fetch failed: %s' % str(err))
finally:
http_client.close()
return response.body.decode()
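# Example (sketch), assuming a flexx server is listening locally on port 8080:
#   print(http_fetch('http://localhost:8080/flexx/cmd/info'))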
# Prepare docs
_cli_docs = CLI().get_global_help().splitlines()
__doc__ += '\n'.join([' ' + line for line in _cli_docs])
def main():
# Main entry point (see setup.py)
CLI(sys.argv[1:])
if __name__ == '__main__':
main()
|
import json
import os
import responses
from django.conf import settings
from django.core import mail
from django.core.checks import Critical
from django.core.serializers.json import DjangoJSONEncoder
from django.test.utils import override_settings
from django.urls import reverse
from django.utils import timezone
from weblate.auth.models import Group
from weblate.trans.models import Announcement
from weblate.trans.tests.test_views import ViewTestCase
from weblate.trans.tests.utils import get_test_file
from weblate.utils.checks import check_data_writable
from weblate.utils.unittest import tempdir_setting
from weblate.wladmin.models import BackupService, ConfigurationError, SupportStatus
from weblate.wladmin.tasks import configuration_health_check
class AdminTest(ViewTestCase):
"""Test for customized admin interface."""
def setUp(self):
super().setUp()
self.user.is_superuser = True
self.user.save()
def test_index(self):
response = self.client.get(reverse("admin:index"))
self.assertContains(response, "SSH")
def test_manage_index(self):
response = self.client.get(reverse("manage"))
self.assertContains(response, "SSH")
def test_ssh(self):
response = self.client.get(reverse("manage-ssh"))
self.assertContains(response, "SSH keys")
@tempdir_setting("DATA_DIR")
def test_ssh_generate(self):
self.assertEqual(check_data_writable(), [])
response = self.client.get(reverse("manage-ssh"))
self.assertContains(response, "Generate SSH key")
response = self.client.post(reverse("manage-ssh"), {"action": "generate"})
self.assertContains(response, "Created new SSH key")
response = self.client.get(reverse("manage-ssh-key"))
self.assertContains(response, "PRIVATE KEY")
@tempdir_setting("DATA_DIR")
def test_ssh_add(self):
self.assertEqual(check_data_writable(), [])
try:
oldpath = os.environ["PATH"]
os.environ["PATH"] = ":".join((get_test_file(""), os.environ["PATH"]))
# Verify there is button for adding
response = self.client.get(reverse("manage-ssh"))
self.assertContains(response, "Add host key")
# Add the key
response = self.client.post(
reverse("manage-ssh"), {"action": "add-host", "host": "github.com"}
)
self.assertContains(response, "Added host key for github.com")
finally:
os.environ["PATH"] = oldpath
# Check the file contains it
hostsfile = os.path.join(settings.DATA_DIR, "ssh", "known_hosts")
with open(hostsfile) as handle:
self.assertIn("github.com", handle.read())
@tempdir_setting("BACKUP_DIR")
def test_backup(self):
def do_post(**payload):
return self.client.post(reverse("manage-backups"), payload, follow=True)
response = do_post(repository=settings.BACKUP_DIR)
self.assertContains(response, settings.BACKUP_DIR)
service = BackupService.objects.get()
response = do_post(service=service.pk, trigger="1")
self.assertContains(response, "triggered")
response = do_post(service=service.pk, toggle="1")
self.assertContains(response, "Turned off")
response = do_post(service=service.pk, remove="1")
self.assertNotContains(response, settings.BACKUP_DIR)
    def test_performance(self):
response = self.client.get(reverse("manage-performance"))
self.assertContains(response, "weblate.E005")
def test_error(self):
ConfigurationError.objects.create(name="Test error", message="FOOOOOOOOOOOOOO")
response = self.client.get(reverse("manage-performance"))
self.assertContains(response, "FOOOOOOOOOOOOOO")
ConfigurationError.objects.filter(name="Test error").delete()
response = self.client.get(reverse("manage-performance"))
self.assertNotContains(response, "FOOOOOOOOOOOOOO")
def test_report(self):
response = self.client.get(reverse("manage-repos"))
self.assertContains(response, "On branch master")
def test_create_project(self):
response = self.client.get(reverse("admin:trans_project_add"))
self.assertContains(response, "Required fields are marked in bold")
def test_create_component(self):
response = self.client.get(reverse("admin:trans_component_add"))
self.assertContains(response, "Import speed documentation")
def test_component(self):
"""Test for custom component actions."""
self.assert_custom_admin(reverse("admin:trans_component_changelist"))
def test_project(self):
"""Test for custom project actions."""
self.assert_custom_admin(reverse("admin:trans_project_changelist"))
def assert_custom_admin(self, url):
"""Test for (sub)project custom admin."""
response = self.client.get(url)
self.assertContains(response, "Update VCS repository")
for action in "force_commit", "update_checks", "update_from_git":
response = self.client.post(
url, {"_selected_action": "1", "action": action}
)
self.assertRedirects(response, url)
def test_configuration_health_check(self):
# Run checks internally
configuration_health_check()
# List of triggered checks remotely
configuration_health_check(
[
Critical(msg="Error", id="weblate.E001"),
Critical(msg="Test Error", id="weblate.E002"),
]
)
all_errors = ConfigurationError.objects.all()
self.assertEqual(len(all_errors), 1)
self.assertEqual(all_errors[0].name, "weblate.E002")
self.assertEqual(all_errors[0].message, "Test Error")
# No triggered checks
configuration_health_check([])
self.assertEqual(ConfigurationError.objects.count(), 0)
    def test_post_announcement(self):
response = self.client.get(reverse("manage-tools"))
self.assertContains(response, "announcement")
self.assertFalse(Announcement.objects.exists())
response = self.client.post(
reverse("manage-tools"),
{"message": "Test message", "category": "info"},
follow=True,
)
self.assertTrue(Announcement.objects.exists())
def test_send_test_email(self, expected="Test e-mail sent"):
response = self.client.get(reverse("manage-tools"))
self.assertContains(response, "e-mail")
response = self.client.post(
reverse("manage-tools"), {"email": "[email protected]"}, follow=True
)
self.assertContains(response, expected)
if expected == "Test e-mail sent":
self.assertEqual(len(mail.outbox), 1)
def test_invite_user(self):
response = self.client.get(reverse("manage-users"))
self.assertContains(response, "E-mail")
response = self.client.post(
reverse("manage-users"),
{
"email": "[email protected]",
"username": "username",
"full_name": "name",
},
follow=True,
)
self.assertContains(response, "User has been invited")
self.assertEqual(len(mail.outbox), 1)
def test_check_user(self):
response = self.client.get(
reverse("manage-users-check"), {"email": self.user.email}
)
self.assertContains(response, "Last login")
@override_settings(
EMAIL_HOST="nonexisting.weblate.org",
EMAIL_BACKEND="django.core.mail.backends.smtp.EmailBackend",
)
def test_send_test_email_error(self):
self.test_send_test_email("Could not send test e-mail")
@responses.activate
def test_activation_community(self):
responses.add(
responses.POST,
settings.SUPPORT_API_URL,
body=json.dumps(
{
"name": "community",
"backup_repository": "",
"expiry": timezone.now(),
"in_limits": True,
},
cls=DjangoJSONEncoder,
),
)
self.client.post(reverse("manage-activate"), {"secret": "123456"})
status = SupportStatus.objects.get()
self.assertEqual(status.name, "community")
self.assertFalse(BackupService.objects.exists())
@responses.activate
def test_activation_hosted(self):
responses.add(
responses.POST,
settings.SUPPORT_API_URL,
body=json.dumps(
{
"name": "hosted",
"backup_repository": "/tmp/xxx",
"expiry": timezone.now(),
"in_limits": True,
},
cls=DjangoJSONEncoder,
),
)
self.client.post(reverse("manage-activate"), {"secret": "123456"})
status = SupportStatus.objects.get()
self.assertEqual(status.name, "hosted")
backup = BackupService.objects.get()
self.assertEqual(backup.repository, "/tmp/xxx")
self.assertFalse(backup.enabled)
def test_group_management(self):
# Add form
response = self.client.get(reverse("admin:weblate_auth_group_add"))
self.assertContains(response, "Automatic group assignment")
# Create group
name = "Test group"
response = self.client.post(
reverse("admin:weblate_auth_group_add"),
{
"name": name,
"language_selection": "1",
"project_selection": "1",
"autogroup_set-TOTAL_FORMS": "0",
"autogroup_set-INITIAL_FORMS": "0",
},
follow=True,
)
self.assertContains(response, name)
# Edit form
group = Group.objects.get(name=name)
url = reverse("admin:weblate_auth_group_change", kwargs={"object_id": group.pk})
response = self.client.get(url)
self.assertContains(response, "Automatic group assignment")
self.assertContains(response, name)
|
import logging
from typing import Any, Dict, Optional
from urllib.parse import urlparse
from directv import DIRECTV, DIRECTVError
import voluptuous as vol
from homeassistant.components.ssdp import ATTR_SSDP_LOCATION, ATTR_UPNP_SERIAL
from homeassistant.config_entries import CONN_CLASS_LOCAL_POLL, ConfigFlow
from homeassistant.const import CONF_HOST, CONF_NAME
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import (
ConfigType,
DiscoveryInfoType,
HomeAssistantType,
)
from .const import CONF_RECEIVER_ID
from .const import DOMAIN # pylint: disable=unused-import
_LOGGER = logging.getLogger(__name__)
ERROR_CANNOT_CONNECT = "cannot_connect"
ERROR_UNKNOWN = "unknown"
async def validate_input(hass: HomeAssistantType, data: dict) -> Dict[str, Any]:
"""Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user.
"""
session = async_get_clientsession(hass)
directv = DIRECTV(data[CONF_HOST], session=session)
device = await directv.update()
return {CONF_RECEIVER_ID: device.info.receiver_id}
class DirecTVConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for DirecTV."""
VERSION = 1
CONNECTION_CLASS = CONN_CLASS_LOCAL_POLL
def __init__(self):
"""Set up the instance."""
self.discovery_info = {}
async def async_step_user(
self, user_input: Optional[ConfigType] = None
) -> Dict[str, Any]:
"""Handle a flow initiated by the user."""
if user_input is None:
return self._show_setup_form()
try:
info = await validate_input(self.hass, user_input)
except DIRECTVError:
return self._show_setup_form({"base": ERROR_CANNOT_CONNECT})
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
return self.async_abort(reason=ERROR_UNKNOWN)
user_input[CONF_RECEIVER_ID] = info[CONF_RECEIVER_ID]
await self.async_set_unique_id(user_input[CONF_RECEIVER_ID])
self._abort_if_unique_id_configured(updates={CONF_HOST: user_input[CONF_HOST]})
return self.async_create_entry(title=user_input[CONF_HOST], data=user_input)
async def async_step_ssdp(
self, discovery_info: DiscoveryInfoType
) -> Dict[str, Any]:
"""Handle SSDP discovery."""
host = urlparse(discovery_info[ATTR_SSDP_LOCATION]).hostname
receiver_id = None
if discovery_info.get(ATTR_UPNP_SERIAL):
receiver_id = discovery_info[ATTR_UPNP_SERIAL][4:] # strips off RID-
# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
self.context.update({"title_placeholders": {"name": host}})
self.discovery_info.update(
{CONF_HOST: host, CONF_NAME: host, CONF_RECEIVER_ID: receiver_id}
)
try:
info = await validate_input(self.hass, self.discovery_info)
except DIRECTVError:
return self.async_abort(reason=ERROR_CANNOT_CONNECT)
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
return self.async_abort(reason=ERROR_UNKNOWN)
self.discovery_info[CONF_RECEIVER_ID] = info[CONF_RECEIVER_ID]
await self.async_set_unique_id(self.discovery_info[CONF_RECEIVER_ID])
self._abort_if_unique_id_configured(
updates={CONF_HOST: self.discovery_info[CONF_HOST]}
)
return await self.async_step_ssdp_confirm()
async def async_step_ssdp_confirm(
self, user_input: ConfigType = None
) -> Dict[str, Any]:
"""Handle a confirmation flow initiated by SSDP."""
if user_input is None:
return self.async_show_form(
step_id="ssdp_confirm",
description_placeholders={"name": self.discovery_info[CONF_NAME]},
errors={},
)
return self.async_create_entry(
title=self.discovery_info[CONF_NAME],
data=self.discovery_info,
)
def _show_setup_form(self, errors: Optional[Dict] = None) -> Dict[str, Any]:
"""Show the setup form to the user."""
return self.async_show_form(
step_id="user",
data_schema=vol.Schema({vol.Required(CONF_HOST): str}),
errors=errors or {},
)
|
import asyncio
from homeassistant.components import camera, microsoft_face as mf
from homeassistant.components.microsoft_face import (
ATTR_CAMERA_ENTITY,
ATTR_GROUP,
ATTR_PERSON,
DOMAIN,
SERVICE_CREATE_GROUP,
SERVICE_CREATE_PERSON,
SERVICE_DELETE_GROUP,
SERVICE_DELETE_PERSON,
SERVICE_FACE_PERSON,
SERVICE_TRAIN_GROUP,
)
from homeassistant.const import ATTR_NAME
from homeassistant.setup import setup_component
from tests.async_mock import patch
from tests.common import assert_setup_component, get_test_home_assistant, load_fixture
def create_group(hass, name):
"""Create a new person group.
This is a legacy helper method. Do not use it for new tests.
"""
data = {ATTR_NAME: name}
hass.services.call(DOMAIN, SERVICE_CREATE_GROUP, data)
def delete_group(hass, name):
"""Delete a person group.
This is a legacy helper method. Do not use it for new tests.
"""
data = {ATTR_NAME: name}
hass.services.call(DOMAIN, SERVICE_DELETE_GROUP, data)
def train_group(hass, group):
"""Train a person group.
This is a legacy helper method. Do not use it for new tests.
"""
data = {ATTR_GROUP: group}
hass.services.call(DOMAIN, SERVICE_TRAIN_GROUP, data)
def create_person(hass, group, name):
"""Create a person in a group.
This is a legacy helper method. Do not use it for new tests.
"""
data = {ATTR_GROUP: group, ATTR_NAME: name}
hass.services.call(DOMAIN, SERVICE_CREATE_PERSON, data)
def delete_person(hass, group, name):
"""Delete a person in a group.
This is a legacy helper method. Do not use it for new tests.
"""
data = {ATTR_GROUP: group, ATTR_NAME: name}
hass.services.call(DOMAIN, SERVICE_DELETE_PERSON, data)
def face_person(hass, group, person, camera_entity):
"""Add a new face picture to a person.
This is a legacy helper method. Do not use it for new tests.
"""
data = {ATTR_GROUP: group, ATTR_PERSON: person, ATTR_CAMERA_ENTITY: camera_entity}
hass.services.call(DOMAIN, SERVICE_FACE_PERSON, data)
class TestMicrosoftFaceSetup:
"""Test the microsoft face component."""
def setup_method(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.config = {mf.DOMAIN: {"api_key": "12345678abcdef"}}
self.endpoint_url = f"https://westus.{mf.FACE_API_URL}"
def teardown_method(self):
"""Stop everything that was started."""
self.hass.stop()
@patch(
"homeassistant.components.microsoft_face.MicrosoftFace.update_store",
return_value=None,
)
def test_setup_component(self, mock_update):
"""Set up component."""
with assert_setup_component(3, mf.DOMAIN):
setup_component(self.hass, mf.DOMAIN, self.config)
@patch(
"homeassistant.components.microsoft_face.MicrosoftFace.update_store",
return_value=None,
)
def test_setup_component_wrong_api_key(self, mock_update):
"""Set up component without api key."""
with assert_setup_component(0, mf.DOMAIN):
setup_component(self.hass, mf.DOMAIN, {mf.DOMAIN: {}})
@patch(
"homeassistant.components.microsoft_face.MicrosoftFace.update_store",
return_value=None,
)
def test_setup_component_test_service(self, mock_update):
"""Set up component."""
with assert_setup_component(3, mf.DOMAIN):
setup_component(self.hass, mf.DOMAIN, self.config)
assert self.hass.services.has_service(mf.DOMAIN, "create_group")
assert self.hass.services.has_service(mf.DOMAIN, "delete_group")
assert self.hass.services.has_service(mf.DOMAIN, "train_group")
assert self.hass.services.has_service(mf.DOMAIN, "create_person")
assert self.hass.services.has_service(mf.DOMAIN, "delete_person")
assert self.hass.services.has_service(mf.DOMAIN, "face_person")
def test_setup_component_test_entities(self, aioclient_mock):
"""Set up component."""
aioclient_mock.get(
self.endpoint_url.format("persongroups"),
text=load_fixture("microsoft_face_persongroups.json"),
)
aioclient_mock.get(
self.endpoint_url.format("persongroups/test_group1/persons"),
text=load_fixture("microsoft_face_persons.json"),
)
aioclient_mock.get(
self.endpoint_url.format("persongroups/test_group2/persons"),
text=load_fixture("microsoft_face_persons.json"),
)
with assert_setup_component(3, mf.DOMAIN):
setup_component(self.hass, mf.DOMAIN, self.config)
assert len(aioclient_mock.mock_calls) == 3
entity_group1 = self.hass.states.get("microsoft_face.test_group1")
entity_group2 = self.hass.states.get("microsoft_face.test_group2")
assert entity_group1 is not None
assert entity_group2 is not None
assert (
entity_group1.attributes["Ryan"] == "25985303-c537-4467-b41d-bdb45cd95ca1"
)
assert (
entity_group1.attributes["David"] == "2ae4935b-9659-44c3-977f-61fac20d0538"
)
assert (
entity_group2.attributes["Ryan"] == "25985303-c537-4467-b41d-bdb45cd95ca1"
)
assert (
entity_group2.attributes["David"] == "2ae4935b-9659-44c3-977f-61fac20d0538"
)
@patch(
"homeassistant.components.microsoft_face.MicrosoftFace.update_store",
return_value=None,
)
def test_service_groups(self, mock_update, aioclient_mock):
"""Set up component, test groups services."""
aioclient_mock.put(
self.endpoint_url.format("persongroups/service_group"),
status=200,
text="{}",
)
aioclient_mock.delete(
self.endpoint_url.format("persongroups/service_group"),
status=200,
text="{}",
)
with assert_setup_component(3, mf.DOMAIN):
setup_component(self.hass, mf.DOMAIN, self.config)
create_group(self.hass, "Service Group")
self.hass.block_till_done()
entity = self.hass.states.get("microsoft_face.service_group")
assert entity is not None
assert len(aioclient_mock.mock_calls) == 1
delete_group(self.hass, "Service Group")
self.hass.block_till_done()
entity = self.hass.states.get("microsoft_face.service_group")
assert entity is None
assert len(aioclient_mock.mock_calls) == 2
def test_service_person(self, aioclient_mock):
"""Set up component, test person services."""
aioclient_mock.get(
self.endpoint_url.format("persongroups"),
text=load_fixture("microsoft_face_persongroups.json"),
)
aioclient_mock.get(
self.endpoint_url.format("persongroups/test_group1/persons"),
text=load_fixture("microsoft_face_persons.json"),
)
aioclient_mock.get(
self.endpoint_url.format("persongroups/test_group2/persons"),
text=load_fixture("microsoft_face_persons.json"),
)
with assert_setup_component(3, mf.DOMAIN):
setup_component(self.hass, mf.DOMAIN, self.config)
assert len(aioclient_mock.mock_calls) == 3
aioclient_mock.post(
self.endpoint_url.format("persongroups/test_group1/persons"),
text=load_fixture("microsoft_face_create_person.json"),
)
aioclient_mock.delete(
self.endpoint_url.format(
"persongroups/test_group1/persons/"
"25985303-c537-4467-b41d-bdb45cd95ca1"
),
status=200,
text="{}",
)
create_person(self.hass, "test group1", "Hans")
self.hass.block_till_done()
entity_group1 = self.hass.states.get("microsoft_face.test_group1")
assert len(aioclient_mock.mock_calls) == 4
assert entity_group1 is not None
assert (
entity_group1.attributes["Hans"] == "25985303-c537-4467-b41d-bdb45cd95ca1"
)
delete_person(self.hass, "test group1", "Hans")
self.hass.block_till_done()
entity_group1 = self.hass.states.get("microsoft_face.test_group1")
assert len(aioclient_mock.mock_calls) == 5
assert entity_group1 is not None
assert "Hans" not in entity_group1.attributes
@patch(
"homeassistant.components.microsoft_face.MicrosoftFace.update_store",
return_value=None,
)
def test_service_train(self, mock_update, aioclient_mock):
"""Set up component, test train groups services."""
with assert_setup_component(3, mf.DOMAIN):
setup_component(self.hass, mf.DOMAIN, self.config)
aioclient_mock.post(
self.endpoint_url.format("persongroups/service_group/train"),
status=200,
text="{}",
)
train_group(self.hass, "Service Group")
self.hass.block_till_done()
assert len(aioclient_mock.mock_calls) == 1
@patch(
"homeassistant.components.camera.async_get_image",
return_value=camera.Image("image/jpeg", b"Test"),
)
def test_service_face(self, camera_mock, aioclient_mock):
"""Set up component, test person face services."""
aioclient_mock.get(
self.endpoint_url.format("persongroups"),
text=load_fixture("microsoft_face_persongroups.json"),
)
aioclient_mock.get(
self.endpoint_url.format("persongroups/test_group1/persons"),
text=load_fixture("microsoft_face_persons.json"),
)
aioclient_mock.get(
self.endpoint_url.format("persongroups/test_group2/persons"),
text=load_fixture("microsoft_face_persons.json"),
)
self.config["camera"] = {"platform": "demo"}
with assert_setup_component(3, mf.DOMAIN):
setup_component(self.hass, mf.DOMAIN, self.config)
assert len(aioclient_mock.mock_calls) == 3
aioclient_mock.post(
self.endpoint_url.format(
"persongroups/test_group2/persons/"
"2ae4935b-9659-44c3-977f-61fac20d0538/persistedFaces"
),
status=200,
text="{}",
)
face_person(self.hass, "test_group2", "David", "camera.demo_camera")
self.hass.block_till_done()
assert len(aioclient_mock.mock_calls) == 4
assert aioclient_mock.mock_calls[3][2] == b"Test"
@patch(
"homeassistant.components.microsoft_face.MicrosoftFace.update_store",
return_value=None,
)
def test_service_status_400(self, mock_update, aioclient_mock):
"""Set up component, test groups services with error."""
aioclient_mock.put(
self.endpoint_url.format("persongroups/service_group"),
status=400,
text="{'error': {'message': 'Error'}}",
)
with assert_setup_component(3, mf.DOMAIN):
setup_component(self.hass, mf.DOMAIN, self.config)
create_group(self.hass, "Service Group")
self.hass.block_till_done()
entity = self.hass.states.get("microsoft_face.service_group")
assert entity is None
assert len(aioclient_mock.mock_calls) == 1
@patch(
"homeassistant.components.microsoft_face.MicrosoftFace.update_store",
return_value=None,
)
def test_service_status_timeout(self, mock_update, aioclient_mock):
"""Set up component, test groups services with timeout."""
aioclient_mock.put(
self.endpoint_url.format("persongroups/service_group"),
status=400,
exc=asyncio.TimeoutError(),
)
with assert_setup_component(3, mf.DOMAIN):
setup_component(self.hass, mf.DOMAIN, self.config)
create_group(self.hass, "Service Group")
self.hass.block_till_done()
entity = self.hass.states.get("microsoft_face.service_group")
assert entity is None
assert len(aioclient_mock.mock_calls) == 1
|
import asyncio
import contextlib
import platform
import sys
import codecs
import logging
import traceback
from datetime import datetime, timedelta
import aiohttp
import discord
import pkg_resources
from colorama import Fore, Style, init
from pkg_resources import DistributionNotFound
from redbot.core import data_manager
from redbot.core.commands import RedHelpFormatter, HelpSettings
from redbot.core.i18n import (
Translator,
set_contextual_locale,
set_contextual_regional_format,
set_contextual_locales_from_guild,
)
from .utils import AsyncIter
from .. import __version__ as red_version, version_info as red_version_info, VersionInfo
from . import commands
from .config import get_latest_confs
from .utils._internal_utils import (
fuzzy_command_search,
format_fuzzy_results,
expected_version,
fetch_latest_red_version_info,
)
from .utils.chat_formatting import inline, bordered, format_perms_list, humanize_timedelta
from rich.table import Table
from rich.columns import Columns
from rich.panel import Panel
from rich.text import Text
log = logging.getLogger("red")
init()
INTRO = r"""[red]
______ _ ______ _ _ ______ _
| ___ \ | | | _ (_) | | | ___ \ | |
| |_/ /___ __| | ______ | | | |_ ___ ___ ___ _ __ __| | | |_/ / ___ | |_
| // _ \/ _` | |______| | | | | / __|/ __/ _ \| '__/ _` | | ___ \/ _ \| __|
| |\ \ __/ (_| | | |/ /| \__ \ (_| (_) | | | (_| | | |_/ / (_) | |_
\_| \_\___|\__,_| |___/ |_|___/\___\___/|_| \__,_| \____/ \___/ \__|
"""
_ = Translator(__name__, __file__)
def init_events(bot, cli_flags):
@bot.event
async def on_connect():
if bot._uptime is None:
log.info("Connected to Discord. Getting ready...")
@bot.event
async def on_ready():
if bot._uptime is not None:
return
bot._uptime = datetime.utcnow()
guilds = len(bot.guilds)
        users = len(set(bot.get_all_members()))
app_info = await bot.application_info()
if app_info.team:
if bot._use_team_features:
bot.owner_ids.update(m.id for m in app_info.team.members)
elif bot._owner_id_overwrite is None:
bot.owner_ids.add(app_info.owner.id)
bot._app_owners_fetched = True
try:
invite_url = discord.utils.oauth_url(app_info.id)
except:
invite_url = "Could not fetch invite url"
prefixes = cli_flags.prefix or (await bot._config.prefix())
lang = await bot._config.locale()
red_pkg = pkg_resources.get_distribution("Red-DiscordBot")
dpy_version = discord.__version__
table_general_info = Table(show_edge=False, show_header=False)
table_general_info.add_row("Prefixes", ", ".join(prefixes))
table_general_info.add_row("Language", lang)
table_general_info.add_row("Red version", red_version)
table_general_info.add_row("Discord.py version", dpy_version)
table_general_info.add_row("Storage type", data_manager.storage_type())
table_counts = Table(show_edge=False, show_header=False)
# String conversion is needed as Rich doesn't deal with ints
table_counts.add_row("Shards", str(bot.shard_count))
table_counts.add_row("Servers", str(guilds))
if bot.intents.members: # Lets avoid 0 Unique Users
table_counts.add_row("Unique Users", str(users))
outdated_red_message = ""
rich_outdated_message = ""
with contextlib.suppress(aiohttp.ClientError, asyncio.TimeoutError):
pypi_version, py_version_req = await fetch_latest_red_version_info()
outdated = pypi_version and pypi_version > red_version_info
if outdated:
outdated_red_message = _(
"Your Red instance is out of date! {} is the current "
"version, however you are using {}!"
).format(pypi_version, red_version)
rich_outdated_message = (
f"[red]Outdated version![/red]\n"
f"[red]!!![/red]Version {pypi_version} is available, "
f"but you're using {red_version}[red]!!![/red]"
)
current_python = platform.python_version()
extra_update = _(
"\n\nWhile the following command should work in most scenarios as it is "
"based on your current OS, environment, and Python version, "
"**we highly recommend you to read the update docs at <{docs}> and "
"make sure there is nothing else that "
"needs to be done during the update.**"
).format(docs="https://docs.discord.red/en/stable/update_red.html")
if expected_version(current_python, py_version_req):
installed_extras = []
for extra, reqs in red_pkg._dep_map.items():
if extra is None or extra in {"dev", "all"}:
continue
try:
pkg_resources.require(req.name for req in reqs)
except pkg_resources.DistributionNotFound:
pass
else:
installed_extras.append(extra)
if installed_extras:
package_extras = f"[{','.join(installed_extras)}]"
else:
package_extras = ""
extra_update += _(
"\n\nTo update your bot, first shutdown your "
"bot then open a window of {console} (Not as admin) and "
"run the following:\n\n"
).format(
console=_("Command Prompt")
if platform.system() == "Windows"
else _("Terminal")
)
extra_update += (
'```"{python}" -m pip install -U Red-DiscordBot{package_extras}```'.format(
python=sys.executable, package_extras=package_extras
)
)
else:
extra_update += _(
"\n\nYou have Python `{py_version}` and this update "
"requires `{req_py}`; you cannot simply run the update command.\n\n"
"You will need to follow the update instructions in our docs above, "
"if you still need help updating after following the docs go to our "
"#support channel in <https://discord.gg/red>"
).format(py_version=current_python, req_py=py_version_req)
outdated_red_message += extra_update
bot._rich_console.print(INTRO)
if guilds:
bot._rich_console.print(
Columns(
[Panel(table_general_info, title=str(bot.user.name)), Panel(table_counts)],
equal=True,
align="center",
)
)
else:
bot._rich_console.print(Columns([Panel(table_general_info, title=str(bot.user.name))]))
bot._rich_console.print(
"Loaded {} cogs with {} commands".format(len(bot.cogs), len(bot.commands))
)
if invite_url:
bot._rich_console.print(
f"\nInvite URL: {Text(invite_url, style=f'link {invite_url}')}"
)
# We generally shouldn't care if the client supports it or not as Rich deals with it.
if not guilds:
bot._rich_console.print(
f"Looking for a quick guide on setting up Red? Checkout {Text('https://start.discord.red', style='link https://start.discord.red}')}"
)
if rich_outdated_message:
bot._rich_console.print(rich_outdated_message)
if not bot.owner_ids:
# we could possibly exit here in future
log.warning("Bot doesn't have any owner set!")
bot._color = discord.Colour(await bot._config.color())
bot._red_ready.set()
if outdated_red_message:
await bot.send_to_owners(outdated_red_message)
@bot.event
async def on_command_completion(ctx: commands.Context):
await bot._delete_delay(ctx)
@bot.event
async def on_command_error(ctx, error, unhandled_by_cog=False):
if not unhandled_by_cog:
if hasattr(ctx.command, "on_error"):
return
if ctx.cog:
if commands.Cog._get_overridden_method(ctx.cog.cog_command_error) is not None:
return
if not isinstance(error, commands.CommandNotFound):
asyncio.create_task(bot._delete_delay(ctx))
if isinstance(error, commands.MissingRequiredArgument):
await ctx.send_help()
elif isinstance(error, commands.ArgParserFailure):
msg = _("`{user_input}` is not a valid value for `{command}`").format(
user_input=error.user_input, command=error.cmd
)
if error.custom_help_msg:
msg += f"\n{error.custom_help_msg}"
await ctx.send(msg)
if error.send_cmd_help:
await ctx.send_help()
elif isinstance(error, commands.ConversionFailure):
if error.args:
await ctx.send(error.args[0])
else:
await ctx.send_help()
elif isinstance(error, commands.UserInputError):
await ctx.send_help()
elif isinstance(error, commands.DisabledCommand):
disabled_message = await bot._config.disabled_command_msg()
if disabled_message:
await ctx.send(disabled_message.replace("{command}", ctx.invoked_with))
elif isinstance(error, commands.CommandInvokeError):
log.exception(
"Exception in command '{}'".format(ctx.command.qualified_name),
exc_info=error.original,
)
message = _(
"Error in command '{command}'. Check your console or logs for details."
).format(command=ctx.command.qualified_name)
exception_log = "Exception in command '{}'\n" "".format(ctx.command.qualified_name)
exception_log += "".join(
traceback.format_exception(type(error), error, error.__traceback__)
)
bot._last_exception = exception_log
await ctx.send(inline(message))
elif isinstance(error, commands.CommandNotFound):
help_settings = await HelpSettings.from_context(ctx)
fuzzy_commands = await fuzzy_command_search(
ctx,
commands=RedHelpFormatter.help_filter_func(
ctx, bot.walk_commands(), help_settings=help_settings
),
)
if not fuzzy_commands:
pass
elif await ctx.embed_requested():
await ctx.send(embed=await format_fuzzy_results(ctx, fuzzy_commands, embed=True))
else:
await ctx.send(await format_fuzzy_results(ctx, fuzzy_commands, embed=False))
elif isinstance(error, commands.BotMissingPermissions):
if bin(error.missing.value).count("1") == 1: # Only one perm missing
msg = _("I require the {permission} permission to execute that command.").format(
permission=format_perms_list(error.missing)
)
else:
msg = _("I require {permission_list} permissions to execute that command.").format(
permission_list=format_perms_list(error.missing)
)
await ctx.send(msg)
elif isinstance(error, commands.UserFeedbackCheckFailure):
if error.message:
await ctx.send(error.message)
elif isinstance(error, commands.NoPrivateMessage):
await ctx.send(_("That command is not available in DMs."))
elif isinstance(error, commands.PrivateMessageOnly):
await ctx.send(_("That command is only available in DMs."))
elif isinstance(error, commands.CheckFailure):
pass
elif isinstance(error, commands.CommandOnCooldown):
if bot._bypass_cooldowns and ctx.author.id in bot.owner_ids:
ctx.command.reset_cooldown(ctx)
new_ctx = await bot.get_context(ctx.message)
await bot.invoke(new_ctx)
return
if delay := humanize_timedelta(seconds=error.retry_after):
msg = _("This command is on cooldown. Try again in {delay}.").format(delay=delay)
else:
msg = _("This command is on cooldown. Try again in 1 second.")
await ctx.send(msg, delete_after=error.retry_after)
elif isinstance(error, commands.MaxConcurrencyReached):
if error.per is commands.BucketType.default:
if error.number > 1:
msg = _(
"Too many people using this command."
" It can only be used {number} times concurrently."
).format(number=error.number)
else:
msg = _(
"Too many people using this command."
" It can only be used once concurrently."
)
elif error.per in (commands.BucketType.user, commands.BucketType.member):
if error.number > 1:
msg = _(
"That command is still completing,"
" it can only be used {number} times per {type} concurrently."
).format(number=error.number, type=error.per.name)
else:
msg = _(
"That command is still completing,"
" it can only be used once per {type} concurrently."
).format(type=error.per.name)
else:
if error.number > 1:
msg = _(
"Too many people using this command."
" It can only be used {number} times per {type} concurrently."
).format(number=error.number, type=error.per.name)
else:
msg = _(
"Too many people using this command."
" It can only be used once per {type} concurrently."
).format(type=error.per.name)
await ctx.send(msg)
else:
log.exception(type(error).__name__, exc_info=error)
@bot.event
async def on_message(message):
await set_contextual_locales_from_guild(bot, message.guild)
await bot.process_commands(message)
discord_now = message.created_at
if (
not bot._checked_time_accuracy
or (discord_now - timedelta(minutes=60)) > bot._checked_time_accuracy
):
system_now = datetime.utcnow()
diff = abs((discord_now - system_now).total_seconds())
if diff > 60:
log.warning(
"Detected significant difference (%d seconds) in system clock to discord's "
"clock. Any time sensitive code may fail.",
diff,
)
bot._checked_time_accuracy = discord_now
@bot.event
async def on_command_add(command: commands.Command):
disabled_commands = await bot._config.disabled_commands()
if command.qualified_name in disabled_commands:
command.enabled = False
guild_data = await bot._config.all_guilds()
async for guild_id, data in AsyncIter(guild_data.items(), steps=100):
disabled_commands = data.get("disabled_commands", [])
if command.qualified_name in disabled_commands:
command.disable_in(discord.Object(id=guild_id))
async def _guild_added(guild: discord.Guild):
disabled_commands = await bot._config.guild(guild).disabled_commands()
for command_name in disabled_commands:
command_obj = bot.get_command(command_name)
if command_obj is not None:
command_obj.disable_in(guild)
@bot.event
async def on_guild_join(guild: discord.Guild):
await _guild_added(guild)
@bot.event
async def on_guild_available(guild: discord.Guild):
# We need to check guild-disabled commands here since some cogs
# are loaded prior to `on_ready`.
await _guild_added(guild)
@bot.event
async def on_guild_leave(guild: discord.Guild):
# Clean up any unneeded checks
disabled_commands = await bot._config.guild(guild).disabled_commands()
for command_name in disabled_commands:
command_obj = bot.get_command(command_name)
if command_obj is not None:
command_obj.enable_in(guild)
@bot.event
async def on_cog_add(cog: commands.Cog):
confs = get_latest_confs()
for c in confs:
uuid = c.unique_identifier
group_data = c.custom_groups
await bot._config.custom("CUSTOM_GROUPS", c.cog_name, uuid).set(group_data)
def _get_startup_screen_specs():
"""Get specs for displaying the startup screen on stdout.
This is so we don't get encoding errors when trying to print unicode
emojis to stdout (particularly with Windows Command Prompt).
Returns
-------
`tuple`
Tuple in the form (`str`, `str`, `bool`) containing (in order) the
on symbol, off symbol and whether or not the border should be pure ascii.
"""
encoder = codecs.getencoder(sys.stdout.encoding)
check_mark = "\N{SQUARE ROOT}"
try:
encoder(check_mark)
except UnicodeEncodeError:
on_symbol = "[X]"
off_symbol = "[ ]"
else:
on_symbol = check_mark
off_symbol = "X"
try:
encoder("┌┐└┘─│") # border symbols
except UnicodeEncodeError:
ascii_border = True
else:
ascii_border = False
return on_symbol, off_symbol, ascii_border
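# Example (sketch): on a UTF-8 capable console this returns ('√', 'X', False);
# if stdout cannot encode the check mark, the symbols fall back to '[X]' and '[ ]'.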
|
from contextlib import suppress
import numpy as np
import pandas as pd
import pytest
import xarray as xr
from xarray.coding import variables
from xarray.conventions import decode_cf_variable, encode_cf_variable
from . import assert_equal, assert_identical, requires_dask
with suppress(ImportError):
import dask.array as da
def test_CFMaskCoder_decode():
original = xr.Variable(("x",), [0, -1, 1], {"_FillValue": -1})
expected = xr.Variable(("x",), [0, np.nan, 1])
coder = variables.CFMaskCoder()
encoded = coder.decode(original)
assert_identical(expected, encoded)
encoding_with_dtype = {
"dtype": np.dtype("float64"),
"_FillValue": np.float32(1e20),
"missing_value": np.float64(1e20),
}
encoding_without_dtype = {
"_FillValue": np.float32(1e20),
"missing_value": np.float64(1e20),
}
CFMASKCODER_ENCODE_DTYPE_CONFLICT_TESTS = {
"numeric-with-dtype": ([0.0, -1.0, 1.0], encoding_with_dtype),
"numeric-without-dtype": ([0.0, -1.0, 1.0], encoding_without_dtype),
"times-with-dtype": (pd.date_range("2000", periods=3), encoding_with_dtype),
}
@pytest.mark.parametrize(
("data", "encoding"),
CFMASKCODER_ENCODE_DTYPE_CONFLICT_TESTS.values(),
ids=list(CFMASKCODER_ENCODE_DTYPE_CONFLICT_TESTS.keys()),
)
def test_CFMaskCoder_encode_missing_fill_values_conflict(data, encoding):
original = xr.Variable(("x",), data, encoding=encoding)
encoded = encode_cf_variable(original)
assert encoded.dtype == encoded.attrs["missing_value"].dtype
assert encoded.dtype == encoded.attrs["_FillValue"].dtype
with pytest.warns(variables.SerializationWarning):
roundtripped = decode_cf_variable("foo", encoded)
assert_identical(roundtripped, original)
def test_CFMaskCoder_missing_value():
expected = xr.DataArray(
np.array([[26915, 27755, -9999, 27705], [25595, -9999, 28315, -9999]]),
dims=["npts", "ntimes"],
name="tmpk",
)
expected.attrs["missing_value"] = -9999
decoded = xr.decode_cf(expected.to_dataset())
encoded, _ = xr.conventions.cf_encoder(decoded, decoded.attrs)
assert_equal(encoded["tmpk"], expected.variable)
decoded.tmpk.encoding["_FillValue"] = -9940
with pytest.raises(ValueError):
encoded, _ = xr.conventions.cf_encoder(decoded, decoded.attrs)
@requires_dask
def test_CFMaskCoder_decode_dask():
original = xr.Variable(("x",), [0, -1, 1], {"_FillValue": -1}).chunk()
expected = xr.Variable(("x",), [0, np.nan, 1])
coder = variables.CFMaskCoder()
encoded = coder.decode(original)
assert isinstance(encoded.data, da.Array)
assert_identical(expected, encoded)
# TODO(shoyer): port other fill-value tests
# TODO(shoyer): parameterize when we have more coders
def test_coder_roundtrip():
original = xr.Variable(("x",), [0.0, np.nan, 1.0])
coder = variables.CFMaskCoder()
roundtripped = coder.decode(coder.encode(original))
assert_identical(original, roundtripped)
@pytest.mark.parametrize("dtype", "u1 u2 i1 i2 f2 f4".split())
def test_scaling_converts_to_float32(dtype):
original = xr.Variable(
("x",), np.arange(10, dtype=dtype), encoding=dict(scale_factor=10)
)
coder = variables.CFScaleOffsetCoder()
encoded = coder.encode(original)
assert encoded.dtype == np.float32
roundtripped = coder.decode(encoded)
assert_identical(original, roundtripped)
assert roundtripped.dtype == np.float32
|
import voluptuous as vol
from homeassistant.auth.providers import homeassistant as auth_ha
from homeassistant.components import websocket_api
from homeassistant.components.websocket_api import decorators
from homeassistant.exceptions import Unauthorized
async def async_setup(hass):
"""Enable the Home Assistant views."""
hass.components.websocket_api.async_register_command(websocket_create)
hass.components.websocket_api.async_register_command(websocket_delete)
hass.components.websocket_api.async_register_command(websocket_change_password)
hass.components.websocket_api.async_register_command(
websocket_admin_change_password
)
return True
@decorators.websocket_command(
{
vol.Required("type"): "config/auth_provider/homeassistant/create",
vol.Required("user_id"): str,
vol.Required("username"): str,
vol.Required("password"): str,
}
)
@websocket_api.require_admin
@websocket_api.async_response
async def websocket_create(hass, connection, msg):
"""Create credentials and attach to a user."""
provider = auth_ha.async_get_provider(hass)
user = await hass.auth.async_get_user(msg["user_id"])
if user is None:
connection.send_error(msg["id"], "not_found", "User not found")
return
if user.system_generated:
connection.send_error(
msg["id"],
"system_generated",
"Cannot add credentials to a system generated user.",
)
return
try:
await provider.async_add_auth(msg["username"], msg["password"])
except auth_ha.InvalidUser:
connection.send_error(msg["id"], "username_exists", "Username already exists")
return
credentials = await provider.async_get_or_create_credentials(
{"username": msg["username"]}
)
await hass.auth.async_link_user(user, credentials)
connection.send_result(msg["id"])
@decorators.websocket_command(
{
vol.Required("type"): "config/auth_provider/homeassistant/delete",
vol.Required("username"): str,
}
)
@websocket_api.require_admin
@websocket_api.async_response
async def websocket_delete(hass, connection, msg):
"""Delete username and related credential."""
provider = auth_ha.async_get_provider(hass)
credentials = await provider.async_get_or_create_credentials(
{"username": msg["username"]}
)
# if not new, an existing credential exists.
# Removing the credential will also remove the auth.
if not credentials.is_new:
await hass.auth.async_remove_credentials(credentials)
connection.send_result(msg["id"])
return
try:
await provider.async_remove_auth(msg["username"])
except auth_ha.InvalidUser:
connection.send_error(
msg["id"], "auth_not_found", "Given username was not found."
)
return
connection.send_result(msg["id"])
@decorators.websocket_command(
{
vol.Required("type"): "config/auth_provider/homeassistant/change_password",
vol.Required("current_password"): str,
vol.Required("new_password"): str,
}
)
@websocket_api.async_response
async def websocket_change_password(hass, connection, msg):
"""Change current user password."""
user = connection.user
if user is None:
connection.send_error(msg["id"], "user_not_found", "User not found")
return
provider = auth_ha.async_get_provider(hass)
username = None
for credential in user.credentials:
if credential.auth_provider_type == provider.type:
username = credential.data["username"]
break
if username is None:
connection.send_error(
msg["id"], "credentials_not_found", "Credentials not found"
)
return
try:
await provider.async_validate_login(username, msg["current_password"])
except auth_ha.InvalidAuth:
connection.send_error(msg["id"], "invalid_password", "Invalid password")
return
await provider.async_change_password(username, msg["new_password"])
connection.send_result(msg["id"])
@decorators.websocket_command(
{
vol.Required(
"type"
): "config/auth_provider/homeassistant/admin_change_password",
vol.Required("user_id"): str,
vol.Required("password"): str,
}
)
@decorators.require_admin
@decorators.async_response
async def websocket_admin_change_password(hass, connection, msg):
"""Change password of any user."""
if not connection.user.is_owner:
raise Unauthorized(context=connection.context(msg))
user = await hass.auth.async_get_user(msg["user_id"])
if user is None:
connection.send_error(msg["id"], "user_not_found", "User not found")
return
provider = auth_ha.async_get_provider(hass)
username = None
for credential in user.credentials:
if credential.auth_provider_type == provider.type:
username = credential.data["username"]
break
if username is None:
connection.send_error(
msg["id"], "credentials_not_found", "Credentials not found"
)
return
try:
await provider.async_change_password(username, msg["password"])
connection.send_result(msg["id"])
except auth_ha.InvalidUser:
connection.send_error(
msg["id"], "credentials_not_found", "Credentials not found"
)
return
|
from copy import deepcopy
import json
import unittest
import requests_mock
from homeassistant import setup
import homeassistant.components.vultr as vultr
from tests.async_mock import patch
from tests.common import get_test_home_assistant, load_fixture
VALID_CONFIG = {"vultr": {"api_key": "ABCDEFG1234567"}}
class TestVultr(unittest.TestCase):
"""Tests the Vultr component."""
def setUp(self):
"""Initialize values for this test case class."""
self.hass = get_test_home_assistant()
self.config = VALID_CONFIG
self.addCleanup(self.tear_down_cleanup)
def tear_down_cleanup(self):
"""Stop everything that we started."""
self.hass.stop()
@requests_mock.Mocker()
def test_setup(self, mock):
"""Test successful setup."""
with patch(
"vultr.Vultr.server_list",
return_value=json.loads(load_fixture("vultr_server_list.json")),
):
response = vultr.setup(self.hass, self.config)
assert response
def test_setup_no_api_key(self):
"""Test failed setup with missing API Key."""
conf = deepcopy(self.config)
del conf["vultr"]["api_key"]
assert not setup.setup_component(self.hass, vultr.DOMAIN, conf)
|
import math
import struct
from openrazer_daemon.dbus_services import endpoint
@endpoint('razer.device.power', 'getBattery', out_sig='d')
def get_battery(self):
"""
Get mouse's battery level
"""
self.logger.debug("DBus call get_battery")
driver_path = self.get_driver_path('charge_level')
with open(driver_path, 'r') as driver_file:
battery_255 = float(driver_file.read().strip())
if battery_255 < 0:
return -1.0
battery_100 = (battery_255 / 255) * 100
return battery_100
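# Example: a charge_level of 191 read from sysfs maps to (191 / 255) * 100 ≈ 74.9 %.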
@endpoint('razer.device.power', 'isCharging', out_sig='b')
def is_charging(self):
"""
Get charging status
"""
self.logger.debug("DBus call is_charging")
driver_path = self.get_driver_path('charge_status')
with open(driver_path, 'r') as driver_file:
return bool(int(driver_file.read().strip()))
@endpoint('razer.device.power', 'setIdleTime', in_sig='q')
def set_idle_time(self, idle_time):
"""
Set the idle time of the mouse in seconds
:param idle_time: Idle time in seconds (unsigned short)
:type idle_time: int
"""
self.logger.debug("DBus call set_idle_time")
driver_path = self.get_driver_path('device_idle_time')
with open(driver_path, 'w') as driver_file:
driver_file.write(str(idle_time))
@endpoint('razer.device.power', 'getIdleTime', out_sig='q')
def get_idle_time(self):
"""
Get the idle time of the mouse in seconds
:return: Idle time in seconds (unsigned short)
:rtype: int
"""
self.logger.debug("DBus call get_idle_time")
driver_path = self.get_driver_path('device_idle_time')
with open(driver_path, 'r') as driver_file:
result = driver_file.read()
result = int(result.strip())
return result
@endpoint('razer.device.power', 'setLowBatteryThreshold', in_sig='y')
def set_low_battery_threshold(self, threshold):
"""
Set the low battery threshold as a percentage
:param threshold: Battery threshold as a percentage
:type threshold: int
"""
self.logger.debug("DBus call set_low_battery_threshold")
driver_path = self.get_driver_path('charge_low_threshold')
threshold = math.floor((threshold / 100) * 255)
with open(driver_path, 'w') as driver_file:
driver_file.write(str(threshold))
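# Example: a threshold of 25 (%) is stored as floor((25 / 100) * 255) == 63.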
@endpoint('razer.device.power', 'getLowBatteryThreshold', out_sig='y')
def get_low_battery_threshold(self):
"""
Get the low battery threshold as a percentage
:return: Battery threshold as a percentage
:rtype: int
"""
self.logger.debug("DBus call get_low_battery_threshold")
driver_path = self.get_driver_path('charge_low_threshold')
with open(driver_path, 'r') as driver_file:
result = driver_file.read()
result = int(result.strip())
return round((result / 255) * 100)
@endpoint('razer.device.lighting.power', 'setChargeEffect', in_sig='y')
def set_charge_effect(self, charge_effect):
"""
Set the charging effect.
If 0x00 then it will use the current mouse's effect
If 0x01 it will use the charge colour
:param charge_effect: Charge effect
:type charge_effect: int
:return:
"""
self.logger.debug("DBus call set_charge_effect")
driver_path = self.get_driver_path('charge_effect')
with open(driver_path, 'wb') as driver_file:
driver_file.write(bytes([charge_effect]))
@endpoint('razer.device.lighting.power', 'setChargeColour', in_sig='yyy')
def set_charge_colour(self, red, green, blue):
"""
Set the charge colour
:param red: Red component
:type red: int
:param green: Green component
:type green: int
:param blue: Blue component
:type blue: int
"""
self.logger.debug("DBus call set_charge_colour")
driver_path = self.get_driver_path('charge_colour')
payload = bytes([red, green, blue])
with open(driver_path, 'wb') as driver_file:
driver_file.write(payload)
@endpoint('razer.device.dpi', 'setDPI', in_sig='qq')
def set_dpi_xy(self, dpi_x, dpi_y):
"""
    Set the DPI on the mouse. Takes in 4 bytes, big-endian
:param dpi_x: X DPI
:type dpi_x: int
:param dpi_y: Y DPI
    :type dpi_y: int
"""
self.logger.debug("DBus call set_dpi_xy")
driver_path = self.get_driver_path('dpi')
if self._testing:
with open(driver_path, 'w') as driver_file:
if dpi_y == -1:
driver_file.write("{}".format(dpi_x))
else:
driver_file.write("{}:{}".format(dpi_x, dpi_y))
return
# If the application requests just one value to be written
if dpi_y == -1:
dpi_bytes = struct.pack('>H', dpi_x)
else:
dpi_bytes = struct.pack('>HH', dpi_x, dpi_y)
self.dpi[0] = dpi_x
self.dpi[1] = dpi_y
self.set_persistence(None, "dpi_x", dpi_x)
self.set_persistence(None, "dpi_y", dpi_y)
# constrain DPI to maximum
if hasattr(self, 'DPI_MAX'):
if self.dpi[0] > self.DPI_MAX:
self.dpi[0] = self.DPI_MAX
if self.dpi[1] > self.DPI_MAX:
self.dpi[1] = self.DPI_MAX
with open(driver_path, 'wb') as driver_file:
driver_file.write(dpi_bytes)
@endpoint('razer.device.dpi', 'getDPI', out_sig='ai')
def get_dpi_xy(self):
"""
get the DPI on the mouse
:return: List of X, Y DPI
:rtype: list of int
"""
self.logger.debug("DBus call get_dpi_xy")
driver_path = self.get_driver_path('dpi')
# try retrieving DPI from the hardware.
# if we can't (e.g. because the mouse has been disconnected)
# return the value in local storage.
try:
with open(driver_path, 'r') as driver_file:
result = driver_file.read()
dpi = [int(dpi) for dpi in result.strip().split(':')]
except FileNotFoundError:
return self.dpi
return dpi
@endpoint('razer.device.dpi', 'setDPIStages', in_sig='ya(qq)')
def set_dpi_stages(self, active_stage, dpi_stages):
"""
    Set the DPI stages on the mouse. Takes in pairs of 2 bytes, big-endian
:param active_stage: DPI stage to enable
:param dpi_stages: pairs of dpi X and dpi Y for each stage
:type dpi_stages: list of (int, int)
"""
self.logger.debug("DBus call set_dpi_stages")
driver_path = self.get_driver_path('dpi_stages')
dpi_bytes = struct.pack('B', active_stage)
for dpi_x, dpi_y in dpi_stages:
dpi_bytes += struct.pack('>HH', dpi_x, dpi_y)
with open(driver_path, 'wb') as driver_file:
driver_file.write(dpi_bytes)
@endpoint('razer.device.dpi', 'getDPIStages', out_sig='(ya(qq))')
def get_dpi_stages(self):
"""
get the DPI stages on the mouse
:return: List of X, Y DPI
:rtype: (int, list of (int, int))
"""
self.logger.debug("DBus call get_dpi_stages")
driver_path = self.get_driver_path('dpi_stages')
dpi_stages = []
with open(driver_path, 'rb') as driver_file:
result = driver_file.read()
(active_stage,) = struct.unpack('B', result[:1])
result = result[1:]
while len(result) >= 4:
(dpi_x, dpi_y) = struct.unpack('>HH', result[:4])
dpi_stages.append((dpi_x, dpi_y))
result = result[4:]
return (active_stage, dpi_stages)
@endpoint('razer.device.dpi', 'maxDPI', out_sig='i')
def max_dpi(self):
self.logger.debug("DBus call max_dpi")
if hasattr(self, 'DPI_MAX'):
return self.DPI_MAX
else:
return 500
@endpoint('razer.device.dpi', 'availableDPI', out_sig='ai')
def available_dpi(self):
self.logger.debug("DBus call available_dpi")
if hasattr(self, 'AVAILABLE_DPI'):
return self.AVAILABLE_DPI
return []
@endpoint('razer.device.misc', 'setPollRate', in_sig='q')
def set_poll_rate(self, rate):
"""
    Set the polling rate of the mouse
:param rate: Poll rate
:type rate: int
"""
self.logger.debug("DBus call set_poll_rate")
if rate in (1000, 500, 125):
driver_path = self.get_driver_path('poll_rate')
# remember poll rate
self.poll_rate = rate
with open(driver_path, 'w') as driver_file:
driver_file.write(str(rate))
else:
self.logger.error("Poll rate %d is invalid", rate)
@endpoint('razer.device.misc', 'getPollRate', out_sig='i')
def get_poll_rate(self):
"""
Get the polling rate from the device
:return: Poll rate
:rtype: int
"""
self.logger.debug("DBus call get_poll_rate")
return int(self.poll_rate)
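# Illustrative sketch (not part of the daemon): the byte layouts assumed by the
# DPI endpoints above. setDPI packs X/Y as big-endian unsigned shorts ('>HH');
# setDPIStages prepends one byte for the active stage. All values are hypothetical.
if __name__ == '__main__':
    dpi_payload = struct.pack('>HH', 800, 1600)
    assert struct.unpack('>HH', dpi_payload) == (800, 1600)
    stages = [(800, 800), (1600, 1600), (3200, 3200)]
    stage_payload = struct.pack('B', 2) + b''.join(struct.pack('>HH', x, y) for x, y in stages)
    (active_stage,) = struct.unpack('B', stage_payload[:1])
    assert active_stage == 2 and len(stage_payload) == 1 + 4 * len(stages)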
|
from kombu.utils.objects import cached_property
class test_cached_property:
def test_deleting(self):
class X:
xx = False
@cached_property
def foo(self):
return 42
@foo.deleter # noqa
def foo(self, value):
self.xx = value
x = X()
del(x.foo)
assert not x.xx
x.__dict__['foo'] = 'here'
del(x.foo)
assert x.xx == 'here'
def test_when_access_from_class(self):
class X:
xx = None
@cached_property
def foo(self):
return 42
@foo.setter # noqa
def foo(self, value):
self.xx = 10
desc = X.__dict__['foo']
assert X.foo is desc
assert desc.__get__(None) is desc
assert desc.__set__(None, 1) is desc
assert desc.__delete__(None) is desc
assert desc.setter(1)
x = X()
x.foo = 30
assert x.xx == 10
del(x.foo)
|
from pyaehw4a1.aehw4a1 import AehW4a1
from homeassistant import config_entries
from homeassistant.helpers import config_entry_flow
from .const import DOMAIN
async def _async_has_devices(hass):
"""Return if there are devices that can be discovered."""
aehw4a1_ip_addresses = await AehW4a1().discovery()
return len(aehw4a1_ip_addresses) > 0
config_entry_flow.register_discovery_flow(
DOMAIN, "Hisense AEH-W4A1", _async_has_devices, config_entries.CONN_CLASS_LOCAL_POLL
)
|
from __future__ import unicode_literals
import logging
import unittest
import smart_open
GZIP_MAGIC = b'\x1f\x8b'
BASE_URL = ('https://raw.githubusercontent.com/RaRe-Technologies/smart_open/'
'master/smart_open/tests/test_data/')
class ReadTest(unittest.TestCase):
def test_read_text(self):
url = BASE_URL + 'crime-and-punishment.txt'
with smart_open.smart_open(url, encoding='utf-8') as fin:
text = fin.read()
self.assertTrue(text.startswith('В начале июля, в чрезвычайно жаркое время,'))
self.assertTrue(text.endswith('улизнуть, чтобы никто не видал.\n'))
def test_read_binary(self):
url = BASE_URL + 'crime-and-punishment.txt'
with smart_open.smart_open(url, 'rb') as fin:
text = fin.read()
self.assertTrue(text.startswith('В начале июля, в чрезвычайно'.encode('utf-8')))
self.assertTrue(text.endswith('улизнуть, чтобы никто не видал.\n'.encode('utf-8')))
def test_read_gzip_text(self):
url = BASE_URL + 'crime-and-punishment.txt.gz'
with smart_open.smart_open(url, encoding='utf-8') as fin:
text = fin.read()
self.assertTrue(text.startswith('В начале июля, в чрезвычайно жаркое время,'))
self.assertTrue(text.endswith('улизнуть, чтобы никто не видал.\n'))
def test_read_gzip_binary(self):
url = BASE_URL + 'crime-and-punishment.txt.gz'
with smart_open.smart_open(url, 'rb', ignore_extension=True) as fin:
binary = fin.read()
self.assertTrue(binary.startswith(GZIP_MAGIC))
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.DEBUG)
unittest.main()
|
from aiohttp import ClientError
from homeassistant.components.nightscout.const import DOMAIN
from homeassistant.config_entries import (
ENTRY_STATE_LOADED,
ENTRY_STATE_NOT_LOADED,
ENTRY_STATE_SETUP_RETRY,
)
from homeassistant.const import CONF_URL
from tests.async_mock import patch
from tests.common import MockConfigEntry
from tests.components.nightscout import init_integration
async def test_unload_entry(hass):
"""Test successful unload of entry."""
entry = await init_integration(hass)
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
assert entry.state == ENTRY_STATE_LOADED
assert await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
assert entry.state == ENTRY_STATE_NOT_LOADED
assert not hass.data.get(DOMAIN)
async def test_async_setup_raises_entry_not_ready(hass):
"""Test that it throws ConfigEntryNotReady when exception occurs during setup."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_URL: "https://some.url:1234"},
)
config_entry.add_to_hass(hass)
with patch(
"homeassistant.components.nightscout.NightscoutAPI.get_server_status",
side_effect=ClientError(),
):
await hass.config_entries.async_setup(config_entry.entry_id)
assert config_entry.state == ENTRY_STATE_SETUP_RETRY
|
import datetime
class NeverExpires(object):
def expired(self):
return False
class Timer(object):
"""
A simple timer that will indicate when an expiration time has passed.
"""
def __init__(self, expiration):
'Create a timer that expires at `expiration` (UTC datetime)'
self.expiration = expiration
@classmethod
def after(cls, elapsed):
"""
Return a timer that will expire after `elapsed` passes.
"""
return cls(datetime.datetime.utcnow() + elapsed)
def expired(self):
return datetime.datetime.utcnow() >= self.expiration
class LockTimeout(Exception):
'An exception when a lock could not be acquired before a timeout period'
class LockChecker(object):
"""
Keep track of the time and detect if a timeout has expired
"""
def __init__(self, session_id, timeout):
self.session_id = session_id
if timeout:
self.timer = Timer.after(timeout)
else:
self.timer = NeverExpires()
def expired(self):
if self.timer.expired():
raise LockTimeout(
'Timeout acquiring lock for %(session_id)s' % vars(self))
return False
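# Illustrative usage sketch (assumed example, not part of the module):
# a LockChecker built with a timeout raises LockTimeout once the timer expires;
# with a falsy timeout it never expires.
if __name__ == '__main__':
    import time
    checker = LockChecker('demo-session', datetime.timedelta(milliseconds=50))
    assert checker.expired() is False  # timer has not expired yet
    time.sleep(0.1)
    try:
        checker.expired()
    except LockTimeout as exc:
        print(exc)
    assert LockChecker('demo-session', None).expired() is False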
|
import logging
from envoy_reader.envoy_reader import EnvoyReader
import requests
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_IP_ADDRESS,
CONF_MONITORED_CONDITIONS,
CONF_NAME,
CONF_PASSWORD,
CONF_USERNAME,
ENERGY_WATT_HOUR,
POWER_WATT,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
SENSORS = {
"production": ("Envoy Current Energy Production", POWER_WATT),
"daily_production": ("Envoy Today's Energy Production", ENERGY_WATT_HOUR),
"seven_days_production": (
"Envoy Last Seven Days Energy Production",
ENERGY_WATT_HOUR,
),
"lifetime_production": ("Envoy Lifetime Energy Production", ENERGY_WATT_HOUR),
"consumption": ("Envoy Current Energy Consumption", POWER_WATT),
"daily_consumption": ("Envoy Today's Energy Consumption", ENERGY_WATT_HOUR),
"seven_days_consumption": (
"Envoy Last Seven Days Energy Consumption",
ENERGY_WATT_HOUR,
),
"lifetime_consumption": ("Envoy Lifetime Energy Consumption", ENERGY_WATT_HOUR),
"inverters": ("Envoy Inverter", POWER_WATT),
}
ICON = "mdi:flash"
CONST_DEFAULT_HOST = "envoy"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_IP_ADDRESS, default=CONST_DEFAULT_HOST): cv.string,
vol.Optional(CONF_USERNAME, default="envoy"): cv.string,
vol.Optional(CONF_PASSWORD, default=""): cv.string,
vol.Optional(CONF_MONITORED_CONDITIONS, default=list(SENSORS)): vol.All(
cv.ensure_list, [vol.In(list(SENSORS))]
),
vol.Optional(CONF_NAME, default=""): cv.string,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Enphase Envoy sensor."""
ip_address = config[CONF_IP_ADDRESS]
monitored_conditions = config[CONF_MONITORED_CONDITIONS]
name = config[CONF_NAME]
username = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
envoy_reader = EnvoyReader(ip_address, username, password)
entities = []
# Iterate through the list of sensors
for condition in monitored_conditions:
if condition == "inverters":
try:
inverters = await envoy_reader.inverters_production()
except requests.exceptions.HTTPError:
_LOGGER.warning(
"Authentication for Inverter data failed during setup: %s",
ip_address,
)
continue
if isinstance(inverters, dict):
for inverter in inverters:
entities.append(
Envoy(
envoy_reader,
condition,
f"{name}{SENSORS[condition][0]} {inverter}",
SENSORS[condition][1],
)
)
else:
entities.append(
Envoy(
envoy_reader,
condition,
f"{name}{SENSORS[condition][0]}",
SENSORS[condition][1],
)
)
async_add_entities(entities)
class Envoy(Entity):
"""Implementation of the Enphase Envoy sensors."""
def __init__(self, envoy_reader, sensor_type, name, unit):
"""Initialize the sensor."""
self._envoy_reader = envoy_reader
self._type = sensor_type
self._name = name
self._unit_of_measurement = unit
self._state = None
self._last_reported = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return ICON
@property
def device_state_attributes(self):
"""Return the state attributes."""
if self._type == "inverters":
return {"last_reported": self._last_reported}
return None
async def async_update(self):
"""Get the energy production data from the Enphase Envoy."""
if self._type != "inverters":
_state = await getattr(self._envoy_reader, self._type)()
if isinstance(_state, int):
self._state = _state
else:
_LOGGER.error(_state)
self._state = None
elif self._type == "inverters":
try:
                inverters = await self._envoy_reader.inverters_production()
            except requests.exceptions.HTTPError:
                _LOGGER.warning(
                    "Authentication for Inverter data failed during update: %s",
                    self._envoy_reader.host,
                )
                return
if isinstance(inverters, dict):
serial_number = self._name.split(" ")[2]
self._state = inverters[serial_number][0]
self._last_reported = inverters[serial_number][1]
else:
self._state = None
|
import logging
import time
from threading import Event, Thread
import pip
from .guns import LogGun, SqlGun, CustomGun, HttpGun, ScenarioGun, UltimateGun
from .reader import BfgReader, BfgStatsReader
from .widgets import BfgInfoWidget
from ..Phantom import PhantomReader, string_to_df
from .worker import BFGMultiprocessing, BFGGreen
from ..Console import Plugin as ConsolePlugin
from ...common.interfaces import GeneratorPlugin
from ...common.util import FileMultiReader
from ...stepper import StepperWrapper
class Plugin(GeneratorPlugin):
""" Big Fucking Gun plugin """
SECTION = 'bfg'
def __init__(self, core, cfg, name):
super(Plugin, self).__init__(core, cfg, name)
self.close_event = Event()
self._bfg = None
self.log = logging.getLogger(__name__)
self.gun_type = None
self.stepper_wrapper = StepperWrapper(core, cfg)
self.log.info("Initialized BFG")
self.report_filename = "bfgout.log"
self.results_listener = None
self.gun_classes = {
'log': LogGun,
'sql': SqlGun,
'custom': CustomGun,
'http': HttpGun,
'scenario': ScenarioGun,
'ultimate': UltimateGun,
}
@staticmethod
def get_key():
return __file__
def get_available_options(self):
return [
"gun_type", "instances", "cached_stpd", "pip"
]
def configure(self):
self.log.info("Configuring BFG...")
self.stepper_wrapper.read_config()
self.stepper_wrapper.prepare_stepper()
with open(self.report_filename, 'w'):
pass
self.core.add_artifact_file(self.report_filename)
def _write_results_into_file(self):
"""listens for messages on the q, writes to file. """
reader = BfgReader(self.bfg.results, self.close_event)
columns = ['receive_ts', 'tag', 'interval_real', 'connect_time', 'send_time', 'latency', 'receive_time',
'interval_event', 'size_out', 'size_in', 'net_code', 'proto_code']
for entry in reader:
if entry is not None:
entry.receive_ts = entry.receive_ts.round(3)
with open(self.report_filename, 'a') as report_file:
report_file.write(entry.to_csv(index=False, header=False, sep='\t', columns=columns))
time.sleep(0.1)
def get_reader(self, parser=string_to_df):
if self.reader is None:
self.reader = FileMultiReader(self.report_filename, self.close_event)
return PhantomReader(self.reader.get_file(), parser=parser)
def get_stats_reader(self):
if self.stats_reader is None:
self.stats_reader = BfgStatsReader(self.bfg.instance_counter, self.stepper_wrapper.steps)
return self.stats_reader
@property
def bfg(self):
if self._bfg is None:
BFG = BFGGreen if self.get_option("worker_type", "") == "green" else BFGMultiprocessing
self._bfg = BFG(
gun=self.gun,
instances=self.stepper_wrapper.instances,
stpd_filename=self.stepper_wrapper.stpd,
cached_stpd=self.get_option("cached_stpd"),
green_threads_per_instance=int(self.get_option('green_threads_per_instance', 1000)),
)
return self._bfg
def prepare_test(self):
pip_deps = self.get_option("pip", "").splitlines()
self.log.info("Installing with PIP: %s", pip_deps)
if pip_deps:
retcode = pip.main(["install", "--user"] + pip_deps)
if retcode != 0:
raise RuntimeError("Could not install required deps")
import site
from importlib import reload
reload(site)
self.log.info("BFG using ammo type %s", self.get_option("ammo_type"))
gun_type = self.get_option("gun_type")
if gun_type in self.gun_classes:
self.gun = self.gun_classes[gun_type](self.core, self.get_option('gun_config'))
else:
raise NotImplementedError(
'No such gun type implemented: "%s"' % gun_type)
self.results_listener = Thread(target=self._write_results_into_file, name="ResultsQueueListener")
try:
console = self.core.get_plugin_of_type(ConsolePlugin)
except Exception as ex:
self.log.debug("Console not found: %s", ex)
console = None
if console:
widget = BfgInfoWidget()
console.add_info_widget(widget)
self.core.job.aggregator.add_result_listener(widget)
self.log.info("Prepared BFG")
def start_test(self):
self.log.info("Starting BFG")
self.start_time = time.time()
self.bfg.start()
if self.results_listener is not None:
self.results_listener.start()
else:
self.log.fatal("Result listener is not initialized")
def is_test_finished(self):
if self.bfg.running():
return -1
else:
self.log.info("BFG finished")
self.close_event.set()
self.stats_reader.close()
return 0
def end_test(self, retcode):
if self.bfg.running():
self.log.info("Terminating BFG")
self.bfg.stop()
self.close_event.set()
self.stats_reader.close()
return retcode
|
import asyncio
from datetime import timedelta
import logging
from typing import Any, Dict, List
from pyvizio.const import APPS
from pyvizio.util import gen_apps_list_from_url
import voluptuous as vol
from homeassistant.components.media_player import DEVICE_CLASS_TV
from homeassistant.config_entries import ENTRY_STATE_LOADED, SOURCE_IMPORT, ConfigEntry
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import CONF_APPS, CONF_DEVICE_CLASS, DOMAIN, VIZIO_SCHEMA
_LOGGER = logging.getLogger(__name__)
def validate_apps(config: ConfigType) -> ConfigType:
"""Validate CONF_APPS is only used when CONF_DEVICE_CLASS == DEVICE_CLASS_TV."""
if (
config.get(CONF_APPS) is not None
and config[CONF_DEVICE_CLASS] != DEVICE_CLASS_TV
):
raise vol.Invalid(
f"'{CONF_APPS}' can only be used if {CONF_DEVICE_CLASS}' is '{DEVICE_CLASS_TV}'"
)
return config
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: vol.All(cv.ensure_list, [vol.All(VIZIO_SCHEMA, validate_apps)])},
extra=vol.ALLOW_EXTRA,
)
PLATFORMS = ["media_player"]
async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool:
"""Component setup, run import config flow for each entry in config."""
if DOMAIN in config:
for entry in config[DOMAIN]:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=entry
)
)
return True
async def async_setup_entry(hass: HomeAssistantType, config_entry: ConfigEntry) -> bool:
"""Load the saved entities."""
hass.data.setdefault(DOMAIN, {})
if (
CONF_APPS not in hass.data[DOMAIN]
and config_entry.data[CONF_DEVICE_CLASS] == DEVICE_CLASS_TV
):
coordinator = VizioAppsDataUpdateCoordinator(hass)
await coordinator.async_refresh()
hass.data[DOMAIN][CONF_APPS] = coordinator
for platform in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, platform)
)
return True
async def async_unload_entry(
hass: HomeAssistantType, config_entry: ConfigEntry
) -> bool:
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(config_entry, platform)
for platform in PLATFORMS
]
)
)
    # Exclude this config entry because it's not unloaded yet
if not any(
entry.state == ENTRY_STATE_LOADED
and entry.entry_id != config_entry.entry_id
and entry.data[CONF_DEVICE_CLASS] == DEVICE_CLASS_TV
for entry in hass.config_entries.async_entries(DOMAIN)
):
hass.data[DOMAIN].pop(CONF_APPS, None)
if not hass.data[DOMAIN]:
hass.data.pop(DOMAIN)
return unload_ok
class VizioAppsDataUpdateCoordinator(DataUpdateCoordinator):
"""Define an object to hold Vizio app config data."""
def __init__(self, hass: HomeAssistantType) -> None:
"""Initialize."""
super().__init__(
hass,
_LOGGER,
name=DOMAIN,
update_interval=timedelta(days=1),
update_method=self._async_update_data,
)
self.data = APPS
async def _async_update_data(self) -> List[Dict[str, Any]]:
"""Update data via library."""
data = await gen_apps_list_from_url(session=async_get_clientsession(self.hass))
if not data:
raise UpdateFailed
return sorted(data, key=lambda app: app["name"])
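# Illustrative sketch (assumed example, not part of the integration):
# validate_apps() only accepts CONF_APPS together with DEVICE_CLASS_TV and
# raises vol.Invalid otherwise. The config values below are hypothetical.
if __name__ == "__main__":
    tv_conf = {CONF_APPS: {"include": ["Netflix"]}, CONF_DEVICE_CLASS: DEVICE_CLASS_TV}
    assert validate_apps(tv_conf) is tv_conf
    try:
        validate_apps({CONF_APPS: {"include": ["Netflix"]}, CONF_DEVICE_CLASS: "speaker"})
    except vol.Invalid as err:
        print(err)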
|
from __future__ import division
from builtins import range
from .._externals.srm import SRM
from .procrustes import procrustes
import numpy as np
from .format_data import format_data as formatter
from .._shared.helpers import memoize
import warnings
@memoize
def align(data, align='hyper', normalize=None, ndims=None, method=None,
format_data=True):
"""
Aligns a list of arrays
This function takes a list of high dimensional arrays and 'hyperaligns' them
to a 'common' space, or coordinate system following the approach outlined by
Haxby et al, 2011. Hyperalignment uses linear transformations (rotation,
reflection, translation, scaling) to register a group of arrays to a common
space. This can be useful when two or more datasets describe an identical
or similar system, but may not be in same coordinate system. For example,
consider the example of fMRI recordings (voxels by time) from the visual
cortex of a group of subjects watching the same movie: The brain responses
should be highly similar, but the coordinates may not be aligned.
Haxby JV, Guntupalli JS, Connolly AC, Halchenko YO, Conroy BR, Gobbini
MI, Hanke M, and Ramadge PJ (2011) A common, high-dimensional model of
the representational space in human ventral temporal cortex. Neuron 72,
404 -- 416. (used to implement hyperalignment, see https://github.com/PyMVPA/PyMVPA)
Brain Imaging Analysis Kit, http://brainiak.org. (used to implement Shared Response Model [SRM], see https://github.com/IntelPNI/brainiak)
Parameters
----------
data : numpy array, pandas df, or list of arrays/dfs
A list of Numpy arrays or Pandas Dataframes
align : str or dict
If str, either 'hyper' or 'SRM'. If 'hyper', alignment algorithm will be
hyperalignment. If 'SRM', alignment algorithm will be shared response
model. You can also pass a dictionary for finer control, where the 'model'
key is a string that specifies the model and the params key is a dictionary
of parameter values (default : 'hyper').
format_data : bool
Whether or not to first call the format_data function (default: True).
normalize : None
Deprecated argument. Please use new analyze function to perform
combinations of transformations
ndims : None
Deprecated argument. Please use new analyze function to perform
combinations of transformations
Returns
----------
aligned : list
An aligned list of numpy arrays
"""
# if model is None, just return data
if align is None:
return data
elif isinstance(align, dict):
if align['model'] is None:
return data
else:
if method is not None:
warnings.warn('The method argument will be deprecated. Please use align. See the API docs for more info: http://hypertools.readthedocs.io/en/latest/hypertools.tools.align.html#hypertools.tools.align')
align = method
if align is True:
warnings.warn("Setting align=True will be deprecated. Please specify the \
type of alignment, i.e. align='hyper'. See API docs for more info: http://hypertools.readthedocs.io/en/latest/hypertools.tools.align.html#hypertools.tools.align")
align = 'hyper'
# common format
if format_data:
data = formatter(data, ppca=True)
    if len(data) == 1:
warnings.warn('Data in list of length 1 can not be aligned. '
'Skipping the alignment.')
if data[0].shape[1] >= data[0].shape[0]:
warnings.warn('The number of features exceeds number of samples. This can lead \
to overfitting. We recommend reducing the dimensionality to be \
less than the number of samples prior to hyperalignment.')
if (align == 'hyper') or (method == 'hyper'):
##STEP 0: STANDARDIZE SIZE AND SHAPE##
sizes_0 = [x.shape[0] for x in data]
sizes_1 = [x.shape[1] for x in data]
#find the smallest number of rows
R = min(sizes_0)
C = max(sizes_1)
m = [np.empty((R,C), dtype=np.ndarray)] * len(data)
for idx,x in enumerate(data):
y = x[0:R,:]
missing = C - y.shape[1]
add = np.zeros((y.shape[0], missing))
y = np.append(y, add, axis=1)
m[idx]=y
##STEP 1: TEMPLATE##
for x in range(0, len(m)):
if x==0:
template = np.copy(m[x])
else:
next = procrustes(m[x], template / (x + 1))
template += next
template /= len(m)
##STEP 2: NEW COMMON TEMPLATE##
#align each subj to the template from STEP 1
template2 = np.zeros(template.shape)
for x in range(0, len(m)):
next = procrustes(m[x], template)
template2 += next
template2 /= len(m)
#STEP 3 (below): ALIGN TO NEW TEMPLATE
aligned = [np.zeros(template2.shape)] * len(m)
for x in range(0, len(m)):
next = procrustes(m[x], template2)
aligned[x] = next
return aligned
elif (align == 'SRM') or (method == 'SRM'):
data = [i.T for i in data]
srm = SRM(features=np.min([i.shape[0] for i in data]))
fit = srm.fit(data)
return [i.T for i in srm.transform(data)]
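# Illustrative usage sketch (assumed example, not part of the module):
# hyperalign two rotated copies of the same dataset; after alignment the two
# arrays should match far more closely than before.
if __name__ == '__main__':
    np.random.seed(0)
    base = np.random.randn(100, 10)
    rotation, _ = np.linalg.qr(np.random.randn(10, 10))
    data = [base, base.dot(rotation)]
    aligned = align(data, align='hyper')
    print('before:', np.linalg.norm(data[0] - data[1]),
          'after:', np.linalg.norm(aligned[0] - aligned[1]))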
|
import abc
import hashlib
import logging
from threading import RLock
import numpy as np
import pandas as pd
from bson import Binary
from arctic._config import ARCTIC_AUTO_EXPAND_CHUNK_SIZE
from arctic.serialization.numpy_records import PandasSerializer
from .._compression import compress
from .._config import MAX_DOCUMENT_SIZE
from .._util import NP_OBJECT_DTYPE
from ..exceptions import ArcticSerializationException
ABC = abc.ABCMeta('ABC', (object,), {})
log = logging.getLogger(__name__)
def incremental_checksum(item, curr_sha=None, is_bytes=False):
curr_sha = hashlib.sha1() if curr_sha is None else curr_sha
curr_sha.update(item if is_bytes else item.tostring())
return curr_sha
class LazyIncrementalSerializer(ABC):
def __init__(self, serializer, input_data, chunk_size):
if chunk_size < 1:
raise ArcticSerializationException("LazyIncrementalSerializer can't be initialized "
"with chunk_size < 1 ({})".format(chunk_size))
if not serializer:
raise ArcticSerializationException("LazyIncrementalSerializer can't be initialized "
"with a None serializer object")
self.input_data = input_data
self.chunk_size = chunk_size
self._serializer = serializer
self._initialized = False
self._checksum = None
@abc.abstractmethod
def __len__(self):
pass
@abc.abstractproperty
def generator(self):
pass
@abc.abstractproperty
def generator_bytes(self):
pass
@abc.abstractproperty
def serialize(self):
pass
class IncrementalPandasToRecArraySerializer(LazyIncrementalSerializer):
def __init__(self, serializer, input_data, chunk_size, string_max_len=None):
super(IncrementalPandasToRecArraySerializer, self).__init__(serializer, input_data, chunk_size)
if not isinstance(serializer, PandasSerializer):
raise ArcticSerializationException("IncrementalPandasToRecArraySerializer requires a serializer of "
"type PandasSerializer.")
if not isinstance(input_data, (pd.DataFrame, pd.Series)):
raise ArcticSerializationException("IncrementalPandasToRecArraySerializer requires a pandas DataFrame or "
"Series as data source input.")
if string_max_len and string_max_len < 1:
raise ArcticSerializationException("IncrementalPandasToRecArraySerializer can't be initialized "
"with string_max_len < 1 ({})".format(string_max_len))
self.string_max_len = string_max_len
# The state which needs to be lazily initialized
self._dtype = None
self._shape = None
self._rows_per_chunk = 0
self._total_chunks = 0
self._has_string_object = False
self._lock = RLock()
def _dtype_convert_to_max_len_string(self, input_ndtype, fname):
if input_ndtype.type not in (np.string_, np.unicode_):
return input_ndtype, False
type_sym = 'S' if input_ndtype.type == np.string_ else 'U'
max_str_len = len(max(self.input_data[fname].astype(type_sym), key=len))
str_field_dtype = np.dtype('{}{:d}'.format(type_sym, max_str_len)) if max_str_len > 0 else input_ndtype
return str_field_dtype, True
def _get_dtype(self):
# Serializer is being called only if can_convert_to_records_without_objects() has passed,
# which means that the resulting recarray does not contain objects but only numpy types, string, or unicode
# Serialize the first row to obtain info about row size in bytes (cache first few rows only)
# Also raise an Exception early, if data are not serializable
first_chunk, serialized_dtypes = self._serializer.serialize(
self.input_data[0:10] if len(self) > 0 else self.input_data,
string_max_len=self.string_max_len)
# This is the common case, where first row's dtype represents well the whole dataframe's dtype
if serialized_dtypes is None or \
len(self.input_data) == 0 or \
NP_OBJECT_DTYPE not in self.input_data.dtypes.values:
return first_chunk, serialized_dtypes, False
# Reaching here means we have at least one column of type object
# To correctly serialize incrementally, we need to know the final dtype (type and fixed length),
# using length-conversion information from all values of the object columns
dtype_arr = []
has_string_object = False
for field_name in serialized_dtypes.names: # include all column names, along with the expanded multi-index
field_dtype = serialized_dtypes[field_name]
if field_name not in self.input_data or self.input_data.dtypes[field_name] is NP_OBJECT_DTYPE:
# Note: .hasobject breaks for timezone-aware datetime64 pandas columns, so compare with dtype('O')
# if column is an expanded multi index or doesn't contain objects, the serialized 1st row dtype is safe
field_dtype, with_str_object = self._dtype_convert_to_max_len_string(field_dtype, field_name)
has_string_object |= with_str_object
dtype_arr.append((field_name, field_dtype))
return first_chunk, np.dtype(dtype_arr), has_string_object
def _lazy_init(self):
if self._initialized:
return
with self._lock:
if self._initialized: # intentional double check here
return
# Get the dtype of the serialized array (takes into account object types, converted to fixed length strings)
first_chunk, dtype, has_string_object = self._get_dtype()
# Compute the number of rows which can fit in a chunk
rows_per_chunk = 0
if len(self) > 0 and self.chunk_size > 1:
rows_per_chunk = IncrementalPandasToRecArraySerializer._calculate_rows_per_chunk(self.chunk_size, first_chunk)
# Initialize object's state
self._dtype = dtype
shp = list(first_chunk.shape)
shp[0] = len(self)
self._shape = tuple(shp)
self._has_string_object = has_string_object
self._rows_per_chunk = rows_per_chunk
self._total_chunks = int(np.ceil(float(len(self)) / self._rows_per_chunk)) if rows_per_chunk > 0 else 0
self._initialized = True
@staticmethod
def _calculate_rows_per_chunk(max_chunk_size, chunk):
sze = int(chunk.dtype.itemsize * np.prod(chunk.shape[1:]))
sze = sze if sze < max_chunk_size else max_chunk_size
rows_per_chunk = int(max_chunk_size / sze)
if rows_per_chunk < 1 and ARCTIC_AUTO_EXPAND_CHUNK_SIZE:
# If a row size is larger than chunk_size, use the maximum document size
logging.warning('Chunk size of {} is too small to fit a row ({}). '
'Using maximum document size.'.format(max_chunk_size, MAX_DOCUMENT_SIZE))
# For huge rows, fall-back to using a very large document size, less than max-allowed by MongoDB
rows_per_chunk = int(MAX_DOCUMENT_SIZE / sze)
if rows_per_chunk < 1:
raise ArcticSerializationException("Serialization failed to split data into max sized chunks.")
return rows_per_chunk
def __len__(self):
return len(self.input_data)
@property
def shape(self):
self._lazy_init()
return self._shape
@property
def dtype(self):
self._lazy_init()
return self._dtype
@property
def rows_per_chunk(self):
self._lazy_init()
return self._rows_per_chunk
def checksum(self, from_idx, to_idx):
if self._checksum is None:
self._lazy_init()
total_sha = None
for chunk_bytes, dtype in self.generator_bytes(from_idx=from_idx, to_idx=to_idx):
# TODO: what about compress_array here in batches?
compressed_chunk = compress(chunk_bytes)
total_sha = incremental_checksum(compressed_chunk, curr_sha=total_sha, is_bytes=True)
self._checksum = Binary(total_sha.digest())
return self._checksum
def generator(self, from_idx=None, to_idx=None):
return self._generator(from_idx=from_idx, to_idx=to_idx)
def generator_bytes(self, from_idx=None, to_idx=None):
return self._generator(from_idx=from_idx, to_idx=to_idx, get_bytes=True)
def _generator(self, from_idx, to_idx, get_bytes=False):
# Note that the range is: [from_idx, to_idx)
self._lazy_init()
my_length = len(self)
# Take into account default arguments and negative indexing (from end offset)
from_idx = 0 if from_idx is None else from_idx
if from_idx < 0:
from_idx = my_length + from_idx
to_idx = my_length if to_idx is None else min(to_idx, my_length)
if to_idx < 0:
to_idx = my_length + to_idx
# No data, finish iteration
if my_length == 0 or from_idx >= my_length or from_idx >= to_idx:
return
# Perform serialization for each chunk
while from_idx < to_idx:
curr_stop = min(from_idx + self._rows_per_chunk, to_idx)
chunk, _ = self._serializer.serialize(
self.input_data[from_idx: curr_stop],
string_max_len=self.string_max_len,
forced_dtype=self.dtype if self._has_string_object else None)
# Let the gc collect the intermediate serialized chunk as early as possible
chunk = chunk.tostring() if chunk is not None and get_bytes else chunk
yield chunk, self.dtype, from_idx, curr_stop
from_idx = curr_stop
def serialize(self):
return self._serializer.serialize(self.input_data, self.string_max_len)
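# Illustrative usage sketch (assumed example, not part of the module):
# serialize a small DataFrame in fixed-size chunks and checksum it, assuming
# arctic's DataFrameSerializer. The chunk_size value below is hypothetical.
if __name__ == '__main__':
    from arctic.serialization.numpy_records import DataFrameSerializer
    frame = pd.DataFrame({'a': np.arange(10), 'b': np.random.randn(10)})
    incremental = IncrementalPandasToRecArraySerializer(DataFrameSerializer(), frame, chunk_size=256)
    covered = sum(stop - start for _, _, start, stop in incremental.generator())
    assert covered == len(frame)
    print(incremental.checksum(0, len(frame)))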
|
from io import BytesIO
from vcr.filters import (
remove_headers,
replace_headers,
remove_query_parameters,
replace_query_parameters,
remove_post_data_parameters,
replace_post_data_parameters,
decode_response,
)
from vcr.request import Request
import gzip
import json
from unittest import mock
import zlib
def test_replace_headers():
# This tests all of:
# 1. keeping a header
# 2. removing a header
# 3. replacing a header
# 4. replacing a header using a callable
# 5. removing a header using a callable
# 6. replacing a header that doesn't exist
headers = {"one": ["keep"], "two": ["lose"], "three": ["change"], "four": ["shout"], "five": ["whisper"]}
request = Request("GET", "http://google.com", "", headers)
replace_headers(
request,
[
("two", None),
("three", "tada"),
("four", lambda key, value, request: value.upper()),
("five", lambda key, value, request: None),
("six", "doesntexist"),
],
)
assert request.headers == {"one": "keep", "three": "tada", "four": "SHOUT"}
def test_replace_headers_empty():
headers = {"hello": "goodbye", "secret": "header"}
request = Request("GET", "http://google.com", "", headers)
replace_headers(request, [])
assert request.headers == headers
def test_replace_headers_callable():
# This goes beyond test_replace_headers() to ensure that the callable
# receives the expected arguments.
headers = {"hey": "there"}
request = Request("GET", "http://google.com", "", headers)
callme = mock.Mock(return_value="ho")
replace_headers(request, [("hey", callme)])
assert request.headers == {"hey": "ho"}
assert callme.call_args == ((), {"request": request, "key": "hey", "value": "there"})
def test_remove_headers():
# Test the backward-compatible API wrapper.
headers = {"hello": ["goodbye"], "secret": ["header"]}
request = Request("GET", "http://google.com", "", headers)
remove_headers(request, ["secret"])
assert request.headers == {"hello": "goodbye"}
def test_replace_query_parameters():
# This tests all of:
# 1. keeping a parameter
# 2. removing a parameter
# 3. replacing a parameter
# 4. replacing a parameter using a callable
# 5. removing a parameter using a callable
# 6. replacing a parameter that doesn't exist
uri = "http://g.com/?one=keep&two=lose&three=change&four=shout&five=whisper"
request = Request("GET", uri, "", {})
replace_query_parameters(
request,
[
("two", None),
("three", "tada"),
("four", lambda key, value, request: value.upper()),
("five", lambda key, value, request: None),
("six", "doesntexist"),
],
)
assert request.query == [("four", "SHOUT"), ("one", "keep"), ("three", "tada")]
def test_remove_all_query_parameters():
uri = "http://g.com/?q=cowboys&w=1"
request = Request("GET", uri, "", {})
replace_query_parameters(request, [("w", None), ("q", None)])
assert request.uri == "http://g.com/"
def test_replace_query_parameters_callable():
# This goes beyond test_replace_query_parameters() to ensure that the
# callable receives the expected arguments.
uri = "http://g.com/?hey=there"
request = Request("GET", uri, "", {})
callme = mock.Mock(return_value="ho")
replace_query_parameters(request, [("hey", callme)])
assert request.uri == "http://g.com/?hey=ho"
assert callme.call_args == ((), {"request": request, "key": "hey", "value": "there"})
def test_remove_query_parameters():
# Test the backward-compatible API wrapper.
uri = "http://g.com/?q=cowboys&w=1"
request = Request("GET", uri, "", {})
remove_query_parameters(request, ["w"])
assert request.uri == "http://g.com/?q=cowboys"
def test_replace_post_data_parameters():
# This tests all of:
# 1. keeping a parameter
# 2. removing a parameter
# 3. replacing a parameter
# 4. replacing a parameter using a callable
# 5. removing a parameter using a callable
# 6. replacing a parameter that doesn't exist
body = b"one=keep&two=lose&three=change&four=shout&five=whisper"
request = Request("POST", "http://google.com", body, {})
replace_post_data_parameters(
request,
[
("two", None),
("three", "tada"),
("four", lambda key, value, request: value.upper()),
("five", lambda key, value, request: None),
("six", "doesntexist"),
],
)
assert request.body == b"one=keep&three=tada&four=SHOUT"
def test_replace_post_data_parameters_empty_body():
# This test ensures replace_post_data_parameters doesn't throw exception when body is empty.
body = None
request = Request("POST", "http://google.com", body, {})
replace_post_data_parameters(
request,
[
("two", None),
("three", "tada"),
("four", lambda key, value, request: value.upper()),
("five", lambda key, value, request: None),
("six", "doesntexist"),
],
)
assert request.body is None
def test_remove_post_data_parameters():
# Test the backward-compatible API wrapper.
body = b"id=secret&foo=bar"
request = Request("POST", "http://google.com", body, {})
remove_post_data_parameters(request, ["id"])
assert request.body == b"foo=bar"
def test_preserve_multiple_post_data_parameters():
body = b"id=secret&foo=bar&foo=baz"
request = Request("POST", "http://google.com", body, {})
replace_post_data_parameters(request, [("id", None)])
assert request.body == b"foo=bar&foo=baz"
def test_remove_all_post_data_parameters():
body = b"id=secret&foo=bar"
request = Request("POST", "http://google.com", body, {})
replace_post_data_parameters(request, [("id", None), ("foo", None)])
assert request.body == b""
def test_replace_json_post_data_parameters():
# This tests all of:
# 1. keeping a parameter
# 2. removing a parameter
# 3. replacing a parameter
# 4. replacing a parameter using a callable
# 5. removing a parameter using a callable
# 6. replacing a parameter that doesn't exist
body = b'{"one": "keep", "two": "lose", "three": "change", "four": "shout", "five": "whisper"}'
request = Request("POST", "http://google.com", body, {})
request.headers["Content-Type"] = "application/json"
replace_post_data_parameters(
request,
[
("two", None),
("three", "tada"),
("four", lambda key, value, request: value.upper()),
("five", lambda key, value, request: None),
("six", "doesntexist"),
],
)
request_data = json.loads(request.body.decode("utf-8"))
expected_data = json.loads('{"one": "keep", "three": "tada", "four": "SHOUT"}')
assert request_data == expected_data
def test_remove_json_post_data_parameters():
# Test the backward-compatible API wrapper.
body = b'{"id": "secret", "foo": "bar", "baz": "qux"}'
request = Request("POST", "http://google.com", body, {})
request.headers["Content-Type"] = "application/json"
remove_post_data_parameters(request, ["id"])
request_body_json = json.loads(request.body.decode("utf-8"))
expected_json = json.loads(b'{"foo": "bar", "baz": "qux"}'.decode("utf-8"))
assert request_body_json == expected_json
def test_remove_all_json_post_data_parameters():
body = b'{"id": "secret", "foo": "bar"}'
request = Request("POST", "http://google.com", body, {})
request.headers["Content-Type"] = "application/json"
replace_post_data_parameters(request, [("id", None), ("foo", None)])
assert request.body == b"{}"
def test_decode_response_uncompressed():
recorded_response = {
"status": {"message": "OK", "code": 200},
"headers": {
"content-length": ["10806"],
"date": ["Fri, 24 Oct 2014 18:35:37 GMT"],
"content-type": ["text/html; charset=utf-8"],
},
"body": {"string": b""},
}
assert decode_response(recorded_response) == recorded_response
def test_decode_response_deflate():
body = b"deflate message"
deflate_response = {
"body": {"string": zlib.compress(body)},
"headers": {
"access-control-allow-credentials": ["true"],
"access-control-allow-origin": ["*"],
"connection": ["keep-alive"],
"content-encoding": ["deflate"],
"content-length": ["177"],
"content-type": ["application/json"],
"date": ["Wed, 02 Dec 2015 19:44:32 GMT"],
"server": ["nginx"],
},
"status": {"code": 200, "message": "OK"},
}
decoded_response = decode_response(deflate_response)
assert decoded_response["body"]["string"] == body
assert decoded_response["headers"]["content-length"] == [str(len(body))]
def test_decode_response_gzip():
body = b"gzip message"
buf = BytesIO()
f = gzip.GzipFile("a", fileobj=buf, mode="wb")
f.write(body)
f.close()
compressed_body = buf.getvalue()
buf.close()
gzip_response = {
"body": {"string": compressed_body},
"headers": {
"access-control-allow-credentials": ["true"],
"access-control-allow-origin": ["*"],
"connection": ["keep-alive"],
"content-encoding": ["gzip"],
"content-length": ["177"],
"content-type": ["application/json"],
"date": ["Wed, 02 Dec 2015 19:44:32 GMT"],
"server": ["nginx"],
},
"status": {"code": 200, "message": "OK"},
}
decoded_response = decode_response(gzip_response)
assert decoded_response["body"]["string"] == body
assert decoded_response["headers"]["content-length"] == [str(len(body))]
|
from flexx import flx
class Cookies(flx.PyComponent):
def init(self):
with flx.Widget():
flx.Label(text='Refreshing the page should '
'maintain the value of the line edit.')
self.edit = flx.LineEdit(placeholder_text='username',
text=self.session.get_cookie('username', ''))
@flx.reaction('edit.text')
def _update_cookie(self, *events):
self.session.set_cookie('username', self.edit.text)
if __name__ == '__main__':
m = flx.launch(Cookies, 'browser')
flx.start()
|
from homeassistant.core import callback
from homeassistant.helpers.storage import Store
from .const import DOMAIN
ENTITY_MAP_STORAGE_KEY = f"{DOMAIN}-entity-map"
ENTITY_MAP_STORAGE_VERSION = 1
ENTITY_MAP_SAVE_DELAY = 10
class EntityMapStorage:
"""
Holds a cache of entity structure data from a paired HomeKit device.
HomeKit has a cacheable entity map that describes how an IP or BLE
endpoint is structured. This object holds the latest copy of that data.
An endpoint is made of accessories, services and characteristics. It is
safe to cache this data until the c# discovery data changes.
Caching this data means we can add HomeKit devices to HA immediately at
start even if discovery hasn't seen them yet or they are out of range. It
is also important for BLE devices - accessing the entity structure is
very slow for these devices.
"""
def __init__(self, hass):
"""Create a new entity map store."""
self.hass = hass
self.store = Store(hass, ENTITY_MAP_STORAGE_VERSION, ENTITY_MAP_STORAGE_KEY)
self.storage_data = {}
async def async_initialize(self):
"""Get the pairing cache data."""
raw_storage = await self.store.async_load()
if not raw_storage:
# There is no cached data about HomeKit devices yet
return
self.storage_data = raw_storage.get("pairings", {})
def get_map(self, homekit_id):
"""Get a pairing cache item."""
return self.storage_data.get(homekit_id)
@callback
def async_create_or_update_map(self, homekit_id, config_num, accessories):
"""Create a new pairing cache."""
data = {"config_num": config_num, "accessories": accessories}
self.storage_data[homekit_id] = data
self._async_schedule_save()
return data
@callback
def async_delete_map(self, homekit_id):
"""Delete pairing cache."""
if homekit_id not in self.storage_data:
return
self.storage_data.pop(homekit_id)
self._async_schedule_save()
@callback
def _async_schedule_save(self):
"""Schedule saving the entity map cache."""
self.store.async_delay_save(self._data_to_save, ENTITY_MAP_SAVE_DELAY)
@callback
def _data_to_save(self):
"""Return data of entity map to store in a file."""
return {"pairings": self.storage_data}
|
import numpy as np
from jax import config
import pytest
import tensornetwork
from tensornetwork.linalg import linalg
import tensornetwork.linalg.initialization
from tensornetwork import backends, backend_contextmanager
from tensornetwork.tests import testing_utils
from tensornetwork.block_sparse.index import Index
from tensornetwork.block_sparse.charge import U1Charge
from tensornetwork.block_sparse.blocksparsetensor import BlockSparseTensor
from tensornetwork.tensor import Tensor
from tensornetwork.backends.backend_factory import get_backend
# pylint: disable=no-member
config.update("jax_enable_x64", True)
def get_shape(backend, shape):
if backend == 'symmetric':
return [Index(U1Charge.random(s,-1,1), False) for s in shape]
return shape
def get_shape_hermitian(backend, shape):
if backend == 'symmetric':
flows = [True, False]
c = U1Charge.random(shape[0], -1, 1)
return [Index(c, flow) for flow in flows]
return shape
def initialize_tensor(fname, backend, shape, dtype):
shape = get_shape(backend, shape)
be = get_backend(backend)
func = getattr(be, fname)
return Tensor(func(shape=shape, dtype=dtype), backend=be)
def initialize_hermitian_matrix(backend, shape, dtype):
shape = get_shape_hermitian(backend, shape)
be = get_backend(backend)
arr = be.randn(shape=shape, dtype=dtype)
H = arr + be.conj(be.transpose(arr))
return Tensor(H, backend=be)
@pytest.mark.parametrize("dtype", testing_utils.np_float_dtypes)
@pytest.mark.parametrize("backend",['jax', 'symmetric', 'numpy', 'pytorch', 'tensorflow'])
def test_eigh_vs_backend(backend, dtype):
np.random.seed(10)
shape = (4, 4)
dtype = testing_utils.np_dtype_to_backend(backend, dtype)
tensor = initialize_hermitian_matrix(backend, shape, dtype)
tn_result = linalg.eigh(tensor)
if backend is None:
backend = backend_contextmanager.get_default_backend()
backend_obj = backends.backend_factory.get_backend(backend)
backend_result = backend_obj.eigh(tensor.array)
tn_arrays = [t.array for t in tn_result]
for tn_arr, backend_arr in zip(tn_arrays, backend_result):
testing_utils.assert_allclose(tn_arr, backend_arr, backend_obj)
@pytest.mark.parametrize("dtype", testing_utils.np_float_dtypes)
def test_expm_vs_backend(backend, dtype):
shape = 6
dtype = testing_utils.np_dtype_to_backend(backend, dtype)
tensor = tensornetwork.eye(shape, backend=backend, dtype=dtype)
if backend in ["pytorch"]:
with pytest.raises(NotImplementedError):
tn_result = linalg.expm(tensor)
else:
tn_result = linalg.expm(tensor)
backend_obj = backends.backend_factory.get_backend(backend)
if backend in ["pytorch"]:
with pytest.raises(NotImplementedError):
backend_result = backend_obj.expm(tensor.array)
else:
backend_result = backend_obj.expm(tensor.array)
np.testing.assert_allclose(tn_result.array, backend_result)
@pytest.mark.parametrize("dtype", testing_utils.np_float_dtypes)
@pytest.mark.parametrize("backend",
['jax', 'symmetric', 'numpy', 'pytorch', 'tensorflow'])
def test_inv_vs_backend(backend, dtype):
np.random.seed(10)
shape = (4, 4)
dtype = testing_utils.np_dtype_to_backend(backend, dtype)
tensor = initialize_hermitian_matrix(backend, shape, dtype)
tn_result = linalg.inv(tensor)
if backend is None:
backend = backend_contextmanager.get_default_backend()
backend_obj = backends.backend_factory.get_backend(backend)
backend_result = backend_obj.inv(tensor.array)
testing_utils.assert_allclose(tn_result.array, backend_result, backend_obj)
@pytest.mark.parametrize("dtype", testing_utils.np_float_dtypes)
@pytest.mark.parametrize("backend",
['jax', 'symmetric', 'numpy', 'pytorch', 'tensorflow'])
def test_norm_vs_backend(backend, dtype):
np.random.seed(10)
shape = (6, 8, 6)
dtype = testing_utils.np_dtype_to_backend(backend, dtype)
tensor = initialize_tensor('randn', backend, shape, dtype)
tn_result = linalg.norm(tensor)
if backend is None:
backend = backend_contextmanager.get_default_backend()
backend_obj = backends.backend_factory.get_backend(backend)
backend_result = backend_obj.norm(tensor.array)
assert backend_result == tn_result
@pytest.mark.parametrize("dtype", testing_utils.np_float_dtypes)
@pytest.mark.parametrize("backend",
['jax', 'symmetric', 'numpy', 'pytorch', 'tensorflow'])
def test_svd_vs_backend(backend, dtype):
np.random.seed(10)
shape = (3, 6, 4, 6)
dtype = testing_utils.np_dtype_to_backend(backend, dtype)
tensor = initialize_tensor('randn', backend, shape, dtype)
split_axis = 1
max_singular_values = 5
max_trunc_error = 0.1
relative = True
tn_result = linalg.svd(tensor, split_axis,
max_singular_values=max_singular_values,
max_truncation_error=max_trunc_error,
relative=relative)
if backend is None:
backend = backend_contextmanager.get_default_backend()
backend_obj = backends.backend_factory.get_backend(backend)
backend_result = backend_obj.svd(tensor.array, split_axis,
max_singular_values=max_singular_values,
max_truncation_error=max_trunc_error,
relative=relative)
tn_arrays = [t.array for t in tn_result]
for tn_arr, backend_arr in zip(tn_arrays, backend_result):
testing_utils.assert_allclose(tn_arr, backend_arr, backend_obj)
@pytest.mark.parametrize("dtype", testing_utils.np_float_dtypes)
@pytest.mark.parametrize("backend",
['jax', 'symmetric', 'numpy', 'pytorch', 'tensorflow'])
def test_qr_vs_backend(backend, dtype):
np.random.seed(10)
shape = (3, 6, 4, 2)
dtype = testing_utils.np_dtype_to_backend(backend, dtype)
tensor = initialize_tensor('randn', backend, shape, dtype)
split_axis = 1
tn_result = linalg.qr(tensor, split_axis, non_negative_diagonal=False)
if backend is None:
backend = backend_contextmanager.get_default_backend()
backend_obj = backends.backend_factory.get_backend(backend)
backend_result = backend_obj.qr(tensor.array, split_axis)
tn_arrays = [t.array for t in tn_result]
for tn_arr, backend_arr in zip(tn_arrays, backend_result):
testing_utils.assert_allclose(tn_arr, backend_arr, backend_obj)
@pytest.mark.parametrize("dtype", testing_utils.np_float_dtypes)
@pytest.mark.parametrize("backend",
['jax', 'symmetric', 'numpy', 'pytorch', 'tensorflow'])
def test_rq_vs_backend(backend, dtype):
np.random.seed(10)
shape = (3, 6, 4, 2)
dtype = testing_utils.np_dtype_to_backend(backend, dtype)
tensor = initialize_tensor('randn', backend, shape, dtype)
split_axis = 1
tn_result = linalg.rq(tensor, split_axis, non_negative_diagonal=False)
if backend is None:
backend = backend_contextmanager.get_default_backend()
backend_obj = backends.backend_factory.get_backend(backend)
backend_result = backend_obj.rq(tensor.array, split_axis)
tn_arrays = [t.array for t in tn_result]
for tn_arr, backend_arr in zip(tn_arrays, backend_result):
testing_utils.assert_allclose(tn_arr, backend_arr, backend_obj)
@pytest.mark.parametrize("dtype", testing_utils.np_float_dtypes)
@pytest.mark.parametrize("backend",
['jax', 'symmetric', 'numpy', 'pytorch', 'tensorflow'])
def test_qr_default(backend, dtype):
np.random.seed(10)
shape = (3, 6, 4, 2)
dtype = testing_utils.np_dtype_to_backend(backend, dtype)
tensor = initialize_tensor('randn', backend, shape, dtype)
split_axis = 1
tn_result = linalg.qr(tensor, split_axis)
result2 = linalg.qr(tensor, split_axis, non_negative_diagonal=False)
tn_arrays = [t.array for t in tn_result]
arrays2 = [t.array for t in result2]
backend_obj = backends.backend_factory.get_backend(backend)
for tn_arr, arr2 in zip(tn_arrays, arrays2):
testing_utils.assert_allclose(tn_arr, arr2, backend_obj)
@pytest.mark.parametrize("dtype", testing_utils.np_float_dtypes)
@pytest.mark.parametrize("backend",
['jax', 'symmetric', 'numpy', 'pytorch', 'tensorflow'])
def test_rq_default(backend, dtype):
np.random.seed(10)
shape = (3, 6, 4, 2)
dtype = testing_utils.np_dtype_to_backend(backend, dtype)
tensor = initialize_tensor('randn', backend, shape, dtype)
split_axis = 1
tn_result = linalg.rq(tensor, split_axis)
result2 = linalg.rq(tensor, split_axis, non_negative_diagonal=False)
tn_arrays = [t.array for t in tn_result]
arrays2 = [t.array for t in result2]
backend_obj = backends.backend_factory.get_backend(backend)
for tn_arr, arr2 in zip(tn_arrays, arrays2):
testing_utils.assert_allclose(tn_arr, arr2, backend_obj)
|
import asyncio
import voluptuous as vol
from homeassistant.const import ATTR_GPS_ACCURACY, STATE_HOME
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_track_utc_time_change
from homeassistant.helpers.typing import ConfigType, GPSType, HomeAssistantType
from homeassistant.loader import bind_hass
from . import legacy, setup
from .config_entry import ( # noqa: F401 pylint: disable=unused-import
async_setup_entry,
async_unload_entry,
)
from .const import (
ATTR_ATTRIBUTES,
ATTR_BATTERY,
ATTR_CONSIDER_HOME,
ATTR_DEV_ID,
ATTR_GPS,
ATTR_HOST_NAME,
ATTR_LOCATION_NAME,
ATTR_MAC,
ATTR_SOURCE_TYPE,
CONF_CONSIDER_HOME,
CONF_NEW_DEVICE_DEFAULTS,
CONF_SCAN_INTERVAL,
CONF_TRACK_NEW,
DEFAULT_CONSIDER_HOME,
DEFAULT_TRACK_NEW,
DOMAIN,
PLATFORM_TYPE_LEGACY,
SOURCE_TYPE_BLUETOOTH,
SOURCE_TYPE_BLUETOOTH_LE,
SOURCE_TYPE_GPS,
SOURCE_TYPE_ROUTER,
)
from .legacy import DeviceScanner # noqa: F401 pylint: disable=unused-import
SERVICE_SEE = "see"
SOURCE_TYPES = (
SOURCE_TYPE_GPS,
SOURCE_TYPE_ROUTER,
SOURCE_TYPE_BLUETOOTH,
SOURCE_TYPE_BLUETOOTH_LE,
)
NEW_DEVICE_DEFAULTS_SCHEMA = vol.Any(
None,
vol.Schema({vol.Optional(CONF_TRACK_NEW, default=DEFAULT_TRACK_NEW): cv.boolean}),
)
PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_SCAN_INTERVAL): cv.time_period,
vol.Optional(CONF_TRACK_NEW): cv.boolean,
vol.Optional(CONF_CONSIDER_HOME, default=DEFAULT_CONSIDER_HOME): vol.All(
cv.time_period, cv.positive_timedelta
),
vol.Optional(CONF_NEW_DEVICE_DEFAULTS, default={}): NEW_DEVICE_DEFAULTS_SCHEMA,
}
)
PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE.extend(PLATFORM_SCHEMA.schema)
SERVICE_SEE_PAYLOAD_SCHEMA = vol.Schema(
vol.All(
cv.has_at_least_one_key(ATTR_MAC, ATTR_DEV_ID),
{
ATTR_MAC: cv.string,
ATTR_DEV_ID: cv.string,
ATTR_HOST_NAME: cv.string,
ATTR_LOCATION_NAME: cv.string,
ATTR_GPS: cv.gps,
ATTR_GPS_ACCURACY: cv.positive_int,
ATTR_BATTERY: cv.positive_int,
ATTR_ATTRIBUTES: dict,
ATTR_SOURCE_TYPE: vol.In(SOURCE_TYPES),
ATTR_CONSIDER_HOME: cv.time_period,
# Temp workaround for iOS app introduced in 0.65
vol.Optional("battery_status"): str,
vol.Optional("hostname"): str,
},
)
)
@bind_hass
def is_on(hass: HomeAssistantType, entity_id: str):
"""Return the state if any or a specified device is home."""
return hass.states.is_state(entity_id, STATE_HOME)
def see(
hass: HomeAssistantType,
mac: str = None,
dev_id: str = None,
host_name: str = None,
location_name: str = None,
gps: GPSType = None,
gps_accuracy=None,
battery: int = None,
attributes: dict = None,
):
"""Call service to notify you see device."""
data = {
key: value
for key, value in (
(ATTR_MAC, mac),
(ATTR_DEV_ID, dev_id),
(ATTR_HOST_NAME, host_name),
(ATTR_LOCATION_NAME, location_name),
(ATTR_GPS, gps),
(ATTR_GPS_ACCURACY, gps_accuracy),
(ATTR_BATTERY, battery),
)
if value is not None
}
if attributes:
data[ATTR_ATTRIBUTES] = attributes
hass.services.call(DOMAIN, SERVICE_SEE, data)
async def async_setup(hass: HomeAssistantType, config: ConfigType):
"""Set up the device tracker."""
tracker = await legacy.get_tracker(hass, config)
legacy_platforms = await setup.async_extract_config(hass, config)
setup_tasks = [
legacy_platform.async_setup_legacy(hass, tracker)
for legacy_platform in legacy_platforms
]
if setup_tasks:
await asyncio.wait(setup_tasks)
async def async_platform_discovered(p_type, info):
"""Load a platform."""
platform = await setup.async_create_platform_type(hass, config, p_type, {})
if platform is None or platform.type != PLATFORM_TYPE_LEGACY:
return
await platform.async_setup_legacy(hass, tracker, info)
discovery.async_listen_platform(hass, DOMAIN, async_platform_discovered)
# Clean up stale devices
async_track_utc_time_change(
hass, tracker.async_update_stale, second=range(0, 60, 5)
)
async def async_see_service(call):
"""Service to see a device."""
# Temp workaround for iOS, introduced in 0.65
data = dict(call.data)
data.pop("hostname", None)
data.pop("battery_status", None)
await tracker.async_see(**data)
hass.services.async_register(
DOMAIN, SERVICE_SEE, async_see_service, SERVICE_SEE_PAYLOAD_SCHEMA
)
    # Restore previously tracked devices
await tracker.async_setup_tracked_device()
return True
|
import unittest
from unittest.mock import patch, Mock
from flask import Flask
from lemur.plugins.lemur_acme import plugin, powerdns
class TestPowerdns(unittest.TestCase):
@patch("lemur.plugins.lemur_acme.plugin.dns_provider_service")
def setUp(self, mock_dns_provider_service):
self.ACMEIssuerPlugin = plugin.ACMEIssuerPlugin()
self.acme = plugin.AcmeHandler()
mock_dns_provider = Mock()
mock_dns_provider.name = "powerdns"
mock_dns_provider.credentials = "{}"
mock_dns_provider.provider_type = "powerdns"
self.acme.dns_providers_for_domain = {
"www.test.com": [mock_dns_provider],
"test.fakedomain.net": [mock_dns_provider],
}
        # Create a new Flask application for the duration of the test. In Python 3.8, manually pushing the
        # application context is needed to run tests in a dev environment without the error
        # 'Working outside of application context'.
_app = Flask('lemur_test_acme')
self.ctx = _app.app_context()
assert self.ctx
self.ctx.push()
def tearDown(self):
self.ctx.pop()
@patch("lemur.plugins.lemur_acme.powerdns.current_app")
def test_get_zones(self, mock_current_app):
account_number = "1234567890"
path = "a/b/c"
zones = ['example.com', 'test.example.com']
get_response = [{'account': '', 'dnssec': 'False', 'id': 'example.com.', 'kind': 'Master', 'last_check': 0, 'masters': [],
'name': 'example.com.', 'notified_serial': '2019111907', 'serial': '2019111907',
'url': '/api/v1/servers/localhost/zones/example.com.'},
{'account': '', 'dnssec': 'False', 'id': 'bad.example.com.', 'kind': 'Secondary', 'last_check': 0, 'masters': [],
'name': 'bad.example.com.', 'notified_serial': '2018053104', 'serial': '2018053104',
'url': '/api/v1/servers/localhost/zones/bad.example.com.'},
{'account': '', 'dnssec': 'False', 'id': 'test.example.com.', 'kind': 'Master', 'last_check': 0,
'masters': [], 'name': 'test.example.com.', 'notified_serial': '2019112501', 'serial': '2019112501',
'url': '/api/v1/servers/localhost/zones/test.example.com.'}]
powerdns._check_conf = Mock()
powerdns._get = Mock(path)
powerdns._get.side_effect = [get_response]
mock_current_app.config.get = Mock(return_value="localhost")
result = powerdns.get_zones(account_number)
self.assertEqual(result, zones)
def test_get_zone_name(self):
zones = ['example.com', 'test.example.com']
zone = "test.example.com"
domain = "_acme-challenge.test.example.com"
account_number = "1234567890"
powerdns.get_zones = Mock(return_value=zones)
result = powerdns._get_zone_name(domain, account_number)
self.assertEqual(result, zone)
@patch("lemur.plugins.lemur_acme.powerdns.current_app")
def test_create_txt_record_write_only(self, mock_current_app):
domain = "_acme_challenge.test.example.com"
zone = "test.example.com"
token = "ABCDEFGHIJ"
account_number = "1234567890"
change_id = (domain, token)
powerdns._check_conf = Mock()
powerdns._get_txt_records = Mock(return_value=[])
powerdns._get_zone_name = Mock(return_value=zone)
mock_current_app.logger.debug = Mock()
mock_current_app.config.get = Mock(return_value="localhost")
powerdns._patch = Mock()
log_data = {
"function": "create_txt_record",
"fqdn": domain,
"token": token,
"message": "TXT record(s) successfully created"
}
result = powerdns.create_txt_record(domain, token, account_number)
mock_current_app.logger.debug.assert_called_with(log_data)
self.assertEqual(result, change_id)
@patch("lemur.plugins.lemur_acme.powerdns.current_app")
def test_create_txt_record_append(self, mock_current_app):
domain = "_acme_challenge.test.example.com"
zone = "test.example.com"
token = "ABCDEFGHIJ"
account_number = "1234567890"
change_id = (domain, token)
powerdns._check_conf = Mock()
cur_token = "123456"
cur_records = [powerdns.Record({'name': domain, 'content': f"\"{cur_token}\"", 'disabled': False})]
powerdns._get_txt_records = Mock(return_value=cur_records)
powerdns._get_zone_name = Mock(return_value=zone)
mock_current_app.logger.debug = Mock()
mock_current_app.config.get = Mock(return_value="localhost")
powerdns._patch = Mock()
log_data = {
"function": "create_txt_record",
"fqdn": domain,
"token": token,
"message": "TXT record(s) successfully created"
}
expected_path = "/api/v1/servers/localhost/zones/test.example.com."
expected_payload = {
"rrsets": [
{
"name": domain + ".",
"type": "TXT",
"ttl": 300,
"changetype": "REPLACE",
"records": [
{
"content": f"\"{token}\"",
"disabled": False
},
{
"content": f"\"{cur_token}\"",
"disabled": False
}
],
"comments": []
}
]
}
result = powerdns.create_txt_record(domain, token, account_number)
mock_current_app.logger.debug.assert_called_with(log_data)
powerdns._patch.assert_called_with(expected_path, expected_payload)
self.assertEqual(result, change_id)
@patch("lemur.plugins.lemur_acme.powerdns.dnsutil")
@patch("lemur.plugins.lemur_acme.powerdns.current_app")
@patch("lemur.extensions.metrics")
@patch("time.sleep")
def test_wait_for_dns_change(self, mock_sleep, mock_metrics, mock_current_app, mock_dnsutil):
domain = "_acme-challenge.test.example.com"
token1 = "ABCDEFG"
token2 = "HIJKLMN"
zone_name = "test.example.com"
nameserver = "1.1.1.1"
change_id = (domain, token1)
powerdns._check_conf = Mock()
mock_records = (token2, token1)
mock_current_app.config.get = Mock(return_value=1)
powerdns._get_zone_name = Mock(return_value=zone_name)
mock_dnsutil.get_authoritative_nameserver = Mock(return_value=nameserver)
mock_dnsutil.get_dns_records = Mock(return_value=mock_records)
mock_sleep.return_value = False
mock_metrics.send = Mock()
mock_current_app.logger.debug = Mock()
powerdns.wait_for_dns_change(change_id)
log_data = {
"function": "wait_for_dns_change",
"fqdn": domain,
"status": True,
"message": "Record status on PowerDNS authoritative server"
}
mock_current_app.logger.debug.assert_called_with(log_data)
@patch("lemur.plugins.lemur_acme.powerdns.current_app")
def test_delete_txt_record(self, mock_current_app):
domain = "_acme_challenge.test.example.com"
zone = "test.example.com"
token = "ABCDEFGHIJ"
account_number = "1234567890"
change_id = (domain, token)
powerdns._check_conf = Mock()
powerdns._get_zone_name = Mock(return_value=zone)
mock_current_app.logger.debug = Mock()
mock_current_app.config.get = Mock(return_value="localhost")
powerdns._patch = Mock()
log_data = {
"function": "delete_txt_record",
"fqdn": domain,
"token": token,
"message": "Unable to delete TXT record: Token not found in existing TXT records"
}
powerdns.delete_txt_record(change_id, account_number, domain, token)
mock_current_app.logger.debug.assert_called_with(log_data)
|
from datetime import timedelta
import logging
from pylaunches.api import Launches
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_NAME
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by Launch Library."
DEFAULT_NAME = "Next launch"
SCAN_INTERVAL = timedelta(hours=1)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Create the launch sensor."""
name = config[CONF_NAME]
session = async_get_clientsession(hass)
launches = Launches(hass.loop, session)
sensor = [LaunchLibrarySensor(launches, name)]
async_add_entities(sensor, True)
class LaunchLibrarySensor(Entity):
"""Representation of a launch_library Sensor."""
def __init__(self, launches, name):
"""Initialize the sensor."""
self.launches = launches
self._attributes = {}
self._name = name
self._state = None
async def async_update(self):
"""Get the latest data."""
await self.launches.get_launches()
if self.launches.launches is None:
_LOGGER.error("No data received")
return
try:
data = self.launches.launches[0]
self._state = data["name"]
self._attributes["launch_time"] = data["start"]
self._attributes["agency"] = data["agency"]
agency_country_code = data["agency_country_code"]
self._attributes["agency_country_code"] = agency_country_code
self._attributes["stream"] = data["stream"]
self._attributes[ATTR_ATTRIBUTION] = ATTRIBUTION
except (KeyError, IndexError) as error:
_LOGGER.debug("Error getting data, %s", error)
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def icon(self):
"""Return the icon of the sensor."""
return "mdi:rocket"
@property
def device_state_attributes(self):
"""Return attributes for the sensor."""
return self._attributes
|
from flasgger import Swagger
from flask import Flask, jsonify
app = Flask(__name__)
app.config['SWAGGER'] = {
'title': 'Vendor extension test',
'uiversion': 2,
'x-groupTag': 'Test',
}
swag = Swagger(app)
def test_swag(client, specs_data):
"""
    This test runs automatically in Travis CI
:param client: Flask app test client
:param specs_data: {'url': {swag_specs}} for every spec in app
"""
assert 'x-groupTag' in specs_data['/apispec_1.json']
assert specs_data['/apispec_1.json']['x-groupTag'] == 'Test'
if __name__ == "__main__":
app.run(debug=True)
|
import os
import sys
import subprocess
import logging
import time
import platform
import re
from functools import partial
from plumbum.path.local import LocalPath, LocalWorkdir
from tempfile import mkdtemp
from contextlib import contextmanager
from plumbum.path.remote import RemotePath
from plumbum.commands import CommandNotFound, ConcreteCommand
from plumbum.machines.session import ShellSession
from plumbum.lib import ProcInfo, IS_WIN32, six, StaticProperty
from plumbum.commands.daemons import win32_daemonize, posix_daemonize
from plumbum.commands.processes import iter_lines
from plumbum.machines.base import BaseMachine
from plumbum.machines.base import PopenAddons
from plumbum.machines.env import BaseEnv
if sys.version_info[0] >= 3:
# python 3 has the new-and-improved subprocess module
from subprocess import Popen, PIPE
has_new_subprocess = True
else:
# otherwise, see if we have subprocess32
try:
from subprocess32 import Popen, PIPE
has_new_subprocess = True
except ImportError:
from subprocess import Popen, PIPE
has_new_subprocess = False
class PlumbumLocalPopen(PopenAddons):
iter_lines = iter_lines
def __init__(self, *args, **kwargs):
self._proc = Popen(*args, **kwargs)
def __iter__(self):
return self.iter_lines()
def __enter__(self):
return self._proc.__enter__()
def __exit__(self, *args, **kwargs):
return self._proc.__exit__(*args, **kwargs)
def __getattr__(self, name):
return getattr(self._proc, name)
if IS_WIN32:
from plumbum.machines._windows import get_pe_subsystem, IMAGE_SUBSYSTEM_WINDOWS_CUI
logger = logging.getLogger("plumbum.local")
#===================================================================================================
# Environment
#===================================================================================================
class LocalEnv(BaseEnv):
"""The local machine's environment; exposes a dict-like interface"""
__slots__ = ()
CASE_SENSITIVE = not IS_WIN32
def __init__(self):
# os.environ already takes care of upper'ing on windows
self._curr = os.environ.copy()
BaseEnv.__init__(self, LocalPath, os.path.pathsep)
if IS_WIN32 and "HOME" not in self and self.home is not None:
self["HOME"] = self.home
def expand(self, expr):
"""Expands any environment variables and home shortcuts found in ``expr``
(like ``os.path.expanduser`` combined with ``os.path.expandvars``)
:param expr: An expression containing environment variables (as ``$FOO``) or
home shortcuts (as ``~/.bashrc``)
:returns: The expanded string"""
prev = os.environ
os.environ = self.getdict()
try:
output = os.path.expanduser(os.path.expandvars(expr))
finally:
os.environ = prev
return output
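    # Illustrative sketch of ``expand`` (assumes ``$HOME`` is set; actual output
    # depends on the machine):
    #
    #     local.env.expand("$HOME/.bashrc")   # -> e.g. "/home/alice/.bashrc"
    #     local.env.expand("~/projects")      # -> e.g. "/home/alice/projects"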
def expanduser(self, expr):
"""Expand home shortcuts (e.g., ``~/foo/bar`` or ``~john/foo/bar``)
:param expr: An expression containing home shortcuts
:returns: The expanded string"""
prev = os.environ
os.environ = self.getdict()
try:
output = os.path.expanduser(expr)
finally:
os.environ = prev
return output
#===================================================================================================
# Local Commands
#===================================================================================================
class LocalCommand(ConcreteCommand):
__slots__ = ()
QUOTE_LEVEL = 2
def __init__(self, executable, encoding="auto"):
ConcreteCommand.__init__(
self, executable, local.custom_encoding
if encoding == "auto" else encoding)
@property
def machine(self):
return local
def popen(self, args=(), cwd=None, env=None, **kwargs):
if isinstance(args, six.string_types):
args = (args, )
return self.machine._popen(
self.executable,
self.formulate(0, args),
cwd=self.cwd if cwd is None else cwd,
env=self.env if env is None else env,
**kwargs)
#===================================================================================================
# Local Machine
#===================================================================================================
class LocalMachine(BaseMachine):
"""The *local machine* (a singleton object). It serves as an entry point to everything
related to the local machine, such as working directory and environment manipulation,
command creation, etc.
Attributes:
* ``cwd`` - the local working directory
* ``env`` - the local environment
* ``custom_encoding`` - the local machine's default encoding (``sys.getfilesystemencoding()``)
"""
cwd = StaticProperty(LocalWorkdir)
env = LocalEnv()
custom_encoding = sys.getfilesystemencoding()
uname = platform.uname()[0]
def __init__(self):
self._as_user_stack = []
if IS_WIN32:
_EXTENSIONS = [""] + env.get("PATHEXT", ":.exe:.bat").lower().split(
os.path.pathsep)
@classmethod
def _which(cls, progname):
progname = progname.lower()
for p in cls.env.path:
for ext in cls._EXTENSIONS:
fn = p / (progname + ext)
if fn.access("x") and not fn.is_dir():
return fn
return None
else:
@classmethod
def _which(cls, progname):
for p in cls.env.path:
fn = p / progname
if fn.access("x") and not fn.is_dir():
return fn
return None
@classmethod
def which(cls, progname):
"""Looks up a program in the ``PATH``. If the program is not found, raises
:class:`CommandNotFound <plumbum.commands.CommandNotFound>`
:param progname: The program's name. Note that if underscores (``_``) are present
in the name, and the exact name is not found, they will be replaced
in turn by hyphens (``-``) then periods (``.``), and the name will
be looked up again for each alternative
:returns: A :class:`LocalPath <plumbum.machines.local.LocalPath>`
"""
alternatives = [progname]
if "_" in progname:
alternatives.append(progname.replace("_", "-"))
alternatives.append(progname.replace("_", "."))
for pn in alternatives:
path = cls._which(pn)
if path:
return path
raise CommandNotFound(progname, list(cls.env.path))
def path(self, *parts):
"""A factory for :class:`LocalPaths <plumbum.path.local.LocalPath>`.
Usage: ``p = local.path("/usr", "lib", "python2.7")``
"""
parts2 = [str(self.cwd)]
for p in parts:
if isinstance(p, RemotePath):
raise TypeError("Cannot construct LocalPath from %r" % (p, ))
parts2.append(self.env.expanduser(str(p)))
return LocalPath(os.path.join(*parts2))
def __contains__(self, cmd):
try:
self[cmd]
except CommandNotFound:
return False
else:
return True
def __getitem__(self, cmd):
"""Returns a `Command` object representing the given program. ``cmd`` can be a string or
a :class:`LocalPath <plumbum.path.local.LocalPath>`; if it is a path, a command
representing this path will be returned; otherwise, the program name will be looked up
in the system's ``PATH`` (using ``which``). Usage::
ls = local["ls"]
"""
if isinstance(cmd, LocalPath):
return LocalCommand(cmd)
elif not isinstance(cmd, RemotePath):
if "/" in cmd or "\\" in cmd:
# assume path
return LocalCommand(local.path(cmd))
else:
# search for command
return LocalCommand(self.which(cmd))
else:
raise TypeError("cmd must not be a RemotePath: %r" % (cmd, ))
def _popen(self,
executable,
argv,
stdin=PIPE,
stdout=PIPE,
stderr=PIPE,
cwd=None,
env=None,
new_session=False,
**kwargs):
if new_session:
if has_new_subprocess:
kwargs["start_new_session"] = True
elif IS_WIN32:
kwargs["creationflags"] = kwargs.get(
"creationflags", 0) | subprocess.CREATE_NEW_PROCESS_GROUP
else:
def preexec_fn(prev_fn=kwargs.get("preexec_fn", lambda: None)):
os.setsid()
prev_fn()
kwargs["preexec_fn"] = preexec_fn
if IS_WIN32 and "startupinfo" not in kwargs and stdin not in (
sys.stdin, None):
subsystem = get_pe_subsystem(str(executable))
if subsystem == IMAGE_SUBSYSTEM_WINDOWS_CUI:
# don't open a new console
sui = subprocess.STARTUPINFO()
kwargs["startupinfo"] = sui
if hasattr(subprocess, "_subprocess"):
sui.dwFlags |= subprocess._subprocess.STARTF_USESHOWWINDOW # @UndefinedVariable
sui.wShowWindow = subprocess._subprocess.SW_HIDE # @UndefinedVariable
else:
sui.dwFlags |= subprocess.STARTF_USESHOWWINDOW # @UndefinedVariable
sui.wShowWindow = subprocess.SW_HIDE # @UndefinedVariable
if not has_new_subprocess and "close_fds" not in kwargs:
if IS_WIN32 and (stdin is not None or stdout is not None
or stderr is not None):
# we can't close fds if we're on windows and we want to redirect any std handle
kwargs["close_fds"] = False
else:
kwargs["close_fds"] = True
if cwd is None:
cwd = self.cwd
if env is None:
env = self.env
if isinstance(env, BaseEnv):
env = env.getdict()
if self._as_user_stack:
argv, executable = self._as_user_stack[-1](argv)
logger.debug("Running %r", argv)
proc = PlumbumLocalPopen(
argv,
executable=str(executable),
stdin=stdin,
stdout=stdout,
stderr=stderr,
cwd=str(cwd),
env=env,
**kwargs) # bufsize = 4096
proc._start_time = time.time()
proc.custom_encoding = self.custom_encoding
proc.argv = argv
return proc
def daemonic_popen(self,
command,
cwd="/",
stdout=None,
stderr=None,
append=True):
"""
On POSIX systems:
            Run ``command`` as a UNIX daemon: fork a child process to ``setsid``, redirect std handles to /dev/null,
            umask, close all fds, chdir to ``cwd``, then fork and exec ``command``. Returns a ``Popen`` process that
            can be used to poll/wait for the executed command (but keep in mind that you cannot access its std handles)
On Windows:
Run ``command`` as a "Windows daemon": detach from controlling console and create a new process group.
            This means that the command will not receive console events and will survive its parent's termination.
Returns a ``Popen`` object.
.. note:: this does not run ``command`` as a system service, only detaches it from its parent.
.. versionadded:: 1.3
"""
if IS_WIN32:
return win32_daemonize(command, cwd, stdout, stderr, append)
else:
return posix_daemonize(command, cwd, stdout, stderr, append)
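    # Illustrative sketch of ``daemonic_popen`` (assumes a POSIX host with ``sleep``
    # on the PATH; the command below is only an example):
    #
    #     proc = local.daemonic_popen(local["sleep"]["600"])
    #     proc.poll()   # the daemonized child keeps running even if this process exits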
if IS_WIN32:
def list_processes(self):
"""
Returns information about all running processes (on Windows: using ``tasklist``)
.. versionadded:: 1.3
"""
import csv
tasklist = local["tasklist"]
output = tasklist("/V", "/FO", "CSV")
if not six.PY3:
# The Py2 csv reader does not support non-ascii values
output = output.encode('ascii', 'ignore')
lines = output.splitlines()
rows = csv.reader(lines)
header = next(rows)
imgidx = header.index('Image Name')
pididx = header.index('PID')
statidx = header.index('Status')
useridx = header.index('User Name')
for row in rows:
yield ProcInfo(
int(row[pididx]), row[useridx], row[statidx], row[imgidx])
else:
def list_processes(self):
"""
Returns information about all running processes (on POSIX systems: using ``ps``)
.. versionadded:: 1.3
"""
ps = self["ps"]
lines = ps("-e", "-o", "pid,uid,stat,args").splitlines()
lines.pop(0) # header
for line in lines:
parts = line.strip().split()
yield ProcInfo(
int(parts[0]), int(parts[1]), parts[2],
" ".join(parts[3:]))
def pgrep(self, pattern):
"""
Process grep: return information about all processes whose command-line args match the given regex pattern
"""
pat = re.compile(pattern)
for procinfo in self.list_processes():
if pat.search(procinfo.args):
yield procinfo
def session(self, new_session=False):
"""Creates a new :class:`ShellSession <plumbum.session.ShellSession>` object; this
invokes ``/bin/sh`` and executes commands on it over stdin/stdout/stderr"""
return ShellSession(self["sh"].popen(new_session=new_session))
@contextmanager
def tempdir(self):
"""A context manager that creates a temporary directory, which is removed when the context
exits"""
dir = self.path(mkdtemp()) # @ReservedAssignment
try:
yield dir
finally:
dir.delete()
@contextmanager
def as_user(self, username=None):
"""Run nested commands as the given user. For example::
head = local["head"]
head("-n1", "/dev/sda1") # this will fail...
with local.as_user():
head("-n1", "/dev/sda1")
:param username: The user to run commands as. If not given, root (or Administrator) is assumed
"""
if IS_WIN32:
if username is None:
username = "Administrator"
self._as_user_stack.append(lambda argv: (["runas", "/savecred", "/user:%s" % (username,),
'"' + " ".join(str(a) for a in argv) + '"'], self.which("runas")))
else:
if username is None:
self._as_user_stack.append(
lambda argv: (["sudo"] + list(argv), self.which("sudo")))
else:
self._as_user_stack.append(lambda argv: (["sudo", "-u", username] + list(argv), self.which("sudo")))
try:
yield
finally:
self._as_user_stack.pop(-1)
def as_root(self):
"""A shorthand for :func:`as_user("root") <plumbum.machines.local.LocalMachine.as_user>`"""
return self.as_user()
python = LocalCommand(sys.executable, custom_encoding)
"""A command that represents the current python interpreter (``sys.executable``)"""
local = LocalMachine()
"""The *local machine* (a singleton object). It serves as an entry point to everything
related to the local machine, such as working directory and environment manipulation,
command creation, etc.
Attributes:
* ``cwd`` - the local working directory
* ``env`` - the local environment
* ``custom_encoding`` - the local machine's default encoding (``sys.getfilesystemencoding()``)
"""
|
import logging
import typing
import voluptuous as vol
from homeassistant.const import (
ATTR_EDITABLE,
CONF_ICON,
CONF_ID,
CONF_NAME,
SERVICE_RELOAD,
)
from homeassistant.core import callback
from homeassistant.helpers import collection
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.restore_state import RestoreEntity
import homeassistant.helpers.service
from homeassistant.helpers.storage import Store
from homeassistant.helpers.typing import ConfigType, HomeAssistantType, ServiceCallType
_LOGGER = logging.getLogger(__name__)
DOMAIN = "input_select"
CONF_INITIAL = "initial"
CONF_OPTIONS = "options"
ATTR_OPTION = "option"
ATTR_OPTIONS = "options"
SERVICE_SELECT_OPTION = "select_option"
SERVICE_SELECT_NEXT = "select_next"
SERVICE_SELECT_PREVIOUS = "select_previous"
SERVICE_SET_OPTIONS = "set_options"
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1
CREATE_FIELDS = {
vol.Required(CONF_NAME): vol.All(str, vol.Length(min=1)),
vol.Required(CONF_OPTIONS): vol.All(cv.ensure_list, vol.Length(min=1), [cv.string]),
vol.Optional(CONF_INITIAL): cv.string,
vol.Optional(CONF_ICON): cv.icon,
}
UPDATE_FIELDS = {
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_OPTIONS): vol.All(cv.ensure_list, vol.Length(min=1), [cv.string]),
vol.Optional(CONF_INITIAL): cv.string,
vol.Optional(CONF_ICON): cv.icon,
}
def _cv_input_select(cfg):
"""Configure validation helper for input select (voluptuous)."""
options = cfg[CONF_OPTIONS]
initial = cfg.get(CONF_INITIAL)
if initial is not None and initial not in options:
raise vol.Invalid(
f"initial state {initial} is not part of the options: {','.join(options)}"
)
return cfg
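# For illustration, a config that passes the validator above and one that fails
# (option values are made up):
#
#     _cv_input_select({"options": ["low", "high"], "initial": "low"})   # returns the config
#     _cv_input_select({"options": ["low", "high"], "initial": "off"})   # raises vol.Invalid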
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: cv.schema_with_slug_keys(
vol.All(
{
vol.Optional(CONF_NAME): cv.string,
vol.Required(CONF_OPTIONS): vol.All(
cv.ensure_list, vol.Length(min=1), [cv.string]
),
vol.Optional(CONF_INITIAL): cv.string,
vol.Optional(CONF_ICON): cv.icon,
},
_cv_input_select,
)
)
},
extra=vol.ALLOW_EXTRA,
)
RELOAD_SERVICE_SCHEMA = vol.Schema({})
async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool:
"""Set up an input select."""
component = EntityComponent(_LOGGER, DOMAIN, hass)
id_manager = collection.IDManager()
yaml_collection = collection.YamlCollection(
logging.getLogger(f"{__name__}.yaml_collection"), id_manager
)
collection.attach_entity_component_collection(
component, yaml_collection, InputSelect.from_yaml
)
storage_collection = InputSelectStorageCollection(
Store(hass, STORAGE_VERSION, STORAGE_KEY),
logging.getLogger(f"{__name__}.storage_collection"),
id_manager,
)
collection.attach_entity_component_collection(
component, storage_collection, InputSelect
)
await yaml_collection.async_load(
[{CONF_ID: id_, **cfg} for id_, cfg in config.get(DOMAIN, {}).items()]
)
await storage_collection.async_load()
collection.StorageCollectionWebsocket(
storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDS
).async_setup(hass)
collection.attach_entity_registry_cleaner(hass, DOMAIN, DOMAIN, yaml_collection)
collection.attach_entity_registry_cleaner(hass, DOMAIN, DOMAIN, storage_collection)
async def reload_service_handler(service_call: ServiceCallType) -> None:
"""Reload yaml entities."""
conf = await component.async_prepare_reload(skip_reset=True)
if conf is None:
conf = {DOMAIN: {}}
await yaml_collection.async_load(
[{CONF_ID: id_, **cfg} for id_, cfg in conf.get(DOMAIN, {}).items()]
)
homeassistant.helpers.service.async_register_admin_service(
hass,
DOMAIN,
SERVICE_RELOAD,
reload_service_handler,
schema=RELOAD_SERVICE_SCHEMA,
)
component.async_register_entity_service(
SERVICE_SELECT_OPTION,
{vol.Required(ATTR_OPTION): cv.string},
"async_select_option",
)
component.async_register_entity_service(
SERVICE_SELECT_NEXT,
{},
callback(lambda entity, call: entity.async_offset_index(1)),
)
component.async_register_entity_service(
SERVICE_SELECT_PREVIOUS,
{},
callback(lambda entity, call: entity.async_offset_index(-1)),
)
component.async_register_entity_service(
SERVICE_SET_OPTIONS,
{
vol.Required(ATTR_OPTIONS): vol.All(
cv.ensure_list, vol.Length(min=1), [cv.string]
)
},
"async_set_options",
)
return True
class InputSelectStorageCollection(collection.StorageCollection):
"""Input storage based collection."""
CREATE_SCHEMA = vol.Schema(vol.All(CREATE_FIELDS, _cv_input_select))
UPDATE_SCHEMA = vol.Schema(UPDATE_FIELDS)
async def _process_create_data(self, data: typing.Dict) -> typing.Dict:
"""Validate the config is valid."""
return self.CREATE_SCHEMA(data)
@callback
def _get_suggested_id(self, info: typing.Dict) -> str:
"""Suggest an ID based on the config."""
return info[CONF_NAME]
async def _update_data(self, data: dict, update_data: typing.Dict) -> typing.Dict:
"""Return a new updated data object."""
update_data = self.UPDATE_SCHEMA(update_data)
return _cv_input_select({**data, **update_data})
class InputSelect(RestoreEntity):
"""Representation of a select input."""
def __init__(self, config: typing.Dict):
"""Initialize a select input."""
self._config = config
self.editable = True
self._current_option = config.get(CONF_INITIAL)
@classmethod
def from_yaml(cls, config: typing.Dict) -> "InputSelect":
"""Return entity instance initialized from yaml storage."""
input_select = cls(config)
input_select.entity_id = f"{DOMAIN}.{config[CONF_ID]}"
input_select.editable = False
return input_select
async def async_added_to_hass(self):
"""Run when entity about to be added."""
await super().async_added_to_hass()
if self._current_option is not None:
return
state = await self.async_get_last_state()
if not state or state.state not in self._options:
self._current_option = self._options[0]
else:
self._current_option = state.state
@property
def should_poll(self):
"""If entity should be polled."""
return False
@property
def name(self):
"""Return the name of the select input."""
return self._config.get(CONF_NAME)
@property
def icon(self):
"""Return the icon to be used for this entity."""
return self._config.get(CONF_ICON)
@property
def _options(self) -> typing.List[str]:
"""Return a list of selection options."""
return self._config[CONF_OPTIONS]
@property
def state(self):
"""Return the state of the component."""
return self._current_option
@property
def state_attributes(self):
"""Return the state attributes."""
return {ATTR_OPTIONS: self._config[ATTR_OPTIONS], ATTR_EDITABLE: self.editable}
@property
def unique_id(self) -> typing.Optional[str]:
"""Return unique id for the entity."""
return self._config[CONF_ID]
@callback
def async_select_option(self, option):
"""Select new option."""
if option not in self._options:
_LOGGER.warning(
"Invalid option: %s (possible options: %s)",
option,
", ".join(self._options),
)
return
self._current_option = option
self.async_write_ha_state()
@callback
def async_offset_index(self, offset):
"""Offset current index."""
current_index = self._options.index(self._current_option)
new_index = (current_index + offset) % len(self._options)
self._current_option = self._options[new_index]
self.async_write_ha_state()
@callback
def async_set_options(self, options):
"""Set options."""
self._current_option = options[0]
self._config[CONF_OPTIONS] = options
self.async_write_ha_state()
async def async_update_config(self, config: typing.Dict) -> None:
"""Handle when the config is updated."""
self._config = config
self.async_write_ha_state()
|
from behave import given
from behave import then
from behave import when
from docker.errors import APIError
from paasta_tools.utils import _run
from paasta_tools.utils import get_docker_client
@given("Docker is available")
def docker_is_available(context):
docker_client = get_docker_client()
assert docker_client.ping()
context.docker_client = docker_client
@given("a running docker container with task id {task_id} and image {image_name}")
def create_docker_container(context, task_id, image_name):
container_name = "paasta-itest-execute-in-containers"
try:
context.docker_client.remove_container(container_name, force=True)
except APIError:
pass
context.docker_client.pull(image_name)
container = context.docker_client.create_container(
name=container_name,
image=image_name,
command="/bin/sleep infinity",
environment={"MESOS_TASK_ID": task_id},
)
context.docker_client.start(container=container.get("Id"))
context.running_container_id = container.get("Id")
@when(
"we paasta_execute_docker_command a command with exit code {code} in container with task id {task_id}"
)
def run_command_in_container(context, code, task_id):
cmd = f'../paasta_tools/paasta_execute_docker_command.py -i {task_id} -c "exit {code}"'
print("Running cmd %s" % cmd)
exit_code, output = _run(cmd)
print(f"Got exitcode {exit_code} with output:\n{output}")
context.return_code = exit_code
@then("the exit code is {code}")
def paasta_execute_docker_command_result(context, code):
assert int(code) == int(context.return_code)
@then("the docker container has at most {num} exec instances")
def check_container_exec_instances(context, num):
"""Modern docker versions remove ExecIDs after they finished, but older
docker versions leave ExecIDs behind. This test is for asserting that
the ExecIDs are cleaned up one way or another"""
container_info = context.docker_client.inspect_container(
context.running_container_id
)
if container_info["ExecIDs"] is None:
execs = []
else:
execs = container_info["ExecIDs"]
print("Container info:\n%s" % container_info)
assert len(execs) <= int(num)
|
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import HomeAssistantType
from .const import SIGNAL_PANEL_MESSAGE
async def async_setup_entry(
hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
):
"""Set up for AlarmDecoder sensor."""
entity = AlarmDecoderSensor()
async_add_entities([entity])
return True
class AlarmDecoderSensor(Entity):
"""Representation of an AlarmDecoder keypad."""
def __init__(self):
"""Initialize the alarm panel."""
self._display = ""
self._state = None
self._icon = "mdi:alarm-check"
self._name = "Alarm Panel Display"
async def async_added_to_hass(self):
"""Register callbacks."""
self.async_on_remove(
self.hass.helpers.dispatcher.async_dispatcher_connect(
SIGNAL_PANEL_MESSAGE, self._message_callback
)
)
def _message_callback(self, message):
if self._display != message.text:
self._display = message.text
self.schedule_update_ha_state()
@property
def icon(self):
"""Return the icon if any."""
return self._icon
@property
def state(self):
"""Return the overall state."""
return self._display
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def should_poll(self):
"""No polling needed."""
return False
|
import datetime
import os
import sys
sys.path = [os.path.abspath(os.path.dirname(__file__))] + sys.path
sys.path = [os.path.abspath(os.path.dirname(os.path.dirname(__file__)))] + sys.path
from auto_ml import Predictor, __version__ as auto_ml_version
from auto_ml.utils_models import load_ml_model
import dill
import numpy as np
import utils_testing as utils
if 'backwards_compatibility' in os.environ.get('TESTS_TO_RUN', 'blank'):
def test_backwards_compatibility_with_version_2_1_6():
np.random.seed(0)
print('auto_ml_version')
print(auto_ml_version)
if auto_ml_version <= '2.9.0':
raise(TypeError)
df_titanic_train, df_titanic_test = utils.get_titanic_binary_classification_dataset()
saved_ml_pipeline = load_ml_model(os.path.join('tests', 'backwards_compatibility_tests', 'trained_ml_model_v_2_1_6.dill'))
df_titanic_test_dictionaries = df_titanic_test.to_dict('records')
# 1. make sure the accuracy is the same
predictions = []
for row in df_titanic_test_dictionaries:
predictions.append(saved_ml_pipeline.predict_proba(row)[1])
print('predictions')
print(predictions)
first_score = utils.calculate_brier_score_loss(df_titanic_test.survived, predictions)
print('first_score')
print(first_score)
# Make sure our score is good, but not unreasonably good
lower_bound = -0.215
assert lower_bound < first_score < -0.17
# 2. make sure the speed is reasonable (do it a few extra times)
data_length = len(df_titanic_test_dictionaries)
start_time = datetime.datetime.now()
for idx in range(1000):
row_num = idx % data_length
saved_ml_pipeline.predict(df_titanic_test_dictionaries[row_num])
end_time = datetime.datetime.now()
duration = end_time - start_time
print('duration.total_seconds()')
print(duration.total_seconds())
# It's very difficult to set a benchmark for speed that will work across all machines.
# On my 2013 bottom of the line 15" MacBook Pro, this runs in about 0.8 seconds for 1000 predictions
# That's about 1 millisecond per prediction
# Assuming we might be running on a test box that's pretty weak, multiply by 3
# Also make sure we're not running unreasonably quickly
assert 0.2 < duration.total_seconds() < 15
# 3. make sure we're not modifying the dictionaries (the score is the same after running a few experiments as it is the first time)
predictions = []
for row in df_titanic_test_dictionaries:
predictions.append(saved_ml_pipeline.predict_proba(row)[1])
print('predictions')
print(predictions)
print('df_titanic_test_dictionaries')
print(df_titanic_test_dictionaries)
second_score = utils.calculate_brier_score_loss(df_titanic_test.survived, predictions)
print('second_score')
print(second_score)
# Make sure our score is good, but not unreasonably good
assert lower_bound < second_score < -0.17
def train_old_model():
print('auto_ml_version')
print(auto_ml_version)
if auto_ml_version > '2.1.6':
raise(TypeError)
np.random.seed(0)
df_titanic_train, df_titanic_test = utils.get_titanic_binary_classification_dataset()
column_descriptions = {
'survived': 'output'
, 'sex': 'categorical'
, 'embarked': 'categorical'
, 'pclass': 'categorical'
}
ml_predictor = Predictor(type_of_estimator='classifier', column_descriptions=column_descriptions)
ml_predictor.train(df_titanic_train)
file_name = ml_predictor.save('trained_ml_model_v_2_1_6.dill')
saved_ml_pipeline = load_ml_model(file_name)
df_titanic_test_dictionaries = df_titanic_test.to_dict('records')
# 1. make sure the accuracy is the same
predictions = []
for row in df_titanic_test_dictionaries:
predictions.append(saved_ml_pipeline.predict_proba(row)[1])
first_score = utils.calculate_brier_score_loss(df_titanic_test.survived, predictions)
# Make sure our score is good, but not unreasonably good
lower_bound = -0.16
assert -0.16 < first_score < -0.135
# 2. make sure the speed is reasonable (do it a few extra times)
data_length = len(df_titanic_test_dictionaries)
start_time = datetime.datetime.now()
for idx in range(1000):
row_num = idx % data_length
saved_ml_pipeline.predict(df_titanic_test_dictionaries[row_num])
end_time = datetime.datetime.now()
duration = end_time - start_time
print('duration.total_seconds()')
print(duration.total_seconds())
# It's very difficult to set a benchmark for speed that will work across all machines.
# On my 2013 bottom of the line 15" MacBook Pro, this runs in about 0.8 seconds for 1000 predictions
# That's about 1 millisecond per prediction
# Assuming we might be running on a test box that's pretty weak, multiply by 3
# Also make sure we're not running unreasonably quickly
assert 0.2 < duration.total_seconds() < 15
# 3. make sure we're not modifying the dictionaries (the score is the same after running a few experiments as it is the first time)
predictions = []
for row in df_titanic_test_dictionaries:
predictions.append(saved_ml_pipeline.predict_proba(row)[1])
second_score = utils.calculate_brier_score_loss(df_titanic_test.survived, predictions)
# Make sure our score is good, but not unreasonably good
assert -0.16 < second_score < -0.135
if __name__ == '__main__':
train_old_model()
|
from unittest import TestCase
from scattertext.test.test_semioticSquare import get_test_semiotic_square
from scattertext.viz.HTMLSemioticSquareViz import HTMLSemioticSquareViz
class TestHTMLSemioticSquareViz(TestCase):
def test_get_html(self):
semsq = get_test_semiotic_square()
html_default = HTMLSemioticSquareViz(semsq).get_html()
html_6 = HTMLSemioticSquareViz(semsq).get_html(num_terms=6)
self.assertNotEqual(html_default, html_6)
|
import asyncio
import logging
from typing import Any, Dict, Iterable, Optional
from homeassistant.const import (
ATTR_ENTITY_ID,
SERVICE_LOCK,
SERVICE_UNLOCK,
STATE_LOCKED,
STATE_UNLOCKED,
)
from homeassistant.core import Context, State
from homeassistant.helpers.typing import HomeAssistantType
from . import DOMAIN
_LOGGER = logging.getLogger(__name__)
VALID_STATES = {STATE_LOCKED, STATE_UNLOCKED}
async def _async_reproduce_state(
hass: HomeAssistantType,
state: State,
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce a single state."""
cur_state = hass.states.get(state.entity_id)
if cur_state is None:
_LOGGER.warning("Unable to find entity %s", state.entity_id)
return
if state.state not in VALID_STATES:
_LOGGER.warning(
"Invalid state specified for %s: %s", state.entity_id, state.state
)
return
# Return if we are already at the right state.
if cur_state.state == state.state:
return
service_data = {ATTR_ENTITY_ID: state.entity_id}
if state.state == STATE_LOCKED:
service = SERVICE_LOCK
elif state.state == STATE_UNLOCKED:
service = SERVICE_UNLOCK
await hass.services.async_call(
DOMAIN, service, service_data, context=context, blocking=True
)
async def async_reproduce_states(
hass: HomeAssistantType,
states: Iterable[State],
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce Lock states."""
await asyncio.gather(
*(
_async_reproduce_state(
hass, state, context=context, reproduce_options=reproduce_options
)
for state in states
)
)
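# Hedged usage sketch of the function above (entity id is made up):
#
#     await async_reproduce_states(hass, [State("lock.front_door", STATE_LOCKED)])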
|
from flexx.util.testing import run_tests_if_main, skipif, skip, raises
from flexx.event.both_tester import run_in_both, StdoutMismatchError
from flexx import event
loop = event.loop
logger = event.logger
def this_is_js():
return False
class Person(event.Component):
first_name = event.StringProp('John', settable=True)
last_name = event.StringProp('Doe', settable=True)
##
@run_in_both(Person)
def func_ok1():
"""
john doe
john doe
almar klein
"""
p = Person()
print(p.first_name, p.last_name)
p.set_first_name('almar')
p.set_last_name('klein')
print(p.first_name, p.last_name)
loop.iter()
print(p.first_name, p.last_name)
def test_ok1():
assert func_ok1()
##
@run_in_both()
def func_ok2():
"""
bar
----------
foo
"""
if this_is_js():
print('foo')
else:
print('bar')
@run_in_both()
def func_ok3():
"""
bar
"""
if this_is_js():
print('foo')
else:
print('bar')
@run_in_both()
def func_ok4():
"""
foo
"""
if this_is_js():
print('foo')
else:
print('bar')
def test_ok234():
assert func_ok2()
with raises(StdoutMismatchError):
func_ok3()
with raises(StdoutMismatchError):
func_ok4()
##
@run_in_both(Person)
def func_fail():
"""
john doe
almar klein
"""
p = Person()
print(p.first_name, p.last_name)
p.set_first_name('almar')
p.set_last_name('klein')
print(p.first_name, p.last_name)
loop.iter()
print(p.first_name, p.last_name)
def test_fail():
with raises(StdoutMismatchError):
func_fail()
##
@run_in_both()
def func_ok_exception(): # ? indicates that the following text must be present
"""
? AttributeError
"""
try:
raise AttributeError('xx')
except Exception as err:
logger.exception(err)
def test_ok_exception():
assert func_ok_exception()
##
@run_in_both()
def func_fail_exception1(): # This just fails hard
"""
"""
raise AttributeError('xx')
@run_in_both(js=False)
def func_fail_exception2(): # This just fails hard
"""
"""
raise AttributeError('xx')
@run_in_both(py=False)
def func_fail_exception3(): # This just fails hard
"""
"""
raise AttributeError('xx')
def test_fail_exception():
with raises(AttributeError):
func_fail_exception1()
with raises(AttributeError):
func_fail_exception2()
with raises(Exception): # eval_js turns processerror into Exception
func_fail_exception3()
if __name__ == '__main__':
    # Run this as a script to execute the tests directly
test_ok1()
test_ok234()
test_ok_exception()
test_fail()
test_fail_exception()
|
from typing import Optional
from aioesphomeapi import CoverInfo, CoverOperation, CoverState
from homeassistant.components.cover import (
ATTR_POSITION,
ATTR_TILT_POSITION,
SUPPORT_CLOSE,
SUPPORT_CLOSE_TILT,
SUPPORT_OPEN,
SUPPORT_OPEN_TILT,
SUPPORT_SET_POSITION,
SUPPORT_SET_TILT_POSITION,
SUPPORT_STOP,
CoverEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.typing import HomeAssistantType
from . import EsphomeEntity, esphome_state_property, platform_async_setup_entry
async def async_setup_entry(
hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
) -> None:
"""Set up ESPHome covers based on a config entry."""
await platform_async_setup_entry(
hass,
entry,
async_add_entities,
component_key="cover",
info_type=CoverInfo,
entity_type=EsphomeCover,
state_type=CoverState,
)
class EsphomeCover(EsphomeEntity, CoverEntity):
"""A cover implementation for ESPHome."""
@property
def _static_info(self) -> CoverInfo:
return super()._static_info
@property
def supported_features(self) -> int:
"""Flag supported features."""
flags = SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_STOP
if self._static_info.supports_position:
flags |= SUPPORT_SET_POSITION
if self._static_info.supports_tilt:
flags |= SUPPORT_OPEN_TILT | SUPPORT_CLOSE_TILT | SUPPORT_SET_TILT_POSITION
return flags
@property
def device_class(self) -> str:
"""Return the class of this device, from component DEVICE_CLASSES."""
return self._static_info.device_class
@property
def assumed_state(self) -> bool:
"""Return true if we do optimistic updates."""
return self._static_info.assumed_state
@property
def _state(self) -> Optional[CoverState]:
return super()._state
# https://github.com/PyCQA/pylint/issues/3150 for all @esphome_state_property
# pylint: disable=invalid-overridden-method
@esphome_state_property
def is_closed(self) -> Optional[bool]:
"""Return if the cover is closed or not."""
# Check closed state with api version due to a protocol change
return self._state.is_closed(self._client.api_version)
@esphome_state_property
def is_opening(self) -> bool:
"""Return if the cover is opening or not."""
return self._state.current_operation == CoverOperation.IS_OPENING
@esphome_state_property
def is_closing(self) -> bool:
"""Return if the cover is closing or not."""
return self._state.current_operation == CoverOperation.IS_CLOSING
@esphome_state_property
def current_cover_position(self) -> Optional[int]:
"""Return current position of cover. 0 is closed, 100 is open."""
if not self._static_info.supports_position:
return None
return round(self._state.position * 100.0)
@esphome_state_property
def current_cover_tilt_position(self) -> Optional[float]:
"""Return current position of cover tilt. 0 is closed, 100 is open."""
if not self._static_info.supports_tilt:
return None
return self._state.tilt * 100.0
async def async_open_cover(self, **kwargs) -> None:
"""Open the cover."""
await self._client.cover_command(key=self._static_info.key, position=1.0)
async def async_close_cover(self, **kwargs) -> None:
"""Close cover."""
await self._client.cover_command(key=self._static_info.key, position=0.0)
async def async_stop_cover(self, **kwargs) -> None:
"""Stop the cover."""
await self._client.cover_command(key=self._static_info.key, stop=True)
async def async_set_cover_position(self, **kwargs) -> None:
"""Move the cover to a specific position."""
await self._client.cover_command(
key=self._static_info.key, position=kwargs[ATTR_POSITION] / 100
)
async def async_open_cover_tilt(self, **kwargs) -> None:
"""Open the cover tilt."""
await self._client.cover_command(key=self._static_info.key, tilt=1.0)
async def async_close_cover_tilt(self, **kwargs) -> None:
"""Close the cover tilt."""
await self._client.cover_command(key=self._static_info.key, tilt=0.0)
async def async_set_cover_tilt_position(self, **kwargs) -> None:
"""Move the cover tilt to a specific position."""
await self._client.cover_command(
key=self._static_info.key, tilt=kwargs[ATTR_TILT_POSITION] / 100
)
|
import homeassistant.components.image_processing as ip
from homeassistant.const import ATTR_ENTITY_PICTURE
from homeassistant.core import callback
from homeassistant.setup import setup_component
from tests.async_mock import MagicMock, PropertyMock, patch
from tests.common import assert_setup_component, get_test_home_assistant, load_fixture
from tests.components.image_processing import common
def mock_async_subprocess():
"""Get a Popen mock back."""
async_popen = MagicMock()
async def communicate(input=None):
"""Communicate mock."""
fixture = bytes(load_fixture("alpr_stdout.txt"), "utf-8")
return (fixture, None)
async_popen.communicate = communicate
return async_popen
class TestOpenAlprLocalSetup:
"""Test class for image processing."""
def setup_method(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
def teardown_method(self):
"""Stop everything that was started."""
self.hass.stop()
def test_setup_platform(self):
"""Set up platform with one entity."""
config = {
ip.DOMAIN: {
"platform": "openalpr_local",
"source": {"entity_id": "camera.demo_camera"},
"region": "eu",
},
"camera": {"platform": "demo"},
}
with assert_setup_component(1, ip.DOMAIN):
setup_component(self.hass, ip.DOMAIN, config)
self.hass.block_till_done()
assert self.hass.states.get("image_processing.openalpr_demo_camera")
def test_setup_platform_name(self):
"""Set up platform with one entity and set name."""
config = {
ip.DOMAIN: {
"platform": "openalpr_local",
"source": {"entity_id": "camera.demo_camera", "name": "test local"},
"region": "eu",
},
"camera": {"platform": "demo"},
}
with assert_setup_component(1, ip.DOMAIN):
setup_component(self.hass, ip.DOMAIN, config)
self.hass.block_till_done()
assert self.hass.states.get("image_processing.test_local")
def test_setup_platform_without_region(self):
"""Set up platform with one entity without region."""
config = {
ip.DOMAIN: {
"platform": "openalpr_local",
"source": {"entity_id": "camera.demo_camera"},
},
"camera": {"platform": "demo"},
}
with assert_setup_component(0, ip.DOMAIN):
setup_component(self.hass, ip.DOMAIN, config)
self.hass.block_till_done()
class TestOpenAlprLocal:
"""Test class for image processing."""
def setup_method(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
config = {
ip.DOMAIN: {
"platform": "openalpr_local",
"source": {"entity_id": "camera.demo_camera", "name": "test local"},
"region": "eu",
},
"camera": {"platform": "demo"},
}
with patch(
"homeassistant.components.openalpr_local.image_processing."
"OpenAlprLocalEntity.should_poll",
new_callable=PropertyMock(return_value=False),
):
setup_component(self.hass, ip.DOMAIN, config)
self.hass.block_till_done()
state = self.hass.states.get("camera.demo_camera")
self.url = f"{self.hass.config.internal_url}{state.attributes.get(ATTR_ENTITY_PICTURE)}"
self.alpr_events = []
@callback
def mock_alpr_event(event):
"""Mock event."""
self.alpr_events.append(event)
self.hass.bus.listen("image_processing.found_plate", mock_alpr_event)
def teardown_method(self):
"""Stop everything that was started."""
self.hass.stop()
@patch("asyncio.create_subprocess_exec", return_value=mock_async_subprocess())
def test_openalpr_process_image(self, popen_mock, aioclient_mock):
"""Set up and scan a picture and test plates from event."""
aioclient_mock.get(self.url, content=b"image")
common.scan(self.hass, entity_id="image_processing.test_local")
self.hass.block_till_done()
state = self.hass.states.get("image_processing.test_local")
assert popen_mock.called
assert len(self.alpr_events) == 5
assert state.attributes.get("vehicles") == 1
assert state.state == "PE3R2X"
event_data = [
event.data
for event in self.alpr_events
if event.data.get("plate") == "PE3R2X"
]
assert len(event_data) == 1
assert event_data[0]["plate"] == "PE3R2X"
assert event_data[0]["confidence"] == float(98.9371)
assert event_data[0]["entity_id"] == "image_processing.test_local"
|
import errno
import mimetypes
import socket
import sys
from unittest import mock
import urllib.parse
from http.client import HTTPConnection
import cherrypy
from cherrypy._cpcompat import HTTPSConnection
from cherrypy.test import helper
def is_ascii(text):
"""
Return True if the text encodes as ascii.
"""
try:
text.encode('ascii')
return True
except Exception:
pass
return False
def encode_filename(filename):
"""
Given a filename to be used in a multipart/form-data,
encode the name. Return the key and encoded filename.
"""
if is_ascii(filename):
return 'filename', '"{filename}"'.format(**locals())
encoded = urllib.parse.quote(filename, encoding='utf-8')
return 'filename*', "'".join((
'UTF-8',
'', # lang
encoded,
))
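# For illustration, the two shapes returned above:
#
#     encode_filename('report.csv')  # -> ('filename', '"report.csv"')
#     encode_filename('łóąä.txt')    # -> ('filename*', "UTF-8''%C5%82%C3%B3%C4%85%C3%A4.txt")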
def encode_multipart_formdata(files):
"""Return (content_type, body) ready for httplib.HTTP instance.
files: a sequence of (name, filename, value) tuples for multipart uploads.
filename can be a string or a tuple ('filename string', 'encoding')
"""
BOUNDARY = '________ThIs_Is_tHe_bouNdaRY_$'
L = []
for key, filename, value in files:
L.append('--' + BOUNDARY)
fn_key, encoded = encode_filename(filename)
tmpl = \
'Content-Disposition: form-data; name="{key}"; {fn_key}={encoded}'
L.append(tmpl.format(**locals()))
ct = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
L.append('Content-Type: %s' % ct)
L.append('')
L.append(value)
L.append('--' + BOUNDARY + '--')
L.append('')
body = '\r\n'.join(L)
content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
return content_type, body
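# Sketch of how the helper above is used by the tests below (values are illustrative):
#
#     files = [('myfile', 'hello.txt', 'hello world')]
#     content_type, body = encode_multipart_formdata(files)
#     body = body.encode('Latin-1')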
class HTTPTests(helper.CPWebCase):
def make_connection(self):
if self.scheme == 'https':
return HTTPSConnection('%s:%s' % (self.interface(), self.PORT))
else:
return HTTPConnection('%s:%s' % (self.interface(), self.PORT))
@staticmethod
def setup_server():
class Root:
@cherrypy.expose
def index(self, *args, **kwargs):
return 'Hello world!'
@cherrypy.expose
@cherrypy.config(**{'request.process_request_body': False})
def no_body(self, *args, **kwargs):
return 'Hello world!'
@cherrypy.expose
def post_multipart(self, file):
"""Return a summary ("a * 65536\nb * 65536") of the uploaded
file.
"""
contents = file.file.read()
summary = []
curchar = None
count = 0
for c in contents:
if c == curchar:
count += 1
else:
if count:
curchar = chr(curchar)
summary.append('%s * %d' % (curchar, count))
count = 1
curchar = c
if count:
curchar = chr(curchar)
summary.append('%s * %d' % (curchar, count))
return ', '.join(summary)
@cherrypy.expose
def post_filename(self, myfile):
'''Return the name of the file which was uploaded.'''
return myfile.filename
cherrypy.tree.mount(Root())
cherrypy.config.update({'server.max_request_body_size': 30000000})
def test_no_content_length(self):
# "The presence of a message-body in a request is signaled by the
# inclusion of a Content-Length or Transfer-Encoding header field in
# the request's message-headers."
#
# Send a message with neither header and no body. Even though
# the request is of method POST, this should be OK because we set
# request.process_request_body to False for our handler.
c = self.make_connection()
c.request('POST', '/no_body')
response = c.getresponse()
self.body = response.fp.read()
self.status = str(response.status)
self.assertStatus(200)
self.assertBody(b'Hello world!')
# Now send a message that has no Content-Length, but does send a body.
# Verify that CP times out the socket and responds
# with 411 Length Required.
if self.scheme == 'https':
c = HTTPSConnection('%s:%s' % (self.interface(), self.PORT))
else:
c = HTTPConnection('%s:%s' % (self.interface(), self.PORT))
# `_get_content_length` is needed for Python 3.6+
with mock.patch.object(
c,
'_get_content_length',
lambda body, method: None,
create=True):
# `_set_content_length` is needed for Python 2.7-3.5
with mock.patch.object(c, '_set_content_length', create=True):
c.request('POST', '/')
response = c.getresponse()
self.body = response.fp.read()
self.status = str(response.status)
self.assertStatus(411)
def test_post_multipart(self):
alphabet = 'abcdefghijklmnopqrstuvwxyz'
# generate file contents for a large post
contents = ''.join([c * 65536 for c in alphabet])
# encode as multipart form data
files = [('file', 'file.txt', contents)]
content_type, body = encode_multipart_formdata(files)
body = body.encode('Latin-1')
# post file
c = self.make_connection()
c.putrequest('POST', '/post_multipart')
c.putheader('Content-Type', content_type)
c.putheader('Content-Length', str(len(body)))
c.endheaders()
c.send(body)
response = c.getresponse()
self.body = response.fp.read()
self.status = str(response.status)
self.assertStatus(200)
parts = ['%s * 65536' % ch for ch in alphabet]
self.assertBody(', '.join(parts))
def test_post_filename_with_special_characters(self):
"""Testing that we can handle filenames with special characters.
This was reported as a bug in:
* https://github.com/cherrypy/cherrypy/issues/1146/
* https://github.com/cherrypy/cherrypy/issues/1397/
* https://github.com/cherrypy/cherrypy/issues/1694/
"""
# We'll upload a bunch of files with differing names.
fnames = [
'boop.csv', 'foo, bar.csv', 'bar, xxxx.csv', 'file"name.csv',
'file;name.csv', 'file; name.csv', u'test_łóąä.txt',
]
for fname in fnames:
files = [('myfile', fname, 'yunyeenyunyue')]
content_type, body = encode_multipart_formdata(files)
body = body.encode('Latin-1')
# post file
c = self.make_connection()
c.putrequest('POST', '/post_filename')
c.putheader('Content-Type', content_type)
c.putheader('Content-Length', str(len(body)))
c.endheaders()
c.send(body)
response = c.getresponse()
self.body = response.fp.read()
self.status = str(response.status)
self.assertStatus(200)
self.assertBody(fname)
def test_malformed_request_line(self):
if getattr(cherrypy.server, 'using_apache', False):
return self.skip('skipped due to known Apache differences...')
# Test missing version in Request-Line
c = self.make_connection()
c._output(b'geT /')
c._send_output()
if hasattr(c, 'strict'):
response = c.response_class(c.sock, strict=c.strict, method='GET')
else:
# Python 3.2 removed the 'strict' feature, saying:
# "http.client now always assumes HTTP/1.x compliant servers."
response = c.response_class(c.sock, method='GET')
response.begin()
self.assertEqual(response.status, 400)
self.assertEqual(response.fp.read(22), b'Malformed Request-Line')
c.close()
def test_request_line_split_issue_1220(self):
params = {
'intervenant-entreprise-evenement_classaction':
'evenement-mailremerciements',
'_path': 'intervenant-entreprise-evenement',
'intervenant-entreprise-evenement_action-id': 19404,
'intervenant-entreprise-evenement_id': 19404,
'intervenant-entreprise_id': 28092,
}
Request_URI = '/index?' + urllib.parse.urlencode(params)
self.assertEqual(len('GET %s HTTP/1.1\r\n' % Request_URI), 256)
self.getPage(Request_URI)
self.assertBody('Hello world!')
def test_malformed_header(self):
c = self.make_connection()
c.putrequest('GET', '/')
c.putheader('Content-Type', 'text/plain')
# See https://github.com/cherrypy/cherrypy/issues/941
c._output(b're, 1.2.3.4#015#012')
c.endheaders()
response = c.getresponse()
self.status = str(response.status)
self.assertStatus(400)
self.body = response.fp.read(20)
self.assertBody('Illegal header line.')
def test_http_over_https(self):
if self.scheme != 'https':
return self.skip('skipped (not running HTTPS)... ')
# Try connecting without SSL.
conn = HTTPConnection('%s:%s' % (self.interface(), self.PORT))
conn.putrequest('GET', '/', skip_host=True)
conn.putheader('Host', self.HOST)
conn.endheaders()
response = conn.response_class(conn.sock, method='GET')
try:
response.begin()
self.assertEqual(response.status, 400)
self.body = response.read()
self.assertBody('The client sent a plain HTTP request, but this '
'server only speaks HTTPS on this port.')
except socket.error:
e = sys.exc_info()[1]
# "Connection reset by peer" is also acceptable.
if e.errno != errno.ECONNRESET:
raise
def test_garbage_in(self):
# Connect without SSL regardless of server.scheme
c = HTTPConnection('%s:%s' % (self.interface(), self.PORT))
c._output(b'gjkgjklsgjklsgjkljklsg')
c._send_output()
response = c.response_class(c.sock, method='GET')
try:
response.begin()
self.assertEqual(response.status, 400)
self.assertEqual(response.fp.read(22),
b'Malformed Request-Line')
c.close()
except socket.error:
e = sys.exc_info()[1]
# "Connection reset by peer" is also acceptable.
if e.errno != errno.ECONNRESET:
raise
|
import os
import subprocess
from itertools import chain
from typing import List, Optional, Tuple
from django.core.exceptions import ValidationError
from django.utils.functional import cached_property
from django.utils.translation import gettext as _
from weblate.addons.events import (
EVENT_COMPONENT_UPDATE,
EVENT_DAILY,
EVENT_POST_COMMIT,
EVENT_POST_PUSH,
EVENT_POST_UPDATE,
EVENT_STORE_POST_LOAD,
)
from weblate.addons.forms import BaseAddonForm
from weblate.trans.exceptions import FileParseError
from weblate.trans.tasks import perform_update
from weblate.trans.util import get_clean_env
from weblate.utils import messages
from weblate.utils.errors import report_error
from weblate.utils.render import render_template
from weblate.utils.validators import validate_filename
class BaseAddon:
    """Base class for Weblate addons."""
events: Tuple[int, ...] = ()
settings_form = None
name = ""
compat = {}
multiple = False
verbose = "Base addon"
description = "Base addon"
icon = "cog.svg"
project_scope = False
repo_scope = False
has_summary = False
alert: Optional[str] = None
trigger_update = False
stay_on_create = False
"""Base class for Weblate addons."""
def __init__(self, storage=None):
self.instance = storage
self.alerts = []
@cached_property
def doc_anchor(self):
return self.get_doc_anchor()
@classmethod
def get_doc_anchor(cls):
return "addon-{}".format(cls.name.replace(".", "-").replace("_", "-"))
@cached_property
def has_settings(self):
return self.settings_form is not None
@classmethod
def get_identifier(cls):
return cls.name
@classmethod
def create_object(cls, component, **kwargs):
from weblate.addons.models import Addon
if component:
# Reallocate to repository
if cls.repo_scope and component.linked_component:
component = component.linked_component
# Clear addon cache
component.drop_addons_cache()
return Addon(
component=component,
name=cls.name,
project_scope=cls.project_scope,
repo_scope=cls.repo_scope,
**kwargs
)
@classmethod
def create(cls, component, **kwargs):
storage = cls.create_object(component, **kwargs)
storage.save(force_insert=True)
result = cls(storage)
result.post_configure()
return result
@classmethod
def get_add_form(cls, user, component, **kwargs):
"""Return configuration form for adding new addon."""
if cls.settings_form is None:
return None
storage = cls.create_object(component)
instance = cls(storage)
# pylint: disable=not-callable
return cls.settings_form(user, instance, **kwargs)
def get_settings_form(self, user, **kwargs):
"""Return configuration form for this addon."""
if self.settings_form is None:
return None
if "data" not in kwargs:
kwargs["data"] = self.instance.configuration
# pylint: disable=not-callable
return self.settings_form(user, self, **kwargs)
def get_ui_form(self):
return self.get_settings_form(None)
def configure(self, settings):
"""Save configuration."""
self.instance.configuration = settings
self.instance.save()
self.post_configure()
def post_configure(self):
# Configure events to current status
self.instance.configure_events(self.events)
# Trigger post events to ensure direct processing
if self.project_scope:
components = self.instance.component.project.component_set.all()
elif self.repo_scope:
if self.instance.component.linked_component:
root = self.instance.component.linked_component
else:
root = self.instance.component
components = [root] + list(root.linked_childs)
else:
components = [self.instance.component]
if EVENT_POST_COMMIT in self.events:
for component in components:
self.post_commit(component)
if EVENT_POST_UPDATE in self.events:
for component in components:
component.commit_pending("addon", None)
self.post_update(component, "", False)
if EVENT_COMPONENT_UPDATE in self.events:
for component in components:
self.component_update(component)
if EVENT_POST_PUSH in self.events:
for component in components:
self.post_push(component)
if EVENT_DAILY in self.events:
for component in components:
self.daily(component)
def save_state(self):
"""Save addon state information."""
self.instance.save(update_fields=["state"])
@classmethod
def can_install(cls, component, user):
"""Check whether addon is compatible with given component."""
for key, values in cls.compat.items():
if getattr(component, key) not in values:
return False
return True
def pre_push(self, component):
"""Hook triggered before repository is pushed upstream."""
return
def post_push(self, component):
"""Hook triggered after repository is pushed upstream."""
return
def pre_update(self, component):
"""Hook triggered before repository is updated from upstream."""
return
def post_update(self, component, previous_head: str, skip_push: bool):
"""
Hook triggered after repository is updated from upstream.
:param str previous_head: HEAD of the repository prior to update, can
be blank on initial clone.
:param bool skip_push: Whether the addon operation should skip pushing
changes upstream. Usually you can pass this to
underlying methods as commit_and_push or
commit_pending.
"""
return
def pre_commit(self, translation, author):
"""Hook triggered before changes are committed to the repository."""
return
def post_commit(self, component):
"""Hook triggered after changes are committed to the repository."""
return
def post_add(self, translation):
"""Hook triggered after new translation is added."""
return
def unit_pre_create(self, unit):
"""Hook triggered before new unit is created."""
return
def store_post_load(self, translation, store):
"""
Hook triggered after a file is parsed and file format class is constructed.
This is useful to modify file format class parameters, for example adjust
how the file will be saved.
"""
return
def daily(self, component):
"""Hook triggered daily."""
return
def component_update(self, component):
return
def execute_process(self, component, cmd, env=None):
component.log_debug("%s addon exec: %s", self.name, " ".join(cmd))
try:
output = subprocess.check_output(
cmd,
env=get_clean_env(env),
cwd=component.full_path,
stderr=subprocess.STDOUT,
universal_newlines=True,
)
component.log_debug("exec result: %s", output)
except (OSError, subprocess.CalledProcessError) as err:
output = getattr(err, "output", "")
component.log_error("failed to exec %s: %s", repr(cmd), err)
for line in output.splitlines():
component.log_error("program output: %s", line)
self.alerts.append(
{
"addon": self.name,
"command": " ".join(cmd),
"output": output,
"error": str(err),
}
)
report_error(cause="Addon script error")
def trigger_alerts(self, component):
if self.alerts:
component.add_alert(self.alert, occurrences=self.alerts)
self.alerts = []
else:
component.delete_alert(self.alert)
def commit_and_push(
self, component, files: Optional[List[str]] = None, skip_push: bool = False
):
if files is None:
files = list(
chain.from_iterable(
translation.filenames
for translation in component.translation_set.iterator()
)
)
files += self.extra_files
repository = component.repository
with repository.lock:
component.commit_files(
template=component.addon_message,
extra_context={"addon_name": self.verbose},
files=files,
skip_push=skip_push,
)
def render_repo_filename(self, template, translation):
component = translation.component
# Render the template
filename = render_template(template, translation=translation)
# Validate filename (not absolute or linking to parent dir)
try:
validate_filename(filename)
except ValidationError:
return None
# Absolute path
filename = os.path.join(component.full_path, filename)
# Check if parent directory exists
dirname = os.path.dirname(filename)
if not os.path.exists(dirname):
os.makedirs(dirname)
# Validate if there is not a symlink out of the tree
try:
component.repository.resolve_symlinks(dirname)
if os.path.exists(filename):
component.repository.resolve_symlinks(filename)
except ValueError:
component.log_error("refused to write out of repository: %s", filename)
return None
return filename
@classmethod
def pre_install(cls, component, request):
if cls.trigger_update:
perform_update.delay("Component", component.pk, auto=True)
if component.repo_needs_merge():
messages.warning(
request,
_(
"The repository is outdated, you might not get "
"expected results until you update it."
),
)
class TestAddon(BaseAddon):
"""Testing addong doing nothing."""
settings_form = BaseAddonForm
name = "weblate.base.test"
verbose = "Test addon"
description = "Test addon"
class UpdateBaseAddon(BaseAddon):
"""Base class for addons updating translation files.
It hooks to post update and commits all changed translations.
"""
events = (EVENT_POST_UPDATE,)
def __init__(self, storage=None):
super().__init__(storage)
self.extra_files = []
@staticmethod
def iterate_translations(component):
yield from (
translation
for translation in component.translation_set.iterator()
if not translation.is_source or component.intermediate
)
def update_translations(self, component, previous_head):
raise NotImplementedError()
def post_update(self, component, previous_head: str, skip_push: bool):
try:
self.update_translations(component, previous_head)
except FileParseError:
# Ignore file parse error, it will be properly tracked as an alert
pass
self.commit_and_push(component, skip_push=skip_push)
class TestException(Exception):
pass
class TestCrashAddon(UpdateBaseAddon):
"""Testing addong doing nothing."""
name = "weblate.base.crash"
verbose = "Crash test addon"
description = "Crash test addon"
def update_translations(self, component, previous_head):
if previous_head:
raise TestException("Test error")
@classmethod
def can_install(cls, component, user):
return False
class StoreBaseAddon(BaseAddon):
"""Base class for addons tweaking store."""
events = (EVENT_STORE_POST_LOAD,)
icon = "wrench.svg"
|
from contextlib import contextmanager
from distutils.version import LooseVersion
from functools import partial, wraps
import os
import inspect
from io import StringIO
from shutil import rmtree
import sys
import tempfile
import traceback
from unittest import SkipTest
import warnings
import numpy as np
from numpy.testing import assert_array_equal, assert_allclose
from scipy import linalg
from ._logging import warn, ClosingStringIO
from .numerics import object_diff
def nottest(f):
"""Mark a function as not a test (decorator)."""
f.__test__ = False
return f
def _explain_exception(start=-1, stop=None, prefix='> '):
"""Explain an exception."""
# start=-1 means "only the most recent caller"
etype, value, tb = sys.exc_info()
string = traceback.format_list(traceback.extract_tb(tb)[start:stop])
string = (''.join(string).split('\n') +
traceback.format_exception_only(etype, value))
string = ':\n' + prefix + ('\n' + prefix).join(string)
return string
class _TempDir(str):
"""Create and auto-destroy temp dir.
This is designed to be used with testing modules. Instances should be
defined inside test functions. Instances defined at module level can not
guarantee proper destruction of the temporary directory.
When used at module level, the current use of the __del__() method for
cleanup can fail because the rmtree function may be cleaned up before this
object (an alternative could be using the atexit module instead).
"""
def __new__(self): # noqa: D105
new = str.__new__(self, tempfile.mkdtemp(prefix='tmp_mne_tempdir_'))
return new
def __init__(self): # noqa: D102
self._path = self.__str__()
def __del__(self): # noqa: D105
rmtree(self._path, ignore_errors=True)
def requires_nibabel():
"""Wrap to requires_module with a function call (fewer lines to change)."""
return partial(requires_module, name='nibabel')
def requires_dipy():
"""Check for dipy."""
import pytest
    # For some strange reason on CIs we can get a weird
    #
    #     ImportError: dlopen: cannot load any more object with static TLS
#
# so let's import everything in the decorator.
try:
from dipy.align import imaffine, imwarp, metrics, transforms # noqa, analysis:ignore
from dipy.align.reslice import reslice # noqa, analysis:ignore
from dipy.align.imaffine import AffineMap # noqa, analysis:ignore
from dipy.align.imwarp import DiffeomorphicMap # noqa, analysis:ignore
except Exception:
have = False
else:
have = True
return pytest.mark.skipif(not have, reason='Requires dipy >= 0.10.1')
def requires_version(library, min_version='0.0'):
"""Check for a library version."""
import pytest
return pytest.mark.skipif(not check_version(library, min_version),
reason=('Requires %s version >= %s'
% (library, min_version)))
def requires_module(function, name, call=None):
"""Skip a test if package is not available (decorator)."""
import pytest
call = ('import %s' % name) if call is None else call
reason = 'Test %s skipped, requires %s.' % (function.__name__, name)
try:
        # Execute the import/check snippet; any exception means we skip.
        exec(call, globals(), locals())
except Exception as exc:
if len(str(exc)) > 0 and str(exc) != 'No module named %s' % name:
reason += ' Got exception (%s)' % (exc,)
skip = True
else:
skip = False
return pytest.mark.skipif(skip, reason=reason)(function)
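# Hedged usage sketch (not in the original module): ``requires_module`` is
# normally bound with ``partial`` (as done for pandas, sklearn, etc. below)
# and then applied as a decorator.  The module name 'json' and the inner
# test name are arbitrary examples chosen only for illustration.
def _requires_module_example():
    """Return a decorated dummy test (illustration only)."""
    requires_json = partial(requires_module, name='json')

    @requires_json
    def json_roundtrip():
        import json
        assert json.loads(json.dumps({'a': 1})) == {'a': 1}

    return json_roundtrip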
_pandas_call = """
import pandas
version = LooseVersion(pandas.__version__)
if version < '0.8.0':
raise ImportError
"""
_mayavi_call = """
with warnings.catch_warnings(record=True): # traits
from mayavi import mlab
"""
_mne_call = """
if not has_mne_c():
raise ImportError
"""
_fs_call = """
if not has_freesurfer():
raise ImportError
"""
_n2ft_call = """
if 'NEUROMAG2FT_ROOT' not in os.environ:
raise ImportError
"""
requires_pandas = partial(requires_module, name='pandas', call=_pandas_call)
requires_pylsl = partial(requires_module, name='pylsl')
requires_sklearn = partial(requires_module, name='sklearn')
requires_mayavi = partial(requires_module, name='mayavi', call=_mayavi_call)
requires_mne = partial(requires_module, name='MNE-C', call=_mne_call)
def requires_freesurfer(arg):
"""Require Freesurfer."""
if isinstance(arg, str):
# Calling as @requires_freesurfer('progname'): return decorator
# after checking for progname existence
call = """
from . import run_subprocess
run_subprocess([%r, '--version'])
""" % (arg,)
return partial(
requires_module, name='Freesurfer (%s)' % (arg,), call=call)
else:
# Calling directly as @requires_freesurfer: return decorated function
# and just check env var existence
return requires_module(arg, name='Freesurfer', call=_fs_call)
requires_neuromag2ft = partial(requires_module, name='neuromag2ft',
call=_n2ft_call)
requires_vtk = partial(requires_module, name='vtk')
requires_pysurfer = partial(requires_module, name='PySurfer',
call="""import warnings
with warnings.catch_warnings(record=True):
from surfer import Brain""")
requires_good_network = partial(
requires_module, name='good network connection',
call='if int(os.environ.get("MNE_SKIP_NETWORK_TESTS", 0)):\n'
' raise ImportError')
requires_nitime = partial(requires_module, name='nitime')
requires_h5py = partial(requires_module, name='h5py')
def requires_numpydoc(func):
"""Decorate tests that need numpydoc."""
return requires_version('numpydoc', '1.0')(func) # validate needs 1.0
def check_version(library, min_version):
r"""Check minimum library version required.
Parameters
----------
library : str
The library name to import. Must have a ``__version__`` property.
min_version : str
The minimum version string. Anything that matches
``'(\d+ | [a-z]+ | \.)'``. Can also be empty to skip version
check (just check for library presence).
Returns
-------
ok : bool
True if the library exists with at least the specified version.
"""
ok = True
try:
library = __import__(library)
except ImportError:
ok = False
else:
if min_version:
this_version = LooseVersion(
getattr(library, '__version__', '0.0').lstrip('v'))
if this_version < min_version:
ok = False
return ok
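# Hedged illustration (not part of the original module): an empty
# ``min_version`` only checks that the library can be imported.  The module
# names used here are arbitrary examples.
def _check_version_example():
    assert check_version('os', '')                      # importable, no version check
    assert not check_version('no_such_module_xyz', '')  # missing -> False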
def _check_mayavi_version(min_version='4.3.0'):
"""Check mayavi version."""
if not check_version('mayavi', min_version):
raise RuntimeError("Need mayavi >= %s" % min_version)
def _import_mlab():
"""Quietly import mlab."""
with warnings.catch_warnings(record=True):
from mayavi import mlab
return mlab
@contextmanager
def traits_test_context():
"""Context to raise errors in trait handlers."""
from traits.api import push_exception_handler
push_exception_handler(reraise_exceptions=True)
try:
yield
finally:
push_exception_handler(reraise_exceptions=False)
def traits_test(test_func):
"""Raise errors in trait handlers (decorator)."""
@wraps(test_func)
def dec(*args, **kwargs):
with traits_test_context():
return test_func(*args, **kwargs)
return dec
@nottest
def run_tests_if_main():
"""Run tests in a given file if it is run as a script."""
local_vars = inspect.currentframe().f_back.f_locals
if local_vars.get('__name__', '') != '__main__':
return
import pytest
code = pytest.main([local_vars['__file__'], '-v'])
if code:
raise AssertionError('pytest finished with errors (%d)' % (code,))
def run_command_if_main():
"""Run a given command if it's __main__."""
local_vars = inspect.currentframe().f_back.f_locals
if local_vars.get('__name__', '') == '__main__':
local_vars['run']()
class ArgvSetter(object):
"""Temporarily set sys.argv."""
def __init__(self, args=(), disable_stdout=True,
disable_stderr=True): # noqa: D102
self.argv = list(('python',) + args)
self.stdout = ClosingStringIO() if disable_stdout else sys.stdout
self.stderr = ClosingStringIO() if disable_stderr else sys.stderr
def __enter__(self): # noqa: D105
self.orig_argv = sys.argv
sys.argv = self.argv
self.orig_stdout = sys.stdout
sys.stdout = self.stdout
self.orig_stderr = sys.stderr
sys.stderr = self.stderr
return self
def __exit__(self, *args): # noqa: D105
sys.argv = self.orig_argv
sys.stdout = self.orig_stdout
sys.stderr = self.orig_stderr
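# Hedged usage sketch (not in the original module): ArgvSetter is typically
# used to exercise command-line entry points while capturing their output.
# The '--verbose' argument is an arbitrary example.
def _argv_setter_example():
    with ArgvSetter(('--verbose',)) as out:
        assert sys.argv == ['python', '--verbose']
        print('captured')           # redirected to out.stdout
    return out.stdout.getvalue()    # would be 'captured\n'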
class SilenceStdout(object):
"""Silence stdout."""
def __init__(self, close=True):
self.close = close
def __enter__(self): # noqa: D105
self.stdout = sys.stdout
sys.stdout = StringIO()
return sys.stdout
def __exit__(self, *args): # noqa: D105
if self.close:
sys.stdout.close()
sys.stdout = self.stdout
def has_nibabel():
"""Determine if nibabel is installed.
Returns
-------
has : bool
True if the user has nibabel.
"""
try:
import nibabel # noqa
except ImportError:
return False
else:
return True
def has_mne_c():
"""Check for MNE-C."""
return 'MNE_ROOT' in os.environ
def has_freesurfer():
"""Check for Freesurfer."""
return 'FREESURFER_HOME' in os.environ
def buggy_mkl_svd(function):
"""Decorate tests that make calls to SVD and intermittently fail."""
@wraps(function)
def dec(*args, **kwargs):
try:
return function(*args, **kwargs)
except np.linalg.LinAlgError as exp:
if 'SVD did not converge' in str(exp):
msg = 'Intel MKL SVD convergence error detected, skipping test'
warn(msg)
raise SkipTest(msg)
raise
return dec
def assert_and_remove_boundary_annot(annotations, n=1):
"""Assert that there are boundary annotations and remove them."""
from ..io.base import BaseRaw
if isinstance(annotations, BaseRaw): # allow either input
annotations = annotations.annotations
for key in ('EDGE', 'BAD'):
idx = np.where(annotations.description == '%s boundary' % key)[0]
assert len(idx) == n
annotations.delete(idx)
def assert_object_equal(a, b):
"""Assert two objects are equal."""
d = object_diff(a, b)
assert d == '', d
def _raw_annot(meas_date, orig_time):
from .. import Annotations, create_info
from ..annotations import _handle_meas_date
from ..io import RawArray
info = create_info(ch_names=10, sfreq=10.)
raw = RawArray(data=np.empty((10, 10)), info=info, first_samp=10)
if meas_date is not None:
meas_date = _handle_meas_date(meas_date)
raw.info['meas_date'] = meas_date
raw.info._check_consistency()
annot = Annotations([.5], [.2], ['dummy'], orig_time)
raw.set_annotations(annotations=annot)
return raw
def _get_data(x, ch_idx):
"""Get the (n_ch, n_times) data array."""
from ..evoked import Evoked
from ..io import BaseRaw
if isinstance(x, BaseRaw):
return x[ch_idx][0]
elif isinstance(x, Evoked):
return x.data[ch_idx]
def _check_snr(actual, desired, picks, min_tol, med_tol, msg, kind='MEG'):
"""Check the SNR of a set of channels."""
actual_data = _get_data(actual, picks)
desired_data = _get_data(desired, picks)
bench_rms = np.sqrt(np.mean(desired_data * desired_data, axis=1))
error = actual_data - desired_data
error_rms = np.sqrt(np.mean(error * error, axis=1))
np.clip(error_rms, 1e-60, np.inf, out=error_rms) # avoid division by zero
snrs = bench_rms / error_rms
# min tol
snr = snrs.min()
bad_count = (snrs < min_tol).sum()
    msg = ' (%s)' % msg if msg else ''
assert bad_count == 0, ('SNR (worst %0.2f) < %0.2f for %s/%s '
'channels%s' % (snr, min_tol, bad_count,
len(picks), msg))
# median tol
snr = np.median(snrs)
assert snr >= med_tol, ('%s SNR median %0.2f < %0.2f%s'
% (kind, snr, med_tol, msg))
def assert_meg_snr(actual, desired, min_tol, med_tol=500., chpi_med_tol=500.,
msg=None):
"""Assert channel SNR of a certain level.
Mostly useful for operations like Maxwell filtering that modify
MEG channels while leaving EEG and others intact.
"""
from ..io.pick import pick_types
    picks = pick_types(actual.info, meg=True, exclude=[])
picks_desired = pick_types(desired.info, meg=True, exclude=[])
assert_array_equal(picks, picks_desired, err_msg='MEG pick mismatch')
chpis = pick_types(actual.info, meg=False, chpi=True, exclude=[])
chpis_desired = pick_types(desired.info, meg=False, chpi=True, exclude=[])
if chpi_med_tol is not None:
assert_array_equal(chpis, chpis_desired, err_msg='cHPI pick mismatch')
others = np.setdiff1d(np.arange(len(actual.ch_names)),
np.concatenate([picks, chpis]))
others_desired = np.setdiff1d(np.arange(len(desired.ch_names)),
np.concatenate([picks_desired,
chpis_desired]))
assert_array_equal(others, others_desired, err_msg='Other pick mismatch')
if len(others) > 0: # if non-MEG channels present
assert_allclose(_get_data(actual, others),
_get_data(desired, others), atol=1e-11, rtol=1e-5,
err_msg='non-MEG channel mismatch')
_check_snr(actual, desired, picks, min_tol, med_tol, msg, kind='MEG')
if chpi_med_tol is not None and len(chpis) > 0:
_check_snr(actual, desired, chpis, 0., chpi_med_tol, msg, kind='cHPI')
def assert_snr(actual, desired, tol):
"""Assert actual and desired arrays are within some SNR tolerance."""
with np.errstate(divide='ignore'): # allow infinite
snr = (linalg.norm(desired, ord='fro') /
linalg.norm(desired - actual, ord='fro'))
assert snr >= tol, '%f < %f' % (snr, tol)
def assert_stcs_equal(stc1, stc2):
"""Check that two STC are equal."""
assert_allclose(stc1.times, stc2.times)
assert_allclose(stc1.data, stc2.data)
assert_array_equal(stc1.vertices[0], stc2.vertices[0])
assert_array_equal(stc1.vertices[1], stc2.vertices[1])
assert_allclose(stc1.tmin, stc2.tmin)
assert_allclose(stc1.tstep, stc2.tstep)
def _dig_sort_key(dig):
"""Sort dig keys."""
return (dig['kind'], dig['ident'])
def assert_dig_allclose(info_py, info_bin, limit=None):
"""Assert dig allclose."""
from ..bem import fit_sphere_to_headshape
from ..io.constants import FIFF
from ..io.meas_info import Info
from ..channels.montage import DigMontage
# test dig positions
dig_py, dig_bin = info_py, info_bin
if isinstance(dig_py, Info):
assert isinstance(dig_bin, Info)
dig_py, dig_bin = dig_py['dig'], dig_bin['dig']
else:
assert isinstance(dig_bin, DigMontage)
assert isinstance(dig_py, DigMontage)
dig_py, dig_bin = dig_py.dig, dig_bin.dig
info_py = info_bin = None
assert isinstance(dig_py, list)
assert isinstance(dig_bin, list)
dig_py = sorted(dig_py, key=_dig_sort_key)
dig_bin = sorted(dig_bin, key=_dig_sort_key)
assert len(dig_py) == len(dig_bin)
for ii, (d_py, d_bin) in enumerate(zip(dig_py[:limit], dig_bin[:limit])):
for key in ('ident', 'kind', 'coord_frame'):
assert d_py[key] == d_bin[key], key
assert_allclose(d_py['r'], d_bin['r'], rtol=1e-5, atol=1e-5,
err_msg='Failure on %s:\n%s\n%s'
% (ii, d_py['r'], d_bin['r']))
if any(d['kind'] == FIFF.FIFFV_POINT_EXTRA for d in dig_py) and \
info_py is not None:
r_bin, o_head_bin, o_dev_bin = fit_sphere_to_headshape(
info_bin, units='m', verbose='error')
r_py, o_head_py, o_dev_py = fit_sphere_to_headshape(
info_py, units='m', verbose='error')
assert_allclose(r_py, r_bin, atol=1e-6)
assert_allclose(o_dev_py, o_dev_bin, rtol=1e-5, atol=1e-6)
assert_allclose(o_head_py, o_head_bin, rtol=1e-5, atol=1e-6)
@contextmanager
def modified_env(**d):
"""Use a modified os.environ with temporarily replaced key/value pairs.
Parameters
----------
**kwargs : dict
The key/value pairs of environment variables to replace.
"""
orig_env = dict()
for key, val in d.items():
orig_env[key] = os.getenv(key)
if val is not None:
assert isinstance(val, str)
os.environ[key] = val
elif key in os.environ:
del os.environ[key]
try:
yield
finally:
for key, val in orig_env.items():
if val is not None:
os.environ[key] = val
elif key in os.environ:
del os.environ[key]
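# Hedged usage sketch (not part of the original module), assuming the
# variable MNE_EXAMPLE_VAR is not set beforehand: the override only lives
# inside the ``with`` block and the previous state is restored afterwards.
def _modified_env_example():
    with modified_env(MNE_EXAMPLE_VAR='1'):
        assert os.environ['MNE_EXAMPLE_VAR'] == '1'
    assert os.getenv('MNE_EXAMPLE_VAR') is None  # removed again (was unset)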
def _click_ch_name(fig, ch_index=0, button=1):
"""Click on a channel name in a raw/epochs/ICA browse-style plot."""
from ..viz.utils import _fake_click
fig.canvas.draw()
x, y = fig.mne.ax_main.get_yticklabels()[ch_index].get_position()
xrange = np.diff(fig.mne.ax_main.get_xlim())[0]
_fake_click(fig, fig.mne.ax_main, (x - xrange / 50, y),
xform='data', button=button)
def _close_event(fig):
"""Force calling of the MPL figure close event."""
# XXX workaround: plt.close() doesn't spawn close_event on Agg backend
# (check MPL github issue #18609; scheduled to be fixed by MPL 3.4)
try:
fig.canvas.close_event()
except ValueError: # old mpl with Qt
pass # pragma: no cover
|
import re
class MountEntry(object):
"""
Represents a mount entry (device file, mount point and file system type)
"""
def __init__(self, dev, point, fstype, options):
self.dev = dev
self.point = point
self.fstype = fstype
self.options = options.split(",")
def __str__(self):
return "%s on %s type %s (%s)" % (self.dev, self.point, self.fstype,
",".join(self.options))
MOUNT_PATTERN = re.compile(
r"(.+?)\s+on\s+(.+?)\s+type\s+(\S+)(?:\s+\((.+?)\))?")
def mount_table():
"""returns the system's current mount table (a list of
:class:`MountEntry <plumbum.unixutils.MountEntry>` objects)"""
from plumbum.cmd import mount
table = []
for line in mount().splitlines():
m = MOUNT_PATTERN.match(line)
if not m:
continue
table.append(MountEntry(*m.groups()))
return table
def mounted(fs):
"""
    Indicates whether the given filesystem (device file or mount point) is currently mounted
"""
return any(fs == entry.dev or fs == entry.point for entry in mount_table())
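# Hedged illustration (not part of the original module): MOUNT_PATTERN parses
# the usual ``device on point type fstype (options)`` lines produced by the
# ``mount`` command.  The sample line below is made up.
def _mount_pattern_example():
    sample = "/dev/sda1 on / type ext4 (rw,relatime)"
    entry = MountEntry(*MOUNT_PATTERN.match(sample).groups())
    assert entry.dev == "/dev/sda1"
    assert entry.point == "/"
    assert entry.fstype == "ext4"
    assert entry.options == ["rw", "relatime"]
    return str(entry)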
|
import logging
#
# Prevent regression of #474 and #475
#
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
from smart_open import version # noqa: E402
from .smart_open_lib import open, parse_uri, smart_open, register_compressor # noqa: E402
_WARNING = """smart_open.s3_iter_bucket is deprecated and will stop functioning
in a future version. Please import iter_bucket from the smart_open.s3 module instead:
from smart_open.s3 import iter_bucket as s3_iter_bucket
"""
_WARNED = False
def s3_iter_bucket(
bucket_name,
prefix='',
accept_key=None,
key_limit=None,
workers=16,
retries=3,
**session_kwargs
):
"""Deprecated. Use smart_open.s3.iter_bucket instead."""
global _WARNED
from .s3 import iter_bucket
if not _WARNED:
logger.warning(_WARNING)
_WARNED = True
return iter_bucket(
bucket_name=bucket_name,
prefix=prefix,
accept_key=accept_key,
key_limit=key_limit,
workers=workers,
retries=retries,
session_kwargs=session_kwargs
)
__all__ = [
'open',
'parse_uri',
'register_compressor',
's3_iter_bucket',
'smart_open',
]
__version__ = version.__version__
|
from copy import deepcopy
DEFAULTS = dict(
color=dict(mag='darkblue', grad='b', eeg='k', eog='k', ecg='m', emg='k',
ref_meg='steelblue', misc='k', stim='k', resp='k', chpi='k',
exci='k', ias='k', syst='k', seeg='saddlebrown', dipole='k',
gof='k', bio='k', ecog='k', hbo='#AA3377', hbr='b',
fnirs_cw_amplitude='k', fnirs_fd_ac_amplitude='k',
fnirs_fd_phase='k', fnirs_od='k', csd='k'),
units=dict(mag='fT', grad='fT/cm', eeg='µV', eog='µV', ecg='µV', emg='µV',
misc='AU', seeg='mV', dipole='nAm', gof='GOF', bio='µV',
ecog='µV', hbo='µM', hbr='µM', ref_meg='fT',
fnirs_cw_amplitude='V', fnirs_fd_ac_amplitude='V',
fnirs_fd_phase='rad', fnirs_od='V', csd='mV/m²'),
# scalings for the units
scalings=dict(mag=1e15, grad=1e13, eeg=1e6, eog=1e6, emg=1e6, ecg=1e6,
misc=1.0, seeg=1e3, dipole=1e9, gof=1.0, bio=1e6, ecog=1e6,
hbo=1e6, hbr=1e6, ref_meg=1e15, fnirs_cw_amplitude=1.0,
fnirs_fd_ac_amplitude=1.0, fnirs_fd_phase=1.,
fnirs_od=1.0, csd=1e3),
# rough guess for a good plot
scalings_plot_raw=dict(mag=1e-12, grad=4e-11, eeg=20e-6, eog=150e-6,
ecg=5e-4, emg=1e-3, ref_meg=1e-12, misc='auto',
stim=1, resp=1, chpi=1e-4, exci=1, ias=1, syst=1,
seeg=1e-4, bio=1e-6, ecog=1e-4, hbo=10e-6,
hbr=10e-6, whitened=10., fnirs_cw_amplitude=2e-2,
fnirs_fd_ac_amplitude=2e-2, fnirs_fd_phase=2e-1,
fnirs_od=2e-2, csd=200e-4),
scalings_cov_rank=dict(mag=1e12, grad=1e11, eeg=1e5, # ~100x scalings
seeg=1e1, ecog=1e4, hbo=1e4, hbr=1e4),
ylim=dict(mag=(-600., 600.), grad=(-200., 200.), eeg=(-200., 200.),
misc=(-5., 5.), seeg=(-20., 20.), dipole=(-100., 100.),
gof=(0., 1.), bio=(-500., 500.), ecog=(-200., 200.), hbo=(0, 20),
hbr=(0, 20), csd=(-50., 50.)),
titles=dict(mag='Magnetometers', grad='Gradiometers', eeg='EEG', eog='EOG',
ecg='ECG', emg='EMG', misc='misc', seeg='sEEG', bio='BIO',
dipole='Dipole', ecog='ECoG', hbo='Oxyhemoglobin',
ref_meg='Reference Magnetometers',
fnirs_cw_amplitude='fNIRS (CW amplitude)',
fnirs_fd_ac_amplitude='fNIRS (FD AC amplitude)',
fnirs_fd_phase='fNIRS (FD phase)',
fnirs_od='fNIRS (OD)', hbr='Deoxyhemoglobin',
gof='Goodness of fit', csd='Current source density'),
mask_params=dict(marker='o',
markerfacecolor='w',
markeredgecolor='k',
linewidth=0,
markeredgewidth=1,
markersize=4),
coreg=dict(
mri_fid_opacity=1.0,
dig_fid_opacity=1.0,
mri_fid_scale=5e-3,
dig_fid_scale=8e-3,
extra_scale=4e-3,
eeg_scale=4e-3, eegp_scale=20e-3, eegp_height=0.1,
ecog_scale=5e-3,
seeg_scale=5e-3,
fnirs_scale=5e-3,
source_scale=5e-3,
detector_scale=5e-3,
hpi_scale=15e-3,
head_color=(0.988, 0.89, 0.74),
hpi_color=(1., 0., 1.),
extra_color=(1., 1., 1.),
eeg_color=(1., 0.596, 0.588), eegp_color=(0.839, 0.15, 0.16),
ecog_color=(1., 1., 1.),
seeg_color=(1., 1., .3),
fnirs_color=(1., .647, 0.),
source_color=(1., .05, 0.),
detector_color=(.3, .15, .15),
lpa_color=(1., 0., 0.),
nasion_color=(0., 1., 0.),
rpa_color=(0., 0., 1.),
),
noise_std=dict(grad=5e-13, mag=20e-15, eeg=0.2e-6),
eloreta_options=dict(eps=1e-6, max_iter=20, force_equal=False),
depth_mne=dict(exp=0.8, limit=10., limit_depth_chs=True,
combine_xyz='spectral', allow_fixed_depth=False),
depth_sparse=dict(exp=0.8, limit=None, limit_depth_chs='whiten',
combine_xyz='fro', allow_fixed_depth=True),
interpolation_method=dict(eeg='spline', meg='MNE', fnirs='nearest'),
volume_options=dict(
alpha=None, resolution=1., surface_alpha=None, blending='mip',
silhouette_alpha=None, silhouette_linewidth=2.),
)
def _handle_default(k, v=None):
"""Avoid dicts as default keyword arguments.
Use this function instead to resolve default dict values. Example usage::
scalings = _handle_default('scalings', scalings)
"""
this_mapping = deepcopy(DEFAULTS[k])
if v is not None:
if isinstance(v, dict):
this_mapping.update(v)
else:
for key in this_mapping:
this_mapping[key] = v
return this_mapping
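# Hedged illustration (not part of the original module): a scalar ``v``
# overrides every key of the chosen defaults, while a dict ``v`` updates
# only the keys it contains.
def _handle_default_example():
    all_ones = _handle_default('scalings', 1.0)
    assert all(val == 1.0 for val in all_ones.values())
    eeg_only = _handle_default('scalings', dict(eeg=1.0))
    assert eeg_only['eeg'] == 1.0 and eeg_only['mag'] == 1e15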
HEAD_SIZE_DEFAULT = 0.095 # in [m]
_BORDER_DEFAULT = 'mean'
_EXTRAPOLATE_DEFAULT = 'auto'
|
import os
import pickle
import time
from hashlib import sha256
from radicale import pathutils, storage
from radicale.log import logger
class CollectionCacheMixin:
def _clean_cache(self, folder, names, max_age=None):
"""Delete all ``names`` in ``folder`` that are older than ``max_age``.
"""
age_limit = time.time() - max_age if max_age is not None else None
modified = False
for name in names:
if not pathutils.is_safe_filesystem_path_component(name):
continue
if age_limit is not None:
try:
# Race: Another process might have deleted the file.
mtime = os.path.getmtime(os.path.join(folder, name))
except FileNotFoundError:
continue
if mtime > age_limit:
continue
logger.debug("Found expired item in cache: %r", name)
# Race: Another process might have deleted or locked the
# file.
try:
os.remove(os.path.join(folder, name))
except (FileNotFoundError, PermissionError):
continue
modified = True
if modified:
self._storage._sync_directory(folder)
@staticmethod
def _item_cache_hash(raw_text):
_hash = sha256()
_hash.update(storage.CACHE_VERSION)
_hash.update(raw_text)
return _hash.hexdigest()
def _item_cache_content(self, item, cache_hash=None):
text = item.serialize()
if cache_hash is None:
cache_hash = self._item_cache_hash(text.encode(self._encoding))
return (cache_hash, item.uid, item.etag, text, item.name,
item.component_name, *item.time_range)
def _store_item_cache(self, href, item, cache_hash=None):
cache_folder = os.path.join(self._filesystem_path, ".Radicale.cache",
"item")
content = self._item_cache_content(item, cache_hash)
self._storage._makedirs_synced(cache_folder)
try:
# Race: Other processes might have created and locked the
# file.
with self._atomic_write(os.path.join(cache_folder, href),
"wb") as f:
pickle.dump(content, f)
except PermissionError:
pass
return content
def _load_item_cache(self, href, input_hash):
cache_folder = os.path.join(self._filesystem_path, ".Radicale.cache",
"item")
cache_hash = uid = etag = text = name = tag = start = end = None
try:
with open(os.path.join(cache_folder, href), "rb") as f:
cache_hash, *content = pickle.load(f)
if cache_hash == input_hash:
uid, etag, text, name, tag, start, end = content
except FileNotFoundError:
pass
except (pickle.UnpicklingError, ValueError) as e:
logger.warning("Failed to load item cache entry %r in %r: %s",
href, self.path, e, exc_info=True)
return cache_hash, uid, etag, text, name, tag, start, end
def _clean_item_cache(self):
cache_folder = os.path.join(self._filesystem_path, ".Radicale.cache",
"item")
self._clean_cache(cache_folder, (
e.name for e in os.scandir(cache_folder) if not
os.path.isfile(os.path.join(self._filesystem_path, e.name))))
|
import os
import posixpath
import time
from http import client
import pkg_resources
from radicale import httputils, pathutils, web
from radicale.log import logger
MIMETYPES = {
".css": "text/css",
".eot": "application/vnd.ms-fontobject",
".gif": "image/gif",
".html": "text/html",
".js": "application/javascript",
".manifest": "text/cache-manifest",
".png": "image/png",
".svg": "image/svg+xml",
".ttf": "application/font-sfnt",
".txt": "text/plain",
".woff": "application/font-woff",
".woff2": "font/woff2",
".xml": "text/xml"}
FALLBACK_MIMETYPE = "application/octet-stream"
class Web(web.BaseWeb):
def __init__(self, configuration):
super().__init__(configuration)
self.folder = pkg_resources.resource_filename(__name__,
"internal_data")
def get(self, environ, base_prefix, path, user):
assert path == "/.web" or path.startswith("/.web/")
assert pathutils.sanitize_path(path) == path
try:
filesystem_path = pathutils.path_to_filesystem(
self.folder, path[len("/.web"):].strip("/"))
except ValueError as e:
logger.debug("Web content with unsafe path %r requested: %s",
path, e, exc_info=True)
return httputils.NOT_FOUND
if os.path.isdir(filesystem_path) and not path.endswith("/"):
location = posixpath.basename(path) + "/"
return (client.FOUND,
{"Location": location, "Content-Type": "text/plain"},
"Redirected to %s" % location)
if os.path.isdir(filesystem_path):
filesystem_path = os.path.join(filesystem_path, "index.html")
if not os.path.isfile(filesystem_path):
return httputils.NOT_FOUND
content_type = MIMETYPES.get(
os.path.splitext(filesystem_path)[1].lower(), FALLBACK_MIMETYPE)
with open(filesystem_path, "rb") as f:
answer = f.read()
last_modified = time.strftime(
"%a, %d %b %Y %H:%M:%S GMT",
time.gmtime(os.fstat(f.fileno()).st_mtime))
headers = {
"Content-Type": content_type,
"Last-Modified": last_modified}
return client.OK, headers, answer
|
from django.test import SimpleTestCase
from translate.misc.multistring import multistring
from weblate.trans.util import (
cleanup_path,
cleanup_repo_url,
get_string,
translation_percent,
)
class HideCredentialsTest(SimpleTestCase):
def test_http(self):
self.assertEqual(
cleanup_repo_url("http://foo:[email protected]"), "http://example.com"
)
def test_http_user(self):
self.assertEqual(
cleanup_repo_url("http://[email protected]"), "http://example.com"
)
def test_git(self):
self.assertEqual(
cleanup_repo_url("git://git.weblate.org/weblate.git"),
"git://git.weblate.org/weblate.git",
)
def test_github(self):
self.assertEqual(
cleanup_repo_url("[email protected]:WeblateOrg/weblate.git"),
"[email protected]:WeblateOrg/weblate.git",
)
def test_git_hg(self):
self.assertEqual(
cleanup_repo_url("hg::https://bitbucket.org/sumwars/sumwars-code"),
"hg::https://bitbucket.org/sumwars/sumwars-code",
)
class TranslationPercentTest(SimpleTestCase):
def test_common(self):
self.assertAlmostEqual(translation_percent(2, 4), 50.0)
def test_empty(self):
self.assertAlmostEqual(translation_percent(0, 0), 100.0)
def test_none(self):
self.assertAlmostEqual(translation_percent(0, None), 0.0)
def test_untranslated_file(self):
self.assertAlmostEqual(translation_percent(0, 100), 0.0)
def test_almost_untranslated_file(self):
self.assertAlmostEqual(translation_percent(1, 10000000000), 0.1)
def test_translated_file(self):
self.assertAlmostEqual(translation_percent(100, 100), 100.0)
def test_almost_translated_file(self):
self.assertAlmostEqual(translation_percent(99999999, 100000000), 99.9)
class CleanupPathTest(SimpleTestCase):
def test_relative(self):
self.assertEqual(cleanup_path("../*.po"), "*.po")
def test_current(self):
self.assertEqual(cleanup_path("./*.po"), "*.po")
def test_mixed(self):
self.assertEqual(cleanup_path("./../*.po"), "*.po")
def test_slash(self):
self.assertEqual(cleanup_path("/*.po"), "*.po")
def test_double_slash(self):
self.assertEqual(cleanup_path("foo//*.po"), "foo/*.po")
class TextConversionTest(SimpleTestCase):
def test_multistring(self):
self.assertEqual(get_string(multistring(["foo", "bar"])), "foo\x1e\x1ebar")
def test_surrogates(self):
self.assertEqual(
get_string("\ud83d\udc68\u200d\ud83d\udcbbАгенты"), "👨💻Агенты"
)
def test_none(self):
self.assertEqual(get_string(None), "")
def test_int(self):
self.assertEqual(get_string(42), "42")
|
from pylatex import Document
import pylatex.config as cf
def test():
assert type(cf.active) == cf.Default
cf.active = cf.Version1()
assert cf.active.indent
assert Document()._indent
cf.active = cf.Version1(indent=False)
assert not cf.active.indent
assert not Document()._indent
with cf.Version1().use():
assert cf.active.indent
assert Document()._indent
assert not cf.active.indent
assert not Document()._indent
with cf.active.change(indent=True):
assert cf.active.indent
assert Document()._indent
assert not cf.active.indent
assert not Document()._indent
if __name__ == '__main__':
test()
|