from homeassistant.components.binary_sensor import (
    DEVICE_CLASS_CONNECTIVITY,
    DEVICE_CLASS_PLUG,
    DEVICE_CLASS_POWER,
    DEVICE_CLASS_SAFETY,
    BinarySensorEntity,
)

from . import DOMAIN


async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the KEBA charging station platform."""
    if discovery_info is None:
        return

    keba = hass.data[DOMAIN]

    sensors = [
        KebaBinarySensor(
            keba, "Online", "Status", "device_state", DEVICE_CLASS_CONNECTIVITY
        ),
        KebaBinarySensor(keba, "Plug", "Plug", "plug_state", DEVICE_CLASS_PLUG),
        KebaBinarySensor(
            keba, "State", "Charging State", "charging_state", DEVICE_CLASS_POWER
        ),
        KebaBinarySensor(
            keba, "Tmo FS", "Failsafe Mode", "failsafe_mode_state", DEVICE_CLASS_SAFETY
        ),
    ]
    async_add_entities(sensors)


class KebaBinarySensor(BinarySensorEntity):
    """Representation of a binary sensor of a KEBA charging station."""

    def __init__(self, keba, key, name, entity_type, device_class):
        """Initialize the KEBA Sensor."""
        self._key = key
        self._keba = keba
        self._name = name
        self._entity_type = entity_type
        self._device_class = device_class
        self._is_on = None
        self._attributes = {}

    @property
    def should_poll(self):
        """Deactivate polling. Data updated by KebaHandler."""
        return False

    @property
    def unique_id(self):
        """Return the unique ID of the binary sensor."""
        return f"{self._keba.device_id}_{self._entity_type}"

    @property
    def name(self):
        """Return the name of the device."""
        return f"{self._keba.device_name} {self._name}"

    @property
    def device_class(self):
        """Return the class of this sensor."""
        return self._device_class

    @property
    def is_on(self):
        """Return true if sensor is on."""
        return self._is_on

    @property
    def device_state_attributes(self):
        """Return the state attributes of the binary sensor."""
        return self._attributes

    async def async_update(self):
        """Get latest cached states from the device."""
        if self._key == "Online":
            self._is_on = self._keba.get_value(self._key)

        elif self._key == "Plug":
            self._is_on = self._keba.get_value("Plug_plugged")
            self._attributes["plugged_on_wallbox"] = self._keba.get_value(
                "Plug_wallbox"
            )
            self._attributes["plug_locked"] = self._keba.get_value("Plug_locked")
            self._attributes["plugged_on_EV"] = self._keba.get_value("Plug_EV")

        elif self._key == "State":
            self._is_on = self._keba.get_value("State_on")
            self._attributes["status"] = self._keba.get_value("State_details")
            self._attributes["max_charging_rate"] = str(
                self._keba.get_value("Max curr")
            )

        elif self._key == "Tmo FS":
            self._is_on = not self._keba.get_value("FS_on")
            self._attributes["failsafe_timeout"] = str(self._keba.get_value("Tmo FS"))
            self._attributes["fallback_current"] = str(self._keba.get_value("Curr FS"))

        elif self._key == "Authreq":
            self._is_on = self._keba.get_value(self._key) == 0

    def update_callback(self):
        """Schedule a state update."""
        self.async_schedule_update_ha_state(True)

    async def async_added_to_hass(self):
        """Add update callback after being added to hass."""
        self._keba.add_update_listener(self.update_callback)
import argparse
import logging
from collections import defaultdict
from collections import namedtuple
from typing import Dict
from typing import List
from typing import Mapping
from typing import Optional
from typing import Sequence

from kubernetes.client import V1DeleteOptions
from pysensu_yelp import Status

from paasta_tools.kubernetes_tools import get_all_pods
from paasta_tools.kubernetes_tools import KubeClient
from paasta_tools.kubernetes_tools import V1Pod
from paasta_tools.monitoring_tools import send_event
from paasta_tools.utils import DEFAULT_SOA_DIR

log = logging.getLogger(__name__)

EvictedPod = namedtuple("EvictedPod", ["podname", "namespace", "eviction_msg"])


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(
        description="Removes evicted pods and notifies service owners"
    )
    parser.add_argument(
        "-d",
        "--soa-dir",
        dest="soa_dir",
        default=DEFAULT_SOA_DIR,
        help="define a different soa config directory",
    )
    parser.add_argument(
        "-v", "--verbose", action="store_true", dest="verbose", default=False
    )
    parser.add_argument(
        "-n", "--dry-run", action="store_true", dest="dry_run", default=False
    )
    args = parser.parse_args()
    return args


def get_evicted_pods(pods: Sequence[V1Pod]) -> Sequence[V1Pod]:
    return [
        pod
        for pod in pods
        if pod.status.phase == "Failed" and pod.status.reason == "Evicted"
    ]


def get_pod_service(pod: V1Pod) -> Optional[str]:
    if pod.metadata.labels is not None:
        return pod.metadata.labels.get("paasta.yelp.com/service")
    else:
        return None


def notify_service_owners(
    services: Mapping[str, Sequence[EvictedPod]],
    soa_dir: str,
    dry_run: bool,
) -> None:
    check_overrides = {
        "page": False,
        "alert_after": "0m",
        "realert_every": 1,
        "tip": "Pods can be Evicted if they go over the allowed quota for a given resource. Check the Eviction message to figure out which resource quota was breached",
    }
    for service in services.keys():
        check_name = f"pod-eviction.{service}"
        check_output = "The following pods have been evicted and will be removed from the cluster:\n"
        for pod in services[service]:
            check_output += f"- {pod.podname}: {pod.eviction_msg}\n"
        if dry_run:
            log.info(f"Would have notified owners for service {service}")
        else:
            log.info(f"Notifying owners for service {service}")
            send_event(
                service,
                check_name,
                check_overrides,
                Status.CRITICAL,
                check_output,
                soa_dir,
            )


def remove_pods(
    client: KubeClient,
    services: Mapping[str, Sequence[EvictedPod]],
    dry_run: bool,
) -> None:
    delete_options = V1DeleteOptions()
    for service in services:
        # Do not remove more than 2 pods per run
        for pod in services[service][0:2]:
            if dry_run:
                log.info(f"Would have removed pod {pod.podname}")
            else:
                client.core.delete_namespaced_pod(
                    pod.podname,
                    pod.namespace,
                    body=delete_options,
                    grace_period_seconds=0,
                    propagation_policy="Background",
                )
                log.info(f"Removing pod {pod.podname}")


def evicted_pods_per_service(client: KubeClient) -> Mapping[str, Sequence[EvictedPod]]:
    all_pods = get_all_pods(kube_client=client, namespace="")
    evicted_pods = get_evicted_pods(all_pods)
    log.info(f"Pods in evicted state: {[pod.metadata.name for pod in evicted_pods]}")
    evicted_pods_aggregated: Dict[str, List[EvictedPod]] = defaultdict(list)
    for pod in evicted_pods:
        service = get_pod_service(pod)
        if service:
            evicted_pods_aggregated[service].append(
                EvictedPod(
                    pod.metadata.name, pod.metadata.namespace, pod.status.message
                )
            )
        else:
            log.info(f"Could not get service name for pod {pod.metadata.name}")
    return evicted_pods_aggregated


def main() -> None:
    args = parse_args()
    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)

    kube_client = KubeClient()

    evicted_pods = evicted_pods_per_service(kube_client)
    remove_pods(kube_client, evicted_pods, args.dry_run)


if __name__ == "__main__":
    main()
import os
import subprocess
import re

import diamond.collector
import diamond.convertor
from diamond.collector import str_to_bool


class AmavisCollector(diamond.collector.Collector):

    # From the source of amavisd-agent and it seems like the three interesting
    # formats are these: ("x y/h", "xMB yMB/h", "x s y s/msg"),
    # so this, ugly as it is to hardcode it this way, it should be right.
    #
    # The other option would be to directly read and decode amavis' berkeley
    # db, and I don't even want to get there
    matchers = [
        re.compile(r'^\s*(?P<name>[\w]+)\s+(?P<time>[\d]+) s\s+'
                   r'(?P<frequency>[\d.]+) s/msg\s+\([\w]+\)\s*$'),
        re.compile(r'^\s*(?P<name>[\w.-]+)\s+(?P<count>[\d]+)\s+'
                   r'(?P<frequency>[\d.]+)/h\s+(?P<percentage>[\d.]+) %'
                   r'\s\([\w]+\)\s*$'),
        re.compile(r'^\s*(?P<name>[\w.-]+)\s+(?P<size>[\d]+)MB\s+'
                   r'(?P<frequency>[\d.]+)MB/h\s+(?P<percentage>[\d.]+) %'
                   r'\s\([\w]+\)\s*$'),
    ]

    def get_default_config_help(self):
        config_help = super(AmavisCollector, self).get_default_config_help()
        config_help.update({
            'amavisd_exe': 'The path to amavisd-agent',
            'use_sudo': 'Call amavisd-agent using sudo',
            'sudo_exe': 'The path to sudo',
            'sudo_user': 'The user to use if using sudo',
        })
        return config_help

    def get_default_config(self):
        config = super(AmavisCollector, self).get_default_config()
        config.update({
            'path': 'amavis',
            'amavisd_exe': '/usr/sbin/amavisd-agent',
            'use_sudo': False,
            'sudo_exe': '/usr/bin/sudo',
            'sudo_user': 'amavis',
        })
        return config

    def collect(self):
        """
        Collect Amavis statistics from the output of amavisd-agent
        """
        try:
            if str_to_bool(self.config['use_sudo']):
                # Use -u instead of --user as the former is more portable. Not
                # all versions of sudo support the long form --user.
                cmdline = [
                    self.config['sudo_exe'], '-u', self.config['sudo_user'],
                    '--', self.config['amavisd_exe'], '-c', '1'
                ]
            else:
                cmdline = [self.config['amavisd_exe'], '-c', '1']

            agent = subprocess.Popen(cmdline, stdout=subprocess.PIPE)
            agent_out = agent.communicate()[0]
            lines = agent_out.strip().split(os.linesep)

            for line in lines:
                for rex in self.matchers:
                    res = rex.match(line)
                    if res:
                        groups = res.groupdict()
                        name = groups['name']
                        for metric, value in groups.items():
                            if metric == 'name':
                                continue
                            mtype = 'GAUGE'
                            precision = 2
                            if metric in ('count', 'time'):
                                mtype = 'COUNTER'
                                precision = 0
                            self.publish("{}.{}".format(name, metric), value,
                                         metric_type=mtype,
                                         precision=precision)
        except OSError as err:
            self.log.error("Could not run %s: %s",
                           self.config['amavisd_exe'], err)
            return None

        return True
import asyncio from os import path from homeassistant import config as hass_config from homeassistant.components.generic import DOMAIN from homeassistant.components.websocket_api.const import TYPE_RESULT from homeassistant.const import ( HTTP_INTERNAL_SERVER_ERROR, HTTP_NOT_FOUND, SERVICE_RELOAD, ) from homeassistant.setup import async_setup_component from tests.async_mock import patch async def test_fetching_url(aioclient_mock, hass, hass_client): """Test that it fetches the given url.""" aioclient_mock.get("http://example.com", text="hello world") await async_setup_component( hass, "camera", { "camera": { "name": "config_test", "platform": "generic", "still_image_url": "http://example.com", "username": "user", "password": "pass", } }, ) await hass.async_block_till_done() client = await hass_client() resp = await client.get("/api/camera_proxy/camera.config_test") assert resp.status == 200 assert aioclient_mock.call_count == 1 body = await resp.text() assert body == "hello world" resp = await client.get("/api/camera_proxy/camera.config_test") assert aioclient_mock.call_count == 2 async def test_fetching_without_verify_ssl(aioclient_mock, hass, hass_client): """Test that it fetches the given url when ssl verify is off.""" aioclient_mock.get("https://example.com", text="hello world") await async_setup_component( hass, "camera", { "camera": { "name": "config_test", "platform": "generic", "still_image_url": "https://example.com", "username": "user", "password": "pass", "verify_ssl": "false", } }, ) await hass.async_block_till_done() client = await hass_client() resp = await client.get("/api/camera_proxy/camera.config_test") assert resp.status == 200 async def test_fetching_url_with_verify_ssl(aioclient_mock, hass, hass_client): """Test that it fetches the given url when ssl verify is explicitly on.""" aioclient_mock.get("https://example.com", text="hello world") await async_setup_component( hass, "camera", { "camera": { "name": "config_test", "platform": "generic", "still_image_url": "https://example.com", "username": "user", "password": "pass", "verify_ssl": "true", } }, ) await hass.async_block_till_done() client = await hass_client() resp = await client.get("/api/camera_proxy/camera.config_test") assert resp.status == 200 async def test_limit_refetch(aioclient_mock, hass, hass_client): """Test that it fetches the given url.""" aioclient_mock.get("http://example.com/5a", text="hello world") aioclient_mock.get("http://example.com/10a", text="hello world") aioclient_mock.get("http://example.com/15a", text="hello planet") aioclient_mock.get("http://example.com/20a", status=HTTP_NOT_FOUND) await async_setup_component( hass, "camera", { "camera": { "name": "config_test", "platform": "generic", "still_image_url": 'http://example.com/{{ states.sensor.temp.state + "a" }}', "limit_refetch_to_url_change": True, } }, ) await hass.async_block_till_done() client = await hass_client() resp = await client.get("/api/camera_proxy/camera.config_test") hass.states.async_set("sensor.temp", "5") with patch("async_timeout.timeout", side_effect=asyncio.TimeoutError()): resp = await client.get("/api/camera_proxy/camera.config_test") assert aioclient_mock.call_count == 0 assert resp.status == HTTP_INTERNAL_SERVER_ERROR hass.states.async_set("sensor.temp", "10") resp = await client.get("/api/camera_proxy/camera.config_test") assert aioclient_mock.call_count == 1 assert resp.status == 200 body = await resp.text() assert body == "hello world" resp = await client.get("/api/camera_proxy/camera.config_test") assert 
aioclient_mock.call_count == 1 assert resp.status == 200 body = await resp.text() assert body == "hello world" hass.states.async_set("sensor.temp", "15") # Url change = fetch new image resp = await client.get("/api/camera_proxy/camera.config_test") assert aioclient_mock.call_count == 2 assert resp.status == 200 body = await resp.text() assert body == "hello planet" # Cause a template render error hass.states.async_remove("sensor.temp") resp = await client.get("/api/camera_proxy/camera.config_test") assert aioclient_mock.call_count == 2 assert resp.status == 200 body = await resp.text() assert body == "hello planet" async def test_stream_source(aioclient_mock, hass, hass_client, hass_ws_client): """Test that the stream source is rendered.""" assert await async_setup_component( hass, "camera", { "camera": { "name": "config_test", "platform": "generic", "still_image_url": "https://example.com", "stream_source": 'http://example.com/{{ states.sensor.temp.state + "a" }}', "limit_refetch_to_url_change": True, } }, ) await hass.async_block_till_done() hass.states.async_set("sensor.temp", "5") with patch( "homeassistant.components.camera.request_stream", return_value="http://home.assistant/playlist.m3u8", ) as mock_request_stream: # Request playlist through WebSocket client = await hass_ws_client(hass) await client.send_json( {"id": 1, "type": "camera/stream", "entity_id": "camera.config_test"} ) msg = await client.receive_json() # Assert WebSocket response assert mock_request_stream.call_count == 1 assert mock_request_stream.call_args[0][1] == "http://example.com/5a" assert msg["id"] == 1 assert msg["type"] == TYPE_RESULT assert msg["success"] assert msg["result"]["url"][-13:] == "playlist.m3u8" # Cause a template render error hass.states.async_remove("sensor.temp") await client.send_json( {"id": 2, "type": "camera/stream", "entity_id": "camera.config_test"} ) msg = await client.receive_json() # Assert that no new call to the stream request should have been made assert mock_request_stream.call_count == 1 # Assert the websocket error message assert msg["id"] == 2 assert msg["type"] == TYPE_RESULT assert msg["success"] is False assert msg["error"] == { "code": "start_stream_failed", "message": "camera.config_test does not support play stream service", } async def test_no_stream_source(aioclient_mock, hass, hass_client, hass_ws_client): """Test a stream request without stream source option set.""" assert await async_setup_component( hass, "camera", { "camera": { "name": "config_test", "platform": "generic", "still_image_url": "https://example.com", "limit_refetch_to_url_change": True, } }, ) await hass.async_block_till_done() with patch( "homeassistant.components.camera.request_stream", return_value="http://home.assistant/playlist.m3u8", ) as mock_request_stream: # Request playlist through WebSocket client = await hass_ws_client(hass) await client.send_json( {"id": 3, "type": "camera/stream", "entity_id": "camera.config_test"} ) msg = await client.receive_json() # Assert the websocket error message assert mock_request_stream.call_count == 0 assert msg["id"] == 3 assert msg["type"] == TYPE_RESULT assert msg["success"] is False assert msg["error"] == { "code": "start_stream_failed", "message": "camera.config_test does not support play stream service", } async def test_camera_content_type(aioclient_mock, hass, hass_client): """Test generic camera with custom content_type.""" svg_image = "<some image>" urlsvg = "https://upload.wikimedia.org/wikipedia/commons/0/02/SVG_logo.svg" aioclient_mock.get(urlsvg, 
text=svg_image) cam_config_svg = { "name": "config_test_svg", "platform": "generic", "still_image_url": urlsvg, "content_type": "image/svg+xml", } cam_config_normal = cam_config_svg.copy() cam_config_normal.pop("content_type") cam_config_normal["name"] = "config_test_jpg" await async_setup_component( hass, "camera", {"camera": [cam_config_svg, cam_config_normal]} ) await hass.async_block_till_done() client = await hass_client() resp_1 = await client.get("/api/camera_proxy/camera.config_test_svg") assert aioclient_mock.call_count == 1 assert resp_1.status == 200 assert resp_1.content_type == "image/svg+xml" body = await resp_1.text() assert body == svg_image resp_2 = await client.get("/api/camera_proxy/camera.config_test_jpg") assert aioclient_mock.call_count == 2 assert resp_2.status == 200 assert resp_2.content_type == "image/jpeg" body = await resp_2.text() assert body == svg_image async def test_reloading(aioclient_mock, hass, hass_client): """Test we can cleanly reload.""" aioclient_mock.get("http://example.com", text="hello world") await async_setup_component( hass, "camera", { "camera": { "name": "config_test", "platform": "generic", "still_image_url": "http://example.com", "username": "user", "password": "pass", } }, ) await hass.async_block_till_done() client = await hass_client() resp = await client.get("/api/camera_proxy/camera.config_test") assert resp.status == 200 assert aioclient_mock.call_count == 1 body = await resp.text() assert body == "hello world" yaml_path = path.join( _get_fixtures_base_path(), "fixtures", "generic/configuration.yaml", ) with patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path): await hass.services.async_call( DOMAIN, SERVICE_RELOAD, {}, blocking=True, ) await hass.async_block_till_done() assert len(hass.states.async_all()) == 1 resp = await client.get("/api/camera_proxy/camera.config_test") assert resp.status == 404 resp = await client.get("/api/camera_proxy/camera.reload") assert resp.status == 200 assert aioclient_mock.call_count == 2 body = await resp.text() assert body == "hello world" def _get_fixtures_base_path(): return path.dirname(path.dirname(path.dirname(__file__)))
from unittest.mock import patch

import pytest

from homeassistant.components import ios
from homeassistant.setup import async_setup_component

from tests.common import mock_component, mock_coro


@pytest.fixture(autouse=True)
def mock_load_json():
    """Mock load_json."""
    with patch("homeassistant.components.ios.load_json", return_value={}):
        yield


@pytest.fixture(autouse=True)
def mock_dependencies(hass):
    """Mock dependencies loaded."""
    mock_component(hass, "zeroconf")
    mock_component(hass, "device_tracker")


async def test_creating_entry_sets_up_sensor(hass):
    """Test setting up iOS loads the sensor component."""
    with patch(
        "homeassistant.components.ios.sensor.async_setup_entry",
        return_value=mock_coro(True),
    ) as mock_setup:
        assert await async_setup_component(hass, ios.DOMAIN, {ios.DOMAIN: {}})
        await hass.async_block_till_done()

    assert len(mock_setup.mock_calls) == 1


async def test_configuring_ios_creates_entry(hass):
    """Test that specifying config will create an entry."""
    with patch(
        "homeassistant.components.ios.async_setup_entry", return_value=mock_coro(True)
    ) as mock_setup:
        await async_setup_component(hass, ios.DOMAIN, {"ios": {"push": {}}})
        await hass.async_block_till_done()

    assert len(mock_setup.mock_calls) == 1


async def test_not_configuring_ios_not_creates_entry(hass):
    """Test that no config will not create an entry."""
    with patch(
        "homeassistant.components.ios.async_setup_entry", return_value=mock_coro(True)
    ) as mock_setup:
        await async_setup_component(hass, ios.DOMAIN, {"foo": "bar"})
        await hass.async_block_till_done()

    assert len(mock_setup.mock_calls) == 0
import logging
import mimetypes
import os

import voluptuous as vol

from homeassistant.components.camera import (
    CAMERA_SERVICE_SCHEMA,
    PLATFORM_SCHEMA,
    Camera,
)
from homeassistant.const import ATTR_ENTITY_ID, CONF_NAME
from homeassistant.helpers import config_validation as cv

from .const import (
    CONF_FILE_PATH,
    DATA_LOCAL_FILE,
    DEFAULT_NAME,
    DOMAIN,
    SERVICE_UPDATE_FILE_PATH,
)

_LOGGER = logging.getLogger(__name__)

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_FILE_PATH): cv.string,
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    }
)

CAMERA_SERVICE_UPDATE_FILE_PATH = CAMERA_SERVICE_SCHEMA.extend(
    {vol.Required(CONF_FILE_PATH): cv.string}
)


def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Camera that works with local files."""
    if DATA_LOCAL_FILE not in hass.data:
        hass.data[DATA_LOCAL_FILE] = []

    file_path = config[CONF_FILE_PATH]
    camera = LocalFile(config[CONF_NAME], file_path)
    hass.data[DATA_LOCAL_FILE].append(camera)

    def update_file_path_service(call):
        """Update the file path."""
        file_path = call.data.get(CONF_FILE_PATH)
        entity_ids = call.data.get(ATTR_ENTITY_ID)
        cameras = hass.data[DATA_LOCAL_FILE]

        for camera in cameras:
            if camera.entity_id in entity_ids:
                camera.update_file_path(file_path)
        return True

    hass.services.register(
        DOMAIN,
        SERVICE_UPDATE_FILE_PATH,
        update_file_path_service,
        schema=CAMERA_SERVICE_UPDATE_FILE_PATH,
    )

    add_entities([camera])


class LocalFile(Camera):
    """Representation of a local file camera."""

    def __init__(self, name, file_path):
        """Initialize Local File Camera component."""
        super().__init__()

        self._name = name
        self.check_file_path_access(file_path)
        self._file_path = file_path
        # Set content type of local file
        content, _ = mimetypes.guess_type(file_path)
        if content is not None:
            self.content_type = content

    def camera_image(self):
        """Return image response."""
        try:
            with open(self._file_path, "rb") as file:
                return file.read()
        except FileNotFoundError:
            _LOGGER.warning(
                "Could not read camera %s image from file: %s",
                self._name,
                self._file_path,
            )

    def check_file_path_access(self, file_path):
        """Check that filepath given is readable."""
        if not os.access(file_path, os.R_OK):
            _LOGGER.warning(
                "Could not read camera %s image from file: %s", self._name, file_path
            )

    def update_file_path(self, file_path):
        """Update the file_path."""
        self.check_file_path_access(file_path)
        self._file_path = file_path
        self.schedule_update_ha_state()

    @property
    def name(self):
        """Return the name of this camera."""
        return self._name

    @property
    def device_state_attributes(self):
        """Return the camera state attributes."""
        return {"file_path": self._file_path}
from app.wraps.login_wrap import login_required
from app import app, v
from app.utils import ResponseUtil, RequestUtil, SshUtil
from app.database.model import Server


# get server list
@app.route('/api/server/list', methods=['GET'])
@login_required()
def api_server_list():
    # login user
    user_id = RequestUtil.get_login_user().get('id', '')

    servers = Server.query.filter_by(user_id=user_id, deleted=False) \
        .order_by(Server.id.desc()).all()

    servers = [server.dict(with_pkey=True) for server in servers]

    return ResponseUtil.standard_response(1, servers)


# new server
@app.route('/api/server/new', methods=['POST'])
@login_required()
@v.param({
    'ip': v.ipv4(),
    'port': v.int(min=0),
    'account': v.str(),
    'pkey': v.str(),
    v.optional('name'): v.str(),
    v.optional('id'): v.str()
})
def api_server_new(ip, port, account, pkey, name=None, id=None):
    # login user
    user_id = RequestUtil.get_login_user().get('id', '')

    server_id = id
    name = name if name else ip

    try:
        success, log = SshUtil.do_ssh_cmd(
            ip, port, account, pkey, 'ls -lh', timeout=5)

        if success:
            if server_id:
                # update webhook
                # you can only update the webhook which you create.
                server = Server.query.filter_by(
                    id=server_id, user_id=user_id).first()
                if not server:
                    return ResponseUtil.standard_response(
                        0, 'Server is not exist!')

                server.ip = ip
                server.port = port
                server.account = account
                server.pkey = pkey
                server.name = name
            else:
                server = Server(ip=ip, port=port, account=account,
                                pkey=pkey, user_id=user_id, name=name)

            server.save()

            return ResponseUtil.standard_response(
                1, server.dict(with_pkey=True))
    except Exception as e:
        print(e)

    return ResponseUtil.standard_response(0, 'Server SSH connect error!')


@app.route('/api/server/delete', methods=['POST'])
@login_required()
@v.param({'server_id': v.int()})
def api_server_delete(server_id):
    # login user
    user_id = RequestUtil.get_login_user().get('id', '')

    server = Server.query.filter_by(user_id=user_id, id=server_id).first()

    if not server:
        return ResponseUtil.standard_response(0, 'Permission deny!')

    server.deleted = True
    server.save()

    return ResponseUtil.standard_response(1, 'Success')
import posixpath

from perfkitbenchmarker import errors
from perfkitbenchmarker import vm_util

# The Intel yum/apt repositories are not included in the base OS and need to be
# added in. The key file (data/intel_repo_key.txt) is the APT/YUM key
# associated with the Intel repositories as documented and releases listed here:
# https://software.intel.com/en-us/articles/installing-intel-free-libs-and-python-apt-repo
_INTEL_KEY_FILE = 'intel_repo_key.txt'
_REMOTE_KEY_FILE = posixpath.join(vm_util.VM_TMP_DIR, _INTEL_KEY_FILE)

# APT constants
# The local text file of the Intel repo entries.
_APT_REPO_FILE = 'intel_repo_list.txt'
# The remote file for the repo list.
_APT_REMOTE_REPO_FILE = posixpath.join(vm_util.VM_TMP_DIR, 'intel.list')
# Command to add the GPG key and update the repo list.
_APT_INSTALL_REPO_CMD = ';'.join([
    f'sudo apt-key add {_REMOTE_KEY_FILE}',
    f'rm {_REMOTE_KEY_FILE}',
    f'sudo mv {_APT_REMOTE_REPO_FILE} /etc/apt/sources.list.d/',
    'sudo apt-get update'
])

# YUM constants
_YUM_REPO_URL = 'https://yum.repos.intel.com/setup/intelproducts.repo'
# The current Intel GPG key.
_YUM_REPO_KEY = 'https://yum.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS-2019.PUB'
# Command to add the Intel repo.
_YUM_INSTALL_REPO_CMD = f'sudo yum-config-manager --add-repo {_YUM_REPO_URL}'
# The remote path to the downloaded GPG key from the repo.
_YUM_DOWNLOAD_KEY = posixpath.join(vm_util.VM_TMP_DIR, 'mpi.yumkey')
# Command to download the current Intel GPG key.
_YUM_DOWNLOAD_KEY_CMD = f'curl -o {_YUM_DOWNLOAD_KEY} {_YUM_REPO_KEY}'
# Command to compare the current Intel key to our copy in the data/ directory.
_YUM_DIFF_KEY_CMD = f'diff {_REMOTE_KEY_FILE} {_YUM_DOWNLOAD_KEY}'


def AptPrepare(vm):
    """Configuration for APT install."""
    vm.PushDataFile(_INTEL_KEY_FILE, _REMOTE_KEY_FILE)
    vm.PushDataFile(_APT_REPO_FILE, _APT_REMOTE_REPO_FILE)
    vm.RemoteCommand(_APT_INSTALL_REPO_CMD)
    vm.InstallPackages('libgomp1')


def YumPrepare(vm):
    """Configuration for YUM install."""
    vm.PushDataFile(_INTEL_KEY_FILE, _REMOTE_KEY_FILE)
    vm.InstallPackages('yum-utils')
    vm.RemoteCommand(_YUM_INSTALL_REPO_CMD)
    # the /etc/yum.repos.d/intelproducts.repo file has the gpgkey listed as the
    # _YUM_REPO_KEY, confirm that it is the same as our local copy
    vm.RemoteCommand(_YUM_DOWNLOAD_KEY_CMD)
    diff, _, retcode = vm.RemoteCommandWithReturnCode(_YUM_DIFF_KEY_CMD)
    if retcode:
        raise errors.Setup.InvalidConfigurationError(
            f'Intel GPG key does not match local key: {diff}')
    vm.RemoteCommand(f'rm {_YUM_DOWNLOAD_KEY}')
    # need to update with -y to force import of known GPG key
    vm.RemoteCommand('sudo yum update -y')
from collections import OrderedDict from pyps4_2ndscreen.errors import CredentialTimeout from pyps4_2ndscreen.helpers import Helper from pyps4_2ndscreen.media_art import COUNTRIES import voluptuous as vol from homeassistant import config_entries from homeassistant.const import ( CONF_CODE, CONF_HOST, CONF_IP_ADDRESS, CONF_NAME, CONF_REGION, CONF_TOKEN, ) from homeassistant.util import location from .const import CONFIG_ENTRY_VERSION, DEFAULT_ALIAS, DEFAULT_NAME, DOMAIN CONF_MODE = "Config Mode" CONF_AUTO = "Auto Discover" CONF_MANUAL = "Manual Entry" UDP_PORT = 987 TCP_PORT = 997 PORT_MSG = {UDP_PORT: "port_987_bind_error", TCP_PORT: "port_997_bind_error"} PIN_LENGTH = 8 @config_entries.HANDLERS.register(DOMAIN) class PlayStation4FlowHandler(config_entries.ConfigFlow): """Handle a PlayStation 4 config flow.""" VERSION = CONFIG_ENTRY_VERSION CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL def __init__(self): """Initialize the config flow.""" self.helper = Helper() self.creds = None self.name = None self.host = None self.region = None self.pin = None self.m_device = None self.location = None self.device_list = [] async def async_step_user(self, user_input=None): """Handle a user config flow.""" # Check if able to bind to ports: UDP 987, TCP 997. ports = PORT_MSG.keys() failed = await self.hass.async_add_executor_job(self.helper.port_bind, ports) if failed in ports: reason = PORT_MSG[failed] return self.async_abort(reason=reason) return await self.async_step_creds() async def async_step_creds(self, user_input=None): """Return PS4 credentials from 2nd Screen App.""" errors = {} if user_input is not None: try: self.creds = await self.hass.async_add_executor_job( self.helper.get_creds, DEFAULT_ALIAS ) if self.creds is not None: return await self.async_step_mode() return self.async_abort(reason="credential_error") except CredentialTimeout: errors["base"] = "credential_timeout" return self.async_show_form(step_id="creds", errors=errors) async def async_step_mode(self, user_input=None): """Prompt for mode.""" errors = {} mode = [CONF_AUTO, CONF_MANUAL] if user_input is not None: if user_input[CONF_MODE] == CONF_MANUAL: try: device = user_input[CONF_IP_ADDRESS] if device: self.m_device = device except KeyError: errors[CONF_IP_ADDRESS] = "no_ipaddress" if not errors: return await self.async_step_link() mode_schema = OrderedDict() mode_schema[vol.Required(CONF_MODE, default=CONF_AUTO)] = vol.In(list(mode)) mode_schema[vol.Optional(CONF_IP_ADDRESS)] = str return self.async_show_form( step_id="mode", data_schema=vol.Schema(mode_schema), errors=errors ) async def async_step_link(self, user_input=None): """Prompt user input. Create or edit entry.""" regions = sorted(COUNTRIES.keys()) default_region = None errors = {} if user_input is None: # Search for device. devices = await self.hass.async_add_executor_job( self.helper.has_devices, self.m_device ) # Abort if can't find device. if not devices: return self.async_abort(reason="no_devices_found") self.device_list = [device["host-ip"] for device in devices] # Check that devices found aren't configured per account. entries = self.hass.config_entries.async_entries(DOMAIN) if entries: # Retrieve device data from all entries if creds match. conf_devices = [ device for entry in entries if self.creds == entry.data[CONF_TOKEN] for device in entry.data["devices"] ] # Remove configured device from search list. for c_device in conf_devices: if c_device["host"] in self.device_list: # Remove configured device from search list. 
self.device_list.remove(c_device["host"]) # If list is empty then all devices are configured. if not self.device_list: return self.async_abort(reason="already_configured") # Login to PS4 with user data. if user_input is not None: self.region = user_input[CONF_REGION] self.name = user_input[CONF_NAME] # Assume pin had leading zeros, before coercing to int. self.pin = str(user_input[CONF_CODE]).zfill(PIN_LENGTH) self.host = user_input[CONF_IP_ADDRESS] is_ready, is_login = await self.hass.async_add_executor_job( self.helper.link, self.host, self.creds, self.pin, DEFAULT_ALIAS ) if is_ready is False: errors["base"] = "cannot_connect" elif is_login is False: errors["base"] = "login_failed" else: device = { CONF_HOST: self.host, CONF_NAME: self.name, CONF_REGION: self.region, } # Create entry. return self.async_create_entry( title="PlayStation 4", data={CONF_TOKEN: self.creds, "devices": [device]}, ) # Try to find region automatically. if not self.location: self.location = await location.async_detect_location_info( self.hass.helpers.aiohttp_client.async_get_clientsession() ) if self.location: country = self.location.country_name if country in COUNTRIES: default_region = country # Show User Input form. link_schema = OrderedDict() link_schema[vol.Required(CONF_IP_ADDRESS)] = vol.In(list(self.device_list)) link_schema[vol.Required(CONF_REGION, default=default_region)] = vol.In( list(regions) ) link_schema[vol.Required(CONF_CODE)] = vol.All( vol.Strip, vol.Length(max=PIN_LENGTH), vol.Coerce(int) ) link_schema[vol.Required(CONF_NAME, default=DEFAULT_NAME)] = str return self.async_show_form( step_id="link", data_schema=vol.Schema(link_schema), errors=errors )
import pickle
import unittest

from perfkitbenchmarker import units


class UnitRegistryTestCase(unittest.TestCase):

    def testUnitNotEqual(self):
        # See https://github.com/hgrecco/pint/issues/372
        self.assertFalse(units.byte != units.Unit('byte'))

    def testKB(self):
        self.assertEqual(units.ParseExpression('12KB'),
                         units.ParseExpression('12000 bytes'))

    def testIntPercent(self):
        q = units.ParseExpression('10%')
        self.assertEqual(q.magnitude, 10)
        self.assertEqual(q.units, units.percent)

    def testFloatPercent(self):
        q = units.ParseExpression('12.5%')
        self.assertEqual(q.magnitude, 12.5)
        self.assertEqual(q.units, units.percent)


class TestPintPickling(unittest.TestCase):

    def testSameUnitRegistry(self):
        q_prepickle = 1.0 * units.Unit('second')
        q_pickled = pickle.dumps(q_prepickle)
        q_postpickle = pickle.loads(q_pickled)

        self.assertEqual(q_prepickle, q_postpickle)

    def testNewUnitRegistry(self):
        # The fundamental issue with pickling Pint Quantities is that you
        # need all of your Quantities to point to the same UnitRegistry
        # object, and when we close and reopen PKB, we create a new
        # UnitRegistry. So to test it, we create a new UnitRegistry.
        q_prepickle = 1.0 * units.Unit('second')
        q_pickled = pickle.dumps(q_prepickle)

        units._UNIT_REGISTRY = units._UnitRegistry()

        q_postpickle = pickle.loads(q_pickled)

        new_second = 1.0 * units.Unit('second')
        self.assertEqual(q_postpickle, new_second)
        # This next line checks that q_postpickle is in the same "Pint
        # universe" as new_second, because we can convert q_postpickle to
        # the units of new_second.
        q_postpickle.to(new_second)

    def testPickleKB(self):
        # Make sure we can pickle and unpickle quantities with the unit we
        # defined ourselves.
        q_prepickle = units.ParseExpression('1KB')
        q_pickled = pickle.dumps(q_prepickle)
        q_postpickle = pickle.loads(q_pickled)

        self.assertEqual(q_prepickle, q_postpickle)

    def testPicklePercent(self):
        q = units.ParseExpression('10%')
        self.assertEqual(q, pickle.loads(pickle.dumps(q)))


if __name__ == '__main__':
    unittest.main()
import logging
import math
from typing import Any, Callable, List, Optional

from bond_api import Action, DeviceType, Direction

from homeassistant.components.fan import (
    DIRECTION_FORWARD,
    DIRECTION_REVERSE,
    SPEED_HIGH,
    SPEED_LOW,
    SPEED_MEDIUM,
    SPEED_OFF,
    SUPPORT_DIRECTION,
    SUPPORT_SET_SPEED,
    FanEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import Entity

from .const import DOMAIN
from .entity import BondEntity
from .utils import BondDevice, BondHub

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    async_add_entities: Callable[[List[Entity], bool], None],
) -> None:
    """Set up Bond fan devices."""
    hub: BondHub = hass.data[DOMAIN][entry.entry_id]

    fans = [
        BondFan(hub, device)
        for device in hub.devices
        if DeviceType.is_fan(device.type)
    ]

    async_add_entities(fans, True)


class BondFan(BondEntity, FanEntity):
    """Representation of a Bond fan."""

    def __init__(self, hub: BondHub, device: BondDevice):
        """Create HA entity representing Bond fan."""
        super().__init__(hub, device)

        self._power: Optional[bool] = None
        self._speed: Optional[int] = None
        self._direction: Optional[int] = None

    def _apply_state(self, state: dict):
        self._power = state.get("power")
        self._speed = state.get("speed")
        self._direction = state.get("direction")

    @property
    def supported_features(self) -> int:
        """Flag supported features."""
        features = 0
        if self._device.supports_speed():
            features |= SUPPORT_SET_SPEED
        if self._device.supports_direction():
            features |= SUPPORT_DIRECTION

        return features

    @property
    def speed(self) -> Optional[str]:
        """Return the current speed."""
        if self._power == 0:
            return SPEED_OFF
        if not self._power or not self._speed:
            return None

        # map 1..max_speed Bond speed to 1..3 HA speed
        max_speed = max(self._device.props.get("max_speed", 3), self._speed)
        ha_speed = math.ceil(self._speed * (len(self.speed_list) - 1) / max_speed)
        return self.speed_list[ha_speed]

    @property
    def speed_list(self) -> list:
        """Get the list of available speeds."""
        return [SPEED_OFF, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH]

    @property
    def current_direction(self) -> Optional[str]:
        """Return fan rotation direction."""
        direction = None
        if self._direction == Direction.FORWARD:
            direction = DIRECTION_FORWARD
        elif self._direction == Direction.REVERSE:
            direction = DIRECTION_REVERSE

        return direction

    async def async_set_speed(self, speed: str) -> None:
        """Set the desired speed for the fan."""
        _LOGGER.debug("async_set_speed called with speed %s", speed)

        if speed == SPEED_OFF:
            await self.async_turn_off()
            return

        max_speed = self._device.props.get("max_speed", 3)
        if speed == SPEED_LOW:
            bond_speed = 1
        elif speed == SPEED_HIGH:
            bond_speed = max_speed
        else:
            bond_speed = math.ceil(max_speed / 2)

        await self._hub.bond.action(
            self._device.device_id, Action.set_speed(bond_speed)
        )

    async def async_turn_on(self, speed: Optional[str] = None, **kwargs) -> None:
        """Turn on the fan."""
        _LOGGER.debug("Fan async_turn_on called with speed %s", speed)

        if speed is not None:
            if speed == SPEED_OFF:
                await self.async_turn_off()
            else:
                await self.async_set_speed(speed)
        else:
            await self._hub.bond.action(self._device.device_id, Action.turn_on())

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the fan off."""
        await self._hub.bond.action(self._device.device_id, Action.turn_off())

    async def async_set_direction(self, direction: str):
        """Set fan rotation direction."""
        bond_direction = (
            Direction.REVERSE if direction == DIRECTION_REVERSE else Direction.FORWARD
        )
        await self._hub.bond.action(
            self._device.device_id, Action.set_direction(bond_direction)
        )
import numpy as np
import pytest
# pylint: disable=line-too-long
from tensornetwork.contractors.custom_path_solvers.pathsolvers import greedy_size_solve, greedy_cost_solve, full_solve_complete


@pytest.mark.parametrize('N', range(2, 20))
def test_greedy_size_solve(N):
    log_adj = (1 + np.sin(range(N**2))).reshape(N, N)
    log_adj += log_adj.T
    order, cost = greedy_size_solve(log_adj)
    assert order.shape == (2, N - 1)
    assert isinstance(cost, float)


@pytest.mark.parametrize('d1', np.linspace(1, 6, 10))
@pytest.mark.parametrize('d2', np.linspace(1, 6, 10))
def test_greedy_size_solve2(d1, d2):
    N = 3
    log_adj = np.zeros([N, N])
    log_adj[0, 1] = d1
    log_adj[1, 2] = d2
    log_adj += log_adj.T
    order, cost = greedy_size_solve(log_adj)
    if d1 >= d2:
        ex_order = np.array([[0, 0], [1, 1]])
        ex_cost = d2 + np.log10(10**d1 + 1)
    else:
        ex_order = np.array([[1, 0], [2, 1]])
        ex_cost = d1 + np.log10(10**d2 + 1)
    assert np.array_equal(order, ex_order)
    assert np.allclose(ex_cost, cost)


@pytest.mark.parametrize('N', range(2, 20))
def test_greedy_cost_solve(N):
    log_adj = (1 + np.sin(range(N**2))).reshape(N, N)
    log_adj += log_adj.T
    order, cost = greedy_cost_solve(log_adj)
    assert order.shape == (2, N - 1)
    assert isinstance(cost, float)


@pytest.mark.parametrize('d1', np.linspace(1, 6, 5))
@pytest.mark.parametrize('d2', np.linspace(1, 6, 5))
@pytest.mark.parametrize('d3', np.linspace(1, 6, 5))
def test_greedy_cost_solve2(d1, d2, d3):
    N = 3
    log_adj = np.zeros([N, N])
    log_adj[0, 1] = d1
    log_adj[1, 2] = d2
    log_adj += log_adj.T
    log_adj[2, 2] = d3
    order, cost = greedy_cost_solve(log_adj)
    ex_order = np.array([[0, 0], [1, 1]])
    ex_cost = d1 + d2 + np.log10(1 + 10**(d3 - d1))
    assert np.array_equal(order, ex_order)
    assert np.allclose(ex_cost, cost)


@pytest.mark.parametrize('N', range(2, 8))
def test_full_solve_complete(N):
    log_adj = (1 + np.sin(range(N**2))).reshape(N, N)
    log_adj += log_adj.T
    order, cost, _ = full_solve_complete(log_adj)
    assert order.shape == (2, N - 1)
    assert isinstance(cost, float)


@pytest.mark.parametrize('d1', np.linspace(1, 6, 5))
def test_full_solve_complete2(d1):
    N = 7
    log_adj = np.zeros([N, N])
    log_adj[:(N - 1), 1:] = np.diag(d1 * np.ones(N - 1))
    log_adj += log_adj.T
    log_adj[0, 0] = d1
    log_adj[-1, -1] = d1
    _, cost, is_optimal = full_solve_complete(log_adj)
    ex_cost = np.log10((N - 1) * 10**(3 * d1))
    assert np.allclose(ex_cost, cost)
    assert is_optimal


@pytest.mark.parametrize('cost_bound', range(1, 50, 5))
@pytest.mark.parametrize('max_branch', range(1, 1000, 100))
def test_full_solve_complete3(cost_bound, max_branch):
    N = 7
    log_adj = (1 + np.sin(range(N**2))).reshape(N, N)
    log_adj += log_adj.T
    order, cost, _ = full_solve_complete(
        log_adj, cost_bound=cost_bound, max_branch=max_branch)
    assert order.shape == (2, N - 1)
    assert isinstance(cost, float)
import datetime import os import random import sys sys.path = [os.path.abspath(os.path.dirname(__file__))] + sys.path sys.path = [os.path.abspath(os.path.dirname(os.path.dirname(__file__)))] + sys.path os.environ['is_test_suite'] = 'True' from auto_ml import Predictor from auto_ml.utils_models import load_ml_model import dill from nose.tools import assert_equal, assert_not_equal, with_setup import numpy as np from sklearn.model_selection import train_test_split import utils_testing as utils # Tests on regression models: def test_perform_feature_selection_true_regression(model_name=None): np.random.seed(0) df_boston_train, df_boston_test = utils.get_boston_regression_dataset() column_descriptions = { 'MEDV': 'output' , 'CHAS': 'categorical' } ml_predictor = Predictor(type_of_estimator='regressor', column_descriptions=column_descriptions) ml_predictor.train(df_boston_train, perform_feature_selection=True, model_names=model_name) test_score = ml_predictor.score(df_boston_test, df_boston_test.MEDV) print('test_score') print(test_score) # Bumping this up since without these features our score drops lower_bound = -4.0 if model_name == 'DeepLearningRegressor': lower_bound = -14.5 if model_name == 'LGBMRegressor': lower_bound = -4.95 assert lower_bound < test_score < -2.8 def test_perform_feature_selection_false_regression(model_name=None): np.random.seed(0) df_boston_train, df_boston_test = utils.get_boston_regression_dataset() column_descriptions = { 'MEDV': 'output' , 'CHAS': 'categorical' } ml_predictor = Predictor(type_of_estimator='regressor', column_descriptions=column_descriptions) ml_predictor.train(df_boston_train, perform_feature_selection=False, model_names=model_name) test_score = ml_predictor.score(df_boston_test, df_boston_test.MEDV) print('test_score') print(test_score) lower_bound = -3.0 assert lower_bound < test_score < -2.7 def test_perform_feature_scaling_true_regression(model_name=None): np.random.seed(0) df_boston_train, df_boston_test = utils.get_boston_regression_dataset() column_descriptions = { 'MEDV': 'output' , 'CHAS': 'categorical' } ml_predictor = Predictor(type_of_estimator='regressor', column_descriptions=column_descriptions) ml_predictor.train(df_boston_train, perform_feature_scaling=True) test_score = ml_predictor.score(df_boston_test, df_boston_test.MEDV) print('test_score') print(test_score) assert -3.0 < test_score < -2.7 def test_perform_feature_scaling_false_regression(model_name=None): np.random.seed(0) df_boston_train, df_boston_test = utils.get_boston_regression_dataset() column_descriptions = { 'MEDV': 'output' , 'CHAS': 'categorical' } ml_predictor = Predictor(type_of_estimator='regressor', column_descriptions=column_descriptions) ml_predictor.train(df_boston_train, perform_feature_scaling=False, model_names=model_name) test_score = ml_predictor.score(df_boston_test, df_boston_test.MEDV) print('test_score') print(test_score) lower_bound = -3.0 assert lower_bound < test_score < -2.7 def test_compare_all_models_regression(): np.random.seed(0) df_boston_train, df_boston_test = utils.get_boston_regression_dataset() column_descriptions = { 'MEDV': 'output' , 'CHAS': 'categorical' } ml_predictor = Predictor(type_of_estimator='regressor', column_descriptions=column_descriptions) ml_predictor.train(df_boston_train, compare_all_models=True) test_score = ml_predictor.score(df_boston_test, df_boston_test.MEDV) print('test_score') print(test_score) # ExtraTrees again throws this off assert -3.6 < test_score < -2.8
import os import unittest import mock from kalliope.core.Models.settings.Settings import Settings from kalliope.core.TTS.TTSModule import TTSModule, TtsGenerateAudioFunctionNotFound from kalliope.core.Utils.FileManager import FileManager class TestTTSModule(unittest.TestCase): """ Class to test TTSModule """ def setUp(self): self.TTSMod = TTSModule(language='tests') pass def test_generate_md5_from_words(self): """ Test generate md5 method """ word = "kalliope" expected_result = "5c186d1e123be2667fb5fd54640e4fd0" self.assertEqual(TTSModule.generate_md5_from_words(words=word), expected_result, "Fail md5") def test_get_path_to_store_audio(self): """ Test the path to store audio """ self.TTSMod.words = "kalliope" settings = Settings(cache_path="/tmp/kalliope/tests") self.TTSMod.settings = settings expected_result = "/tmp/kalliope/tests/TTSModule/tests/default/5c186d1e123be2667fb5fd54640e4fd0.tts" self.assertEqual(self.TTSMod._get_path_to_store_audio(), expected_result, "fail test_get_path_to_store_audio, expected path not corresponding to result") def test_generate_and_play(self): """ Test to generate and play sound """ def new_play_audio(TTSModule): pass words = "kalliope" with mock.patch.object(TTSModule, 'play_audio', new=new_play_audio): settings = Settings(cache_path="/tmp/kalliope/tests") self.TTSMod.settings = settings # test missing callback with self.assertRaises(TtsGenerateAudioFunctionNotFound): self.TTSMod.generate_and_play(words=words) # Assert Callback is called # no Cache self.TTSMod.cache = False generate_audio_function_from_child = mock.Mock() self.TTSMod.generate_and_play(words=words, generate_audio_function_from_child=generate_audio_function_from_child) generate_audio_function_from_child.assert_called() # with cache True but not existing on system self.TTSMod.cache = True generate_audio_function_from_child = mock.Mock() self.TTSMod.generate_and_play(words=words, generate_audio_function_from_child=generate_audio_function_from_child) generate_audio_function_from_child.assert_called() # with cache True and existing on system # create tmp file tmp_base_path = "/tmp/kalliope/tests/TTSModule/tests/default/" file_path = os.path.join(tmp_base_path, "5c186d1e123be2667fb5fd54640e4fd0.tts") if os.path.isfile(file_path): # Remove the file FileManager.remove_file(file_path) if not os.path.exists(tmp_base_path): os.makedirs(tmp_base_path) FileManager.write_in_file(file_path, "[kalliope-test] test_generate_and_play") self.TTSMod.cache = True generate_audio_function_from_child = mock.Mock() self.TTSMod.generate_and_play(words=words, generate_audio_function_from_child=generate_audio_function_from_child) generate_audio_function_from_child.assert_not_called() # Remove the tmp file FileManager.remove_file(file_path) def test_is_file_already_in_cache(self): """ Test if file is already stored in cache """ base_cache_path = "/tmp/kalliope/tests/TTSModule/tests/default/" md5_word = "5c186d1e123be2667fb5fd54640e4fd0" file_path = os.path.join(base_cache_path, "5c186d1e123be2667fb5fd54640e4fd0.tts") if os.path.isfile(file_path): # Remove the file FileManager.remove_file(file_path) # Create a tmp file if not os.path.exists(base_cache_path): os.makedirs(base_cache_path) tmp_path = os.path.join(base_cache_path, md5_word+".tts") FileManager.write_in_file(tmp_path, "[kalliope-test] test_is_file_already_in_cache") # Test true self.assertTrue(TTSModule._is_file_already_in_cache(base_cache_path=base_cache_path, file_path=file_path), "Fail retrieving the cached file. 
The file does not exist but it should !") # Remove the tmp file FileManager.remove_file(tmp_path) # Test False self.assertFalse(TTSModule._is_file_already_in_cache(base_cache_path=base_cache_path, file_path=file_path), "Fail asserting that the file does not exist.") if __name__ == '__main__': unittest.main()
import hashlib import logging import os import os.path import subprocess from datetime import datetime from distutils.version import LooseVersion from typing import List, Optional from dateutil import parser from django.conf import settings from django.core.cache import cache from django.utils.functional import cached_property from filelock import FileLock from pkg_resources import Requirement, resource_filename from sentry_sdk import add_breadcrumb from weblate.trans.util import get_clean_env, path_separator from weblate.vcs.ssh import SSH_WRAPPER LOGGER = logging.getLogger("weblate.vcs") class RepositoryException(Exception): """Error while working with a repository.""" def __init__(self, retcode, message): super().__init__(message) self.retcode = retcode def get_message(self): if self.retcode != 0: return "{} ({})".format(self.args[0], self.retcode) return self.args[0] def __str__(self): return self.get_message() class Repository: """Basic repository object.""" _cmd = "false" _cmd_last_revision: Optional[List[str]] = None _cmd_last_remote_revision: Optional[List[str]] = None _cmd_status = ["status"] _cmd_list_changed_files: Optional[List[str]] = None name = None identifier: Optional[str] = None req_version: Optional[str] = None default_branch = "" needs_push_url = True _version = None @classmethod def get_identifier(cls): return cls.identifier or cls.name.lower() def __init__(self, path, branch=None, component=None, local=False): self.path = path if branch is None: self.branch = self.default_branch else: self.branch = branch self.component = component self.last_output = "" self.lock = FileLock(self.path.rstrip("/").rstrip("\\") + ".lock", timeout=120) self.local = local if not local: # Create ssh wrapper for possible use SSH_WRAPPER.create() if not self.is_valid(): self.init() @classmethod def get_remote_branch(cls, repo: str): return cls.default_branch @classmethod def add_breadcrumb(cls, message, **data): # Add breadcrumb only if settings are already loaded, # we do not want to force loading settings early if settings.configured and getattr(settings, "SENTRY_DSN", None): add_breadcrumb(category="vcs", message=message, data=data, level="info") @classmethod def log(cls, message, level: int = logging.DEBUG): return LOGGER.log(level, "%s: %s", cls._cmd, message) def ensure_config_updated(self): """Ensures the configuration is periodically checked.""" cache_key = f"sp-config-check-{self.component.pk}" if cache.get(cache_key) is None: self.check_config() cache.set(cache_key, True, 86400) def check_config(self): """Check VCS configuration.""" raise NotImplementedError() def is_valid(self): """Check whether this is a valid repository.""" raise NotImplementedError() def init(self): """Initialize the repository.""" raise NotImplementedError() def resolve_symlinks(self, path): """Resolve any symlinks in the path.""" # Resolve symlinks first real_path = path_separator(os.path.realpath(os.path.join(self.path, path))) repository_path = path_separator(os.path.realpath(self.path)) if not real_path.startswith(repository_path): raise ValueError("Too many symlinks or link outside tree") return real_path[len(repository_path) :].lstrip("/") @staticmethod def _getenv(): """Generate environment for process execution.""" return get_clean_env( { "GIT_SSH": SSH_WRAPPER.filename, "GIT_TERMINAL_PROMPT": "0", "SVN_SSH": SSH_WRAPPER.filename, } ) @classmethod def _popen( cls, args: List[str], cwd: Optional[str] = None, merge_err: bool = True, fullcmd: bool = False, raw: bool = False, local: bool = False, stdin: 
Optional[str] = None, ): """Execute the command using popen.""" if args is None: raise RepositoryException(0, "Not supported functionality") if not fullcmd: args = [cls._cmd] + list(args) text_cmd = " ".join(args) kwargs = {} # These are mutually exclusive, on Python 3.7+ it is posible # to pass stdin = None, but on 3.6 stdin has to be omitted if stdin is not None: kwargs["input"] = stdin else: kwargs["stdin"] = subprocess.PIPE process = subprocess.run( args, cwd=cwd, env={} if local else cls._getenv(), stdout=subprocess.PIPE, stderr=subprocess.STDOUT if merge_err else subprocess.PIPE, universal_newlines=not raw, check=False, **kwargs, ) cls.add_breadcrumb( text_cmd, retcode=process.returncode, output=process.stdout, stderr=process.stderr, cwd=cwd, ) if process.returncode: raise RepositoryException( process.returncode, process.stdout + (process.stderr or "") ) return process.stdout def execute( self, args: List[str], needs_lock: bool = True, fullcmd: bool = False, merge_err: bool = True, stdin: Optional[str] = None, ): """Execute command and caches its output.""" if needs_lock: if not self.lock.is_locked: raise RuntimeError("Repository operation without lock held!") if self.component: self.ensure_config_updated() is_status = args[0] == self._cmd_status[0] try: self.last_output = self._popen( args, self.path, fullcmd=fullcmd, local=self.local, merge_err=merge_err, stdin=stdin, ) except RepositoryException as error: if not is_status: self.log_status(error) raise return self.last_output def log_status(self, error): try: self.log(f"failure {error}") self.log(self.status()) except RepositoryException: pass def clean_revision_cache(self): if "last_revision" in self.__dict__: del self.__dict__["last_revision"] if "last_remote_revision" in self.__dict__: del self.__dict__["last_remote_revision"] @cached_property def last_revision(self): """Return last local revision.""" return self.get_last_revision() def get_last_revision(self): return self.execute(self._cmd_last_revision, needs_lock=False, merge_err=False) @cached_property def last_remote_revision(self): """Return last remote revision.""" return self.execute( self._cmd_last_remote_revision, needs_lock=False, merge_err=False ) @classmethod def _clone(cls, source: str, target: str, branch: str): """Clone repository.""" raise NotImplementedError() @classmethod def clone(cls, source: str, target: str, branch: str, component=None): """Clone repository and return object for cloned repository.""" SSH_WRAPPER.create() cls._clone(source, target, branch) return cls(target, branch, component) def update_remote(self): """Update remote repository.""" raise NotImplementedError() def status(self): """Return status of the repository.""" with self.lock: return self.execute(self._cmd_status) def push(self, branch): """Push given branch to remote repository.""" raise NotImplementedError() def unshallow(self): """Unshallow working copy.""" return def reset(self): """Reset working copy to match remote branch.""" raise NotImplementedError() def merge(self, abort=False, message=None): """Merge remote branch or reverts the merge.""" raise NotImplementedError() def rebase(self, abort=False): """Rebase working copy on top of remote branch.""" raise NotImplementedError() def needs_commit(self, filenames: Optional[List[str]] = None): """Check whether repository needs commit.""" raise NotImplementedError() def count_missing(self): """Count missing commits.""" return len( self.log_revisions(self.ref_to_remote.format(self.get_remote_branch_name())) ) def count_outgoing(self): 
"""Count outgoing commits.""" return len( self.log_revisions( self.ref_from_remote.format(self.get_remote_branch_name()) ) ) def needs_merge(self): """Check whether repository needs merge with upstream. It is missing some revisions. """ return self.count_missing() > 0 def needs_push(self): """Check whether repository needs push to upstream. It has additional revisions. """ return self.count_outgoing() > 0 def _get_revision_info(self, revision): """Return dictionary with detailed revision information.""" raise NotImplementedError() def get_revision_info(self, revision): """Return dictionary with detailed revision information.""" key = f"rev-info-{self.get_identifier()}-{revision}" result = cache.get(key) if not result: result = self._get_revision_info(revision) # Keep the cache for one day cache.set(key, result, 86400) # Parse timestamps into datetime objects for name, value in result.items(): if "date" in name: result[name] = parser.parse(value) return result @classmethod def is_configured(cls): return True @classmethod def is_supported(cls): """Check whether this VCS backend is supported.""" try: version = cls.get_version() except Exception: return False return cls.req_version is None or LooseVersion(version) >= LooseVersion( cls.req_version ) @classmethod def get_version(cls): """Cached getting of version.""" if cls._version is None: try: cls._version = cls._get_version() except Exception as error: cls._version = error if isinstance(cls._version, Exception): # pylint: disable=raising-bad-type raise cls._version return cls._version @classmethod def _get_version(cls): """Return VCS program version.""" return cls._popen(["--version"], merge_err=False) def set_committer(self, name, mail): """Configure commiter name.""" raise NotImplementedError() def commit( self, message: str, author: Optional[str] = None, timestamp: Optional[datetime] = None, files: Optional[List[str]] = None, ): """Create new revision.""" raise NotImplementedError() def remove(self, files: List[str], message: str, author: Optional[str] = None): """Remove files and creates new revision.""" raise NotImplementedError() @staticmethod def update_hash(objhash, filename, extra=None): with open(filename, "rb") as handle: data = handle.read() if extra: objhash.update(extra.encode()) objhash.update("blob {}\0".format(len(data)).encode("ascii")) objhash.update(data) def get_object_hash(self, path): """Return hash of object in the VCS. For files in a way compatible with Git (equivalent to git ls-tree HEAD), for dirs it behaves differently as we do not need to track some attributes (for example permissions). 
""" real_path = os.path.join(self.path, self.resolve_symlinks(path)) objhash = hashlib.sha1() # nosec if os.path.isdir(real_path): files = [] for root, _unused, filenames in os.walk(real_path): for filename in filenames: full_name = os.path.join(root, filename) files.append((full_name, os.path.relpath(full_name, self.path))) for filename, name in sorted(files): self.update_hash(objhash, filename, name) else: self.update_hash(objhash, real_path) return objhash.hexdigest() def configure_remote( self, pull_url: str, push_url: str, branch: str, fast: bool = True ): """Configure remote repository.""" raise NotImplementedError() def configure_branch(self, branch): """Configure repository branch.""" raise NotImplementedError() def describe(self): """Verbosely describes current revision.""" raise NotImplementedError() def get_file(self, path, revision): """Return content of file at given revision.""" raise NotImplementedError() @staticmethod def get_examples_paths(): """Generator of possible paths for examples.""" yield os.path.join(os.path.dirname(os.path.dirname(__file__)), "examples") yield resource_filename(Requirement.parse("weblate"), "examples") @classmethod def find_merge_driver(cls, name): for path in cls.get_examples_paths(): result = os.path.join(path, name) if os.path.exists(result): return os.path.abspath(result) return None @classmethod def get_merge_driver(cls, file_format): merge_driver = None if file_format == "po": merge_driver = cls.find_merge_driver("git-merge-gettext-po") if merge_driver is None or not os.path.exists(merge_driver): return None return merge_driver def cleanup(self): """Remove not tracked files from the repository.""" raise NotImplementedError() def log_revisions(self, refspec): """Log revisions for given refspec. This is not universal as refspec is different per vcs. """ raise NotImplementedError() def list_changed_files(self, refspec): """List changed files for given refspec. This is not universal as refspec is different per vcs. """ lines = self.execute( self._cmd_list_changed_files + [refspec], needs_lock=False, merge_err=False ).splitlines() return self.parse_changed_files(lines) def parse_changed_files(self, lines): """Parses output with chanaged files.""" raise NotImplementedError() def list_upstream_changed_files(self): """List files missing upstream.""" return list( self.list_changed_files( self.ref_to_remote.format(self.get_remote_branch_name()) ) ) def get_remote_branch_name(self): return f"origin/{self.branch}" def list_remote_branches(self): return []
from datetime import timedelta import logging import forecastio from requests.exceptions import ConnectionError as ConnectError, HTTPError, Timeout import voluptuous as vol from homeassistant.components.weather import ( ATTR_FORECAST_CONDITION, ATTR_FORECAST_PRECIPITATION, ATTR_FORECAST_TEMP, ATTR_FORECAST_TEMP_LOW, ATTR_FORECAST_TIME, ATTR_FORECAST_WIND_BEARING, ATTR_FORECAST_WIND_SPEED, PLATFORM_SCHEMA, WeatherEntity, ) from homeassistant.const import ( CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_MODE, CONF_NAME, PRESSURE_HPA, PRESSURE_INHG, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) import homeassistant.helpers.config_validation as cv from homeassistant.util import Throttle from homeassistant.util.dt import utc_from_timestamp from homeassistant.util.pressure import convert as convert_pressure _LOGGER = logging.getLogger(__name__) ATTRIBUTION = "Powered by Dark Sky" FORECAST_MODE = ["hourly", "daily"] MAP_CONDITION = { "clear-day": "sunny", "clear-night": "clear-night", "rain": "rainy", "snow": "snowy", "sleet": "snowy-rainy", "wind": "windy", "fog": "fog", "cloudy": "cloudy", "partly-cloudy-day": "partlycloudy", "partly-cloudy-night": "partlycloudy", "hail": "hail", "thunderstorm": "lightning", "tornado": None, } CONF_UNITS = "units" DEFAULT_NAME = "Dark Sky" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_API_KEY): cv.string, vol.Optional(CONF_LATITUDE): cv.latitude, vol.Optional(CONF_LONGITUDE): cv.longitude, vol.Optional(CONF_MODE, default="hourly"): vol.In(FORECAST_MODE), vol.Optional(CONF_UNITS): vol.In(["auto", "si", "us", "ca", "uk", "uk2"]), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, } ) MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=3) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Dark Sky weather.""" latitude = config.get(CONF_LATITUDE, hass.config.latitude) longitude = config.get(CONF_LONGITUDE, hass.config.longitude) name = config.get(CONF_NAME) mode = config.get(CONF_MODE) units = config.get(CONF_UNITS) if not units: units = "ca" if hass.config.units.is_metric else "us" dark_sky = DarkSkyData(config.get(CONF_API_KEY), latitude, longitude, units) add_entities([DarkSkyWeather(name, dark_sky, mode)], True) class DarkSkyWeather(WeatherEntity): """Representation of a weather condition.""" def __init__(self, name, dark_sky, mode): """Initialize Dark Sky weather.""" self._name = name self._dark_sky = dark_sky self._mode = mode self._ds_data = None self._ds_currently = None self._ds_hourly = None self._ds_daily = None @property def available(self): """Return if weather data is available from Dark Sky.""" return self._ds_data is not None @property def attribution(self): """Return the attribution.""" return ATTRIBUTION @property def name(self): """Return the name of the sensor.""" return self._name @property def temperature(self): """Return the temperature.""" return self._ds_currently.get("temperature") @property def temperature_unit(self): """Return the unit of measurement.""" if self._dark_sky.units is None: return None return TEMP_FAHRENHEIT if "us" in self._dark_sky.units else TEMP_CELSIUS @property def humidity(self): """Return the humidity.""" return round(self._ds_currently.get("humidity") * 100.0, 2) @property def wind_speed(self): """Return the wind speed.""" return self._ds_currently.get("windSpeed") @property def wind_bearing(self): """Return the wind bearing.""" return self._ds_currently.get("windBearing") @property def ozone(self): """Return the ozone level.""" return self._ds_currently.get("ozone") @property 
def pressure(self): """Return the pressure.""" pressure = self._ds_currently.get("pressure") if "us" in self._dark_sky.units: return round(convert_pressure(pressure, PRESSURE_HPA, PRESSURE_INHG), 2) return pressure @property def visibility(self): """Return the visibility.""" return self._ds_currently.get("visibility") @property def condition(self): """Return the weather condition.""" return MAP_CONDITION.get(self._ds_currently.get("icon")) @property def forecast(self): """Return the forecast array.""" # Per conversation with Joshua Reyes of Dark Sky, to get the total # forecasted precipitation, you have to multiple the intensity by # the hours for the forecast interval def calc_precipitation(intensity, hours): amount = None if intensity is not None: amount = round((intensity * hours), 1) return amount if amount > 0 else None data = None if self._mode == "daily": data = [ { ATTR_FORECAST_TIME: utc_from_timestamp( entry.d.get("time") ).isoformat(), ATTR_FORECAST_TEMP: entry.d.get("temperatureHigh"), ATTR_FORECAST_TEMP_LOW: entry.d.get("temperatureLow"), ATTR_FORECAST_PRECIPITATION: calc_precipitation( entry.d.get("precipIntensity"), 24 ), ATTR_FORECAST_WIND_SPEED: entry.d.get("windSpeed"), ATTR_FORECAST_WIND_BEARING: entry.d.get("windBearing"), ATTR_FORECAST_CONDITION: MAP_CONDITION.get(entry.d.get("icon")), } for entry in self._ds_daily.data ] else: data = [ { ATTR_FORECAST_TIME: utc_from_timestamp( entry.d.get("time") ).isoformat(), ATTR_FORECAST_TEMP: entry.d.get("temperature"), ATTR_FORECAST_PRECIPITATION: calc_precipitation( entry.d.get("precipIntensity"), 1 ), ATTR_FORECAST_CONDITION: MAP_CONDITION.get(entry.d.get("icon")), } for entry in self._ds_hourly.data ] return data def update(self): """Get the latest data from Dark Sky.""" self._dark_sky.update() self._ds_data = self._dark_sky.data currently = self._dark_sky.currently self._ds_currently = currently.d if currently else {} self._ds_hourly = self._dark_sky.hourly self._ds_daily = self._dark_sky.daily class DarkSkyData: """Get the latest data from Dark Sky.""" def __init__(self, api_key, latitude, longitude, units): """Initialize the data object.""" self._api_key = api_key self.latitude = latitude self.longitude = longitude self.requested_units = units self.data = None self.currently = None self.hourly = None self.daily = None self._connect_error = False @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Get the latest data from Dark Sky.""" try: self.data = forecastio.load_forecast( self._api_key, self.latitude, self.longitude, units=self.requested_units ) self.currently = self.data.currently() self.hourly = self.data.hourly() self.daily = self.data.daily() if self._connect_error: self._connect_error = False _LOGGER.info("Reconnected to Dark Sky") except (ConnectError, HTTPError, Timeout, ValueError) as error: if not self._connect_error: self._connect_error = True _LOGGER.error("Unable to connect to Dark Sky. %s", error) self.data = None @property def units(self): """Get the unit system of returned data.""" if self.data is None: return None return self.data.json.get("flags").get("units")
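# A minimal, standalone sketch of the forecast precipitation calculation used
# above, with the None case guarded explicitly. As noted in the original
# comment, the total precipitation for an interval is intensity multiplied by
# the number of hours; zero amounts are reported as None. Hypothetical helper,
# not part of the Dark Sky platform itself.
from typing import Optional


def calc_precipitation(intensity: Optional[float], hours: int) -> Optional[float]:
    if intensity is None:
        return None
    amount = round(intensity * hours, 1)
    return amount if amount > 0 else None


# calc_precipitation(0.3, 24) -> 7.2; calc_precipitation(0.0, 1) -> None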
import subprocess from typing import Optional from django.conf import settings from django.core.cache import cache from siphashc import siphash from weblate.trans.util import get_clean_env from weblate.utils.checks import weblate_check from weblate.utils.errors import report_error GPG_ERRORS = {} def check_gpg(app_configs, **kwargs): get_gpg_public_key() template = "{}: {}" return [ weblate_check("weblate.C036", template.format(key, message)) for key, message in GPG_ERRORS.items() ] def gpg_error(name: str, error: Exception, silent: bool = False): report_error(cause=name) if not silent: GPG_ERRORS[name] = "{}\n{}\n{}".format( error, getattr(error, "stderr", ""), getattr(error, "stdout", "") ) def generate_gpg_key() -> Optional[str]: try: subprocess.run( [ "gpg", "--batch", "--pinentry-mode", "loopback", "--passphrase", "", "--quick-generate-key", settings.WEBLATE_GPG_IDENTITY, settings.WEBLATE_GPG_ALGO, "default", "never", ], env=get_clean_env(), stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, check=True, ) return get_gpg_key() except (subprocess.CalledProcessError, OSError) as error: gpg_error("GPG key generating", error) return None def get_gpg_key(silent=False) -> Optional[str]: try: result = subprocess.run( [ "gpg", "--batch", "--with-colons", "--list-secret-keys", settings.WEBLATE_GPG_IDENTITY, ], stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=get_clean_env(), universal_newlines=True, check=True, ) for line in result.stdout.splitlines(): if not line.startswith("fpr:"): continue return line.split(":")[9] return None except (subprocess.CalledProcessError, OSError) as error: gpg_error("GPG key listing", error, silent) return None def gpg_cache_key(suffix: str) -> str: return "gpg:{}:{}".format( siphash("Weblate GPG hash", settings.WEBLATE_GPG_IDENTITY), suffix ) def get_gpg_sign_key() -> Optional[str]: """High level wrapper to cache key ID.""" if not settings.WEBLATE_GPG_IDENTITY: return None cache_key = gpg_cache_key("id") keyid = cache.get(cache_key) if keyid is None: keyid = get_gpg_key(silent=True) if keyid is None: keyid = generate_gpg_key() if keyid: cache.set(cache_key, keyid, 7 * 86400) return keyid def get_gpg_public_key() -> Optional[str]: key = get_gpg_sign_key() if key is None: return None cache_key = gpg_cache_key("public") data = cache.get(cache_key) if not data: try: result = subprocess.run( ["gpg", "--batch", "-armor", "--export", key], env=get_clean_env(), stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, check=True, ) data = result.stdout cache.set(cache_key, data, 7 * 86400) except (subprocess.CalledProcessError, OSError) as error: gpg_error("GPG key public", error) return None return data
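# A small sketch of the ``--with-colons`` parsing done by get_gpg_key() above:
# the fingerprint record starts with "fpr:" and the fingerprint sits at field
# index 9 after splitting on colons. The sample listing below is shortened and
# hypothetical, not real gpg output for any actual key.
from typing import Optional


def parse_fingerprint(listing: str) -> Optional[str]:
    for line in listing.splitlines():
        if line.startswith("fpr:"):
            return line.split(":")[9]
    return None


SAMPLE_LISTING = "fpr:::::::::0123456789ABCDEF0123456789ABCDEF01234567:\n"
assert parse_fingerprint(SAMPLE_LISTING) == "0123456789ABCDEF0123456789ABCDEF01234567"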
import numpy as np from scipy import linalg from .constants import FIFF def get_current_comp(info): """Get the current compensation in effect in the data.""" comp = None first_comp = -1 for k, chan in enumerate(info['chs']): if chan['kind'] == FIFF.FIFFV_MEG_CH: comp = int(chan['coil_type']) >> 16 if first_comp < 0: first_comp = comp elif comp != first_comp: raise ValueError('Compensation is not set equally on ' 'all MEG channels') return comp def set_current_comp(info, comp): """Set the current compensation in effect in the data.""" comp_now = get_current_comp(info) for k, chan in enumerate(info['chs']): if chan['kind'] == FIFF.FIFFV_MEG_CH: rem = chan['coil_type'] - (comp_now << 16) chan['coil_type'] = int(rem + (comp << 16)) def _make_compensator(info, grade): """Auxiliary function for make_compensator.""" for k in range(len(info['comps'])): if info['comps'][k]['kind'] == grade: this_data = info['comps'][k]['data'] # Create the preselector presel = np.zeros((this_data['ncol'], info['nchan'])) for col, col_name in enumerate(this_data['col_names']): ind = [k for k, ch in enumerate(info['ch_names']) if ch == col_name] if len(ind) == 0: raise ValueError('Channel %s is not available in ' 'data' % col_name) elif len(ind) > 1: raise ValueError('Ambiguous channel %s' % col_name) presel[col, ind[0]] = 1.0 # Create the postselector (zero entries for channels not found) postsel = np.zeros((info['nchan'], this_data['nrow'])) for c, ch_name in enumerate(info['ch_names']): ind = [k for k, ch in enumerate(this_data['row_names']) if ch == ch_name] if len(ind) > 1: raise ValueError('Ambiguous channel %s' % ch_name) elif len(ind) == 1: postsel[c, ind[0]] = 1.0 # else, don't use it at all (postsel[c, ?] = 0.0) by allocation this_comp = np.dot(postsel, np.dot(this_data['data'], presel)) return this_comp raise ValueError('Desired compensation matrix (grade = %d) not' ' found' % grade) def make_compensator(info, from_, to, exclude_comp_chs=False): """Return compensation matrix eg. for CTF system. Create a compensation matrix to bring the data from one compensation state to another. Parameters ---------- info : dict The measurement info. from_ : int Compensation in the input data. to : int Desired compensation in the output. exclude_comp_chs : bool Exclude compensation channels from the output. Returns ------- comp : array | None. The compensation matrix. Might be None if no compensation is needed (from == to). 
""" if from_ == to: return None # s_orig = s_from + C1*s_from = (I + C1)*s_from # s_to = s_orig - C2*s_orig = (I - C2)*s_orig # s_to = (I - C2)*(I + C1)*s_from = (I + C1 - C2 - C2*C1)*s_from if from_ != 0: C1 = _make_compensator(info, from_) comp_from_0 = linalg.inv(np.eye(info['nchan']) - C1) if to != 0: C2 = _make_compensator(info, to) comp_0_to = np.eye(info['nchan']) - C2 if from_ != 0: if to != 0: # This is mathematically equivalent, but has higher numerical # error than using the inverse to always go to zero and back # comp = np.eye(info['nchan']) + C1 - C2 - np.dot(C2, C1) comp = np.dot(comp_0_to, comp_from_0) else: comp = comp_from_0 else: # from == 0, to != 0 guaranteed here comp = comp_0_to if exclude_comp_chs: pick = [k for k, c in enumerate(info['chs']) if c['kind'] != FIFF.FIFFV_REF_MEG_CH] if len(pick) == 0: raise ValueError('Nothing remains after excluding the ' 'compensation channels') comp = comp[pick, :] return comp # @verbose # def compensate_to(data, to, verbose=None): # """ # % # % [newdata] = mne_compensate_to(data,to) # % # % Apply compensation to the data as desired # % # """ # # newdata = data.copy() # now = get_current_comp(newdata['info']) # # # Are we there already? # if now == to: # logger.info('Data are already compensated as desired') # # # Make the compensator and apply it to all data sets # comp = make_compensator(newdata['info'], now, to) # for k in range(len(newdata['evoked'])): # newdata['evoked'][k]['epochs'] = np.dot(comp, # newdata['evoked'][k]['epochs']) # # # Update the compensation info in the channel descriptors # newdata['info']['chs'] = set_current_comp(newdata['info']['chs'], to) # return newdata # def set_current_comp(chs, value): # """Set the current compensation value in the channel info structures # """ # new_chs = chs # # lower_half = int('FFFF', 16) # hex2dec('FFFF') # for k in range(len(chs)): # if chs[k]['kind'] == FIFF.FIFFV_MEG_CH: # coil_type = float(chs[k]['coil_type']) & lower_half # new_chs[k]['coil_type'] = int(coil_type | (value << 16)) # # return new_chs
import django.db.models.deletion from django.conf import settings from django.db import migrations, models def create_index(apps, schema_editor): vendor = schema_editor.connection.vendor if vendor == "postgresql": schema_editor.execute( "CREATE INDEX memory_source_fulltext ON memory_memory " "USING GIN (to_tsvector('english', source))" ) schema_editor.execute( "CREATE INDEX memory_source_index ON memory_memory USING HASH (source)" ) schema_editor.execute( "CREATE INDEX memory_target_index ON memory_memory USING HASH (target)" ) schema_editor.execute( "CREATE INDEX memory_origin_index ON memory_memory USING HASH (origin)" ) elif vendor == "mysql": schema_editor.execute( "CREATE FULLTEXT INDEX memory_source_fulltext ON memory_memory(source)" ) schema_editor.execute( "CREATE INDEX memory_lookup_index ON " "memory_memory(source(255), target(255), origin(255))" ) else: raise Exception(f"Unsupported database: {vendor}") def drop_index(apps, schema_editor): vendor = schema_editor.connection.vendor if vendor == "postgresql": schema_editor.execute("DROP INDEX memory_source_fulltext") schema_editor.execute("DROP INDEX memory_source_index") schema_editor.execute("DROP INDEX memory_target_index") schema_editor.execute("DROP INDEX memory_origin_index") elif vendor == "mysql": schema_editor.execute( "ALTER TABLE memory_memory DROP INDEX memory_source_fulltext" ) schema_editor.execute( "ALTER TABLE memory_memory DROP INDEX memory_lookup_index" ) else: raise Exception(f"Unsupported database: {vendor}") class Migration(migrations.Migration): replaces = [ ("memory", "0001_squashed_0003_auto_20180321_1554"), ("memory", "0002_memory"), ("memory", "0003_migrate_memory"), ("memory", "0004_memory_index"), ("memory", "0005_auto_20200310_0810"), ("memory", "0006_memory_update"), ] initial = True dependencies = [ ("trans", "0063_auto_20200305_2202"), ("weblate_auth", "0006_auto_20190905_1139"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ("lang", "0005_auto_20200212_1239"), ("lang", "0006_auto_20200309_1436"), ] operations = [ migrations.CreateModel( name="Memory", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("source", models.TextField()), ("target", models.TextField()), ("origin", models.TextField()), ("from_file", models.BooleanField(db_index=True, default=False)), ("shared", models.BooleanField(db_index=True, default=False)), ( "project", models.ForeignKey( blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to="trans.Project", ), ), ( "source_language", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, related_name="memory_source_set", to="lang.Language", ), ), ( "target_language", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, related_name="memory_target_set", to="lang.Language", ), ), ( "user", models.ForeignKey( blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, ), ), ], ), migrations.RunPython( code=create_index, reverse_code=drop_index, atomic=False, ), ]
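# A minimal sketch of the pattern used above: vendor-specific raw SQL wrapped
# in RunPython with a matching reverse operation and atomic=False. The table
# and index names are hypothetical, not part of the memory app.
from django.db import migrations


def create_example_index(apps, schema_editor):
    if schema_editor.connection.vendor == "postgresql":
        schema_editor.execute(
            "CREATE INDEX example_source_index ON example_table USING HASH (source)"
        )


def drop_example_index(apps, schema_editor):
    if schema_editor.connection.vendor == "postgresql":
        schema_editor.execute("DROP INDEX example_source_index")


class Migration(migrations.Migration):
    dependencies = []
    operations = [
        migrations.RunPython(
            code=create_example_index, reverse_code=drop_example_index, atomic=False
        ),
    ]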
from datetime import datetime as dt import logging from typing import List, Optional from homeassistant.components.climate import ClimateEntity from homeassistant.components.climate.const import ( HVAC_MODE_AUTO, HVAC_MODE_HEAT, HVAC_MODE_OFF, PRESET_AWAY, PRESET_ECO, PRESET_HOME, PRESET_NONE, SUPPORT_PRESET_MODE, SUPPORT_TARGET_TEMPERATURE, ) from homeassistant.const import PRECISION_TENTHS from homeassistant.helpers.typing import ConfigType, HomeAssistantType import homeassistant.util.dt as dt_util from . import ( ATTR_DURATION_DAYS, ATTR_DURATION_HOURS, ATTR_DURATION_UNTIL, ATTR_SYSTEM_MODE, ATTR_ZONE_TEMP, CONF_LOCATION_IDX, SVC_RESET_ZONE_OVERRIDE, SVC_SET_SYSTEM_MODE, EvoChild, EvoDevice, ) from .const import ( DOMAIN, EVO_AUTO, EVO_AUTOECO, EVO_AWAY, EVO_CUSTOM, EVO_DAYOFF, EVO_FOLLOW, EVO_HEATOFF, EVO_PERMOVER, EVO_RESET, EVO_TEMPOVER, ) _LOGGER = logging.getLogger(__name__) PRESET_RESET = "Reset" # reset all child zones to EVO_FOLLOW PRESET_CUSTOM = "Custom" HA_HVAC_TO_TCS = {HVAC_MODE_OFF: EVO_HEATOFF, HVAC_MODE_HEAT: EVO_AUTO} TCS_PRESET_TO_HA = { EVO_AWAY: PRESET_AWAY, EVO_CUSTOM: PRESET_CUSTOM, EVO_AUTOECO: PRESET_ECO, EVO_DAYOFF: PRESET_HOME, EVO_RESET: PRESET_RESET, } # EVO_AUTO: None, HA_PRESET_TO_TCS = {v: k for k, v in TCS_PRESET_TO_HA.items()} EVO_PRESET_TO_HA = { EVO_FOLLOW: PRESET_NONE, EVO_TEMPOVER: "temporary", EVO_PERMOVER: "permanent", } HA_PRESET_TO_EVO = {v: k for k, v in EVO_PRESET_TO_HA.items()} STATE_ATTRS_TCS = ["systemId", "activeFaults", "systemModeStatus"] STATE_ATTRS_ZONES = ["zoneId", "activeFaults", "setpointStatus", "temperatureStatus"] async def async_setup_platform( hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None ) -> None: """Create the evohome Controller, and its Zones, if any.""" if discovery_info is None: return broker = hass.data[DOMAIN]["broker"] _LOGGER.debug( "Found the Location/Controller (%s), id=%s, name=%s (location_idx=%s)", broker.tcs.modelType, broker.tcs.systemId, broker.tcs.location.name, broker.params[CONF_LOCATION_IDX], ) controller = EvoController(broker, broker.tcs) zones = [] for zone in broker.tcs.zones.values(): if zone.modelType == "HeatingZone" or zone.zoneType == "Thermostat": _LOGGER.debug( "Adding: %s (%s), id=%s, name=%s", zone.zoneType, zone.modelType, zone.zoneId, zone.name, ) new_entity = EvoZone(broker, zone) zones.append(new_entity) else: _LOGGER.warning( "Ignoring: %s (%s), id=%s, name=%s: unknown/invalid zone type, " "report as an issue if you feel this zone type should be supported", zone.zoneType, zone.modelType, zone.zoneId, zone.name, ) async_add_entities([controller] + zones, update_before_add=True) class EvoClimateEntity(EvoDevice, ClimateEntity): """Base for an evohome Climate device.""" def __init__(self, evo_broker, evo_device) -> None: """Initialize a Climate device.""" super().__init__(evo_broker, evo_device) self._preset_modes = None @property def hvac_modes(self) -> List[str]: """Return a list of available hvac operation modes.""" return list(HA_HVAC_TO_TCS) @property def preset_modes(self) -> Optional[List[str]]: """Return a list of available preset modes.""" return self._preset_modes class EvoZone(EvoChild, EvoClimateEntity): """Base for a Honeywell TCC Zone.""" def __init__(self, evo_broker, evo_device) -> None: """Initialize a Honeywell TCC Zone.""" super().__init__(evo_broker, evo_device) if evo_device.modelType.startswith("VisionProWifi"): # this system does not have a distinct ID for the zone self._unique_id = f"{evo_device.zoneId}z" else: self._unique_id = 
evo_device.zoneId self._name = evo_device.name self._icon = "mdi:radiator" if evo_broker.client_v1: self._precision = PRECISION_TENTHS else: self._precision = self._evo_device.setpointCapabilities["valueResolution"] self._preset_modes = list(HA_PRESET_TO_EVO) self._supported_features = SUPPORT_PRESET_MODE | SUPPORT_TARGET_TEMPERATURE async def async_zone_svc_request(self, service: dict, data: dict) -> None: """Process a service request (setpoint override) for a zone.""" if service == SVC_RESET_ZONE_OVERRIDE: await self._evo_broker.call_client_api( self._evo_device.cancel_temp_override() ) return # otherwise it is SVC_SET_ZONE_OVERRIDE temperature = max(min(data[ATTR_ZONE_TEMP], self.max_temp), self.min_temp) if ATTR_DURATION_UNTIL in data: duration = data[ATTR_DURATION_UNTIL] if duration.total_seconds() == 0: await self._update_schedule() until = dt_util.parse_datetime(self.setpoints.get("next_sp_from", "")) else: until = dt_util.now() + data[ATTR_DURATION_UNTIL] else: until = None # indefinitely until = dt_util.as_utc(until) if until else None await self._evo_broker.call_client_api( self._evo_device.set_temperature(temperature, until=until) ) @property def hvac_mode(self) -> str: """Return the current operating mode of a Zone.""" if self._evo_tcs.systemModeStatus["mode"] in [EVO_AWAY, EVO_HEATOFF]: return HVAC_MODE_AUTO is_off = self.target_temperature <= self.min_temp return HVAC_MODE_OFF if is_off else HVAC_MODE_HEAT @property def target_temperature(self) -> float: """Return the target temperature of a Zone.""" return self._evo_device.setpointStatus["targetHeatTemperature"] @property def preset_mode(self) -> Optional[str]: """Return the current preset mode, e.g., home, away, temp.""" if self._evo_tcs.systemModeStatus["mode"] in [EVO_AWAY, EVO_HEATOFF]: return TCS_PRESET_TO_HA.get(self._evo_tcs.systemModeStatus["mode"]) return EVO_PRESET_TO_HA.get(self._evo_device.setpointStatus["setpointMode"]) @property def min_temp(self) -> float: """Return the minimum target temperature of a Zone. The default is 5, but is user-configurable within 5-35 (in Celsius). """ return self._evo_device.setpointCapabilities["minHeatSetpoint"] @property def max_temp(self) -> float: """Return the maximum target temperature of a Zone. The default is 35, but is user-configurable within 5-35 (in Celsius). """ return self._evo_device.setpointCapabilities["maxHeatSetpoint"] async def async_set_temperature(self, **kwargs) -> None: """Set a new target temperature.""" temperature = kwargs["temperature"] until = kwargs.get("until") if until is None: if self._evo_device.setpointStatus["setpointMode"] == EVO_FOLLOW: await self._update_schedule() until = dt_util.parse_datetime(self.setpoints.get("next_sp_from", "")) elif self._evo_device.setpointStatus["setpointMode"] == EVO_TEMPOVER: until = dt_util.parse_datetime(self._evo_device.setpointStatus["until"]) until = dt_util.as_utc(until) if until else None await self._evo_broker.call_client_api( self._evo_device.set_temperature(temperature, until=until) ) async def async_set_hvac_mode(self, hvac_mode: str) -> None: """Set a Zone to one of its native EVO_* operating modes. Zones inherit their _effective_ operating mode from their Controller. Usually, Zones are in 'FollowSchedule' mode, where their setpoints are a function of their own schedule and the Controller's operating mode, e.g. 'AutoWithEco' mode means their setpoint is (by default) 3C less than scheduled. 
However, Zones can _override_ these setpoints, either indefinitely, 'PermanentOverride' mode, or for a set period of time, 'TemporaryOverride' mode (after which they will revert back to 'FollowSchedule' mode). Finally, some of the Controller's operating modes are _forced_ upon the Zones, regardless of any override mode, e.g. 'HeatingOff', Zones to (by default) 5C, and 'Away', Zones to (by default) 12C. """ if hvac_mode == HVAC_MODE_OFF: await self._evo_broker.call_client_api( self._evo_device.set_temperature(self.min_temp, until=None) ) else: # HVAC_MODE_HEAT await self._evo_broker.call_client_api( self._evo_device.cancel_temp_override() ) async def async_set_preset_mode(self, preset_mode: Optional[str]) -> None: """Set the preset mode; if None, then revert to following the schedule.""" evo_preset_mode = HA_PRESET_TO_EVO.get(preset_mode, EVO_FOLLOW) if evo_preset_mode == EVO_FOLLOW: await self._evo_broker.call_client_api( self._evo_device.cancel_temp_override() ) return temperature = self._evo_device.setpointStatus["targetHeatTemperature"] if evo_preset_mode == EVO_TEMPOVER: await self._update_schedule() until = dt_util.parse_datetime(self.setpoints.get("next_sp_from", "")) else: # EVO_PERMOVER until = None until = dt_util.as_utc(until) if until else None await self._evo_broker.call_client_api( self._evo_device.set_temperature(temperature, until=until) ) async def async_update(self) -> None: """Get the latest state data for a Zone.""" await super().async_update() for attr in STATE_ATTRS_ZONES: self._device_state_attrs[attr] = getattr(self._evo_device, attr) class EvoController(EvoClimateEntity): """Base for a Honeywell TCC Controller/Location. The Controller (aka TCS, temperature control system) is the parent of all the child (CH/DHW) devices. It is implemented as a Climate entity to expose the controller's operating modes to HA. It is assumed there is only one TCS per location, and they are thus synonymous. """ def __init__(self, evo_broker, evo_device) -> None: """Initialize a Honeywell TCC Controller/Location.""" super().__init__(evo_broker, evo_device) self._unique_id = evo_device.systemId self._name = evo_device.location.name self._icon = "mdi:thermostat" self._precision = PRECISION_TENTHS modes = [m["systemMode"] for m in evo_broker.config["allowedSystemModes"]] self._preset_modes = [ TCS_PRESET_TO_HA[m] for m in modes if m in list(TCS_PRESET_TO_HA) ] self._supported_features = SUPPORT_PRESET_MODE if self._preset_modes else 0 async def async_tcs_svc_request(self, service: dict, data: dict) -> None: """Process a service request (system mode) for a controller. Data validation is not required, it will have been done upstream. 
""" if service == SVC_SET_SYSTEM_MODE: mode = data[ATTR_SYSTEM_MODE] else: # otherwise it is SVC_RESET_SYSTEM mode = EVO_RESET if ATTR_DURATION_DAYS in data: until = dt_util.start_of_local_day() until += data[ATTR_DURATION_DAYS] elif ATTR_DURATION_HOURS in data: until = dt_util.now() + data[ATTR_DURATION_HOURS] else: until = None await self._set_tcs_mode(mode, until=until) async def _set_tcs_mode(self, mode: str, until: Optional[dt] = None) -> None: """Set a Controller to any of its native EVO_* operating modes.""" until = dt_util.as_utc(until) if until else None await self._evo_broker.call_client_api( self._evo_tcs.set_status(mode, until=until) ) @property def hvac_mode(self) -> str: """Return the current operating mode of a Controller.""" tcs_mode = self._evo_tcs.systemModeStatus["mode"] return HVAC_MODE_OFF if tcs_mode == EVO_HEATOFF else HVAC_MODE_HEAT @property def current_temperature(self) -> Optional[float]: """Return the average current temperature of the heating Zones. Controllers do not have a current temp, but one is expected by HA. """ temps = [ z.temperatureStatus["temperature"] for z in self._evo_tcs.zones.values() if z.temperatureStatus["isAvailable"] ] return round(sum(temps) / len(temps), 1) if temps else None @property def preset_mode(self) -> Optional[str]: """Return the current preset mode, e.g., home, away, temp.""" return TCS_PRESET_TO_HA.get(self._evo_tcs.systemModeStatus["mode"]) @property def min_temp(self) -> float: """Return None as Controllers don't have a target temperature.""" return None @property def max_temp(self) -> float: """Return None as Controllers don't have a target temperature.""" return None async def async_set_temperature(self, **kwargs) -> None: """Raise exception as Controllers don't have a target temperature.""" raise NotImplementedError("Evohome Controllers don't have target temperatures.") async def async_set_hvac_mode(self, hvac_mode: str) -> None: """Set an operating mode for a Controller.""" await self._set_tcs_mode(HA_HVAC_TO_TCS.get(hvac_mode)) async def async_set_preset_mode(self, preset_mode: Optional[str]) -> None: """Set the preset mode; if None, then revert to 'Auto' mode.""" await self._set_tcs_mode(HA_PRESET_TO_TCS.get(preset_mode, EVO_AUTO)) async def async_update(self) -> None: """Get the latest state data for a Controller.""" self._device_state_attrs = {} attrs = self._device_state_attrs for attr in STATE_ATTRS_TCS: if attr == "activeFaults": attrs["activeSystemFaults"] = getattr(self._evo_tcs, attr) else: attrs[attr] = getattr(self._evo_tcs, attr)
from __future__ import absolute_import from __future__ import division from __future__ import print_function __author__ = '[email protected]' import json import logging from multiprocessing import Process from multiprocessing import Queue import time from absl import app from absl import flags import unistream_profile_driver flags.DEFINE_list('profile_list', None, 'List of profiles. Each will be run on ' 'its own process to simulate ' 'concurrency.') flags.mark_flags_as_required(['profile_list']) FLAGS = flags.FLAGS def process_profile(profile, response_q): """Method to execute a profile (list of sql scripts) on a cluster. Args: profile: The profile to run. response_q: Communication channel between processes. """ profile_output = unistream_profile_driver.execute_profile(profile) response_q.put([profile, profile_output]) def manage_streams(): """Method to launch concurrent execution of multiple profiles. Returns: A dictionary containing 1. wall_time: Total time taken for all the profiles to complete execution. 2. profile details: 2.1. profile_execution_time: Time taken for all scripts in the profile to complete execution. 2.2. Individual script metrics: script name and its execution time (-1 if the script fails) """ profile_handling_process_list = [] profile_performance = Queue() start_time = time.time() for profile in FLAGS.profile_list: profile_handling_process = Process(target=process_profile, args=(profile, profile_performance,)) profile_handling_process.start() profile_handling_process_list.append(profile_handling_process) for profile_handling_process in profile_handling_process_list: profile_handling_process.join() # All processes have joined, implying all profiles have been completed execution_time = round((time.time() - start_time), 2) num_profiles = len(FLAGS.profile_list) overall_performance = {} while num_profiles: temp_performance_response = profile_performance.get() profile = temp_performance_response[0] overall_performance[profile] = json.loads(temp_performance_response[1]) num_profiles -= 1 overall_performance['wall_time'] = execution_time return json.dumps(overall_performance) def main(argv): del argv print(manage_streams()) if __name__ == '__main__': logging.basicConfig(level=logging.INFO) app.run(main)
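# A minimal, self-contained sketch of the concurrency pattern used by
# manage_streams() above: one worker process per item, results funnelled back
# through a shared Queue, processes joined, then the queue drained. The worker
# here just echoes its input; all names are hypothetical.
import time
from multiprocessing import Process, Queue


def worker(name, response_q):
    response_q.put((name, f"done-{name}"))


def run_all(names):
    response_q = Queue()
    processes = [Process(target=worker, args=(name, response_q)) for name in names]
    start = time.time()
    for process in processes:
        process.start()
    for process in processes:
        process.join()
    results = {}
    while len(results) < len(names):
        name, output = response_q.get()
        results[name] = output
    results["wall_time"] = round(time.time() - start, 2)
    return results


if __name__ == "__main__":
    print(run_all(["profile_a", "profile_b"]))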
import numpy as np import pytest import itertools from tensornetwork.block_sparse.utils import ( flatten, fuse_stride_arrays, fuse_ndarrays, fuse_degeneracies, _find_best_partition, _get_strides, unique, get_dtype, get_real_dtype, intersect, collapse, expand, _intersect_ndarray) np_dtypes = [np.float64, np.complex128] np_tensordot_dtypes = [np.float64, np.complex128] def test_flatten(): listoflist = [[1, 2], [3, 4], [5]] flat = flatten(listoflist) np.testing.assert_allclose(flat, [1, 2, 3, 4, 5]) def test_fuse_stride_arrays(): dims = np.asarray([2, 3, 4, 5]) strides = np.asarray([120, 60, 20, 5, 1]) actual = fuse_stride_arrays(dims, strides) expected = fuse_ndarrays([ np.arange(0, strides[n] * dims[n], strides[n], dtype=np.uint32) for n in range(len(dims)) ]) np.testing.assert_allclose(actual, expected) def test_fuse_ndarrays(): d1 = np.asarray([0, 1]) d2 = np.asarray([2, 3, 4]) fused = fuse_ndarrays([d1, d2]) np.testing.assert_allclose(fused, [2, 3, 4, 3, 4, 5]) def test_fuse_degeneracies(): d1 = np.asarray([0, 1]) d2 = np.asarray([2, 3, 4]) fused_degeneracies = fuse_degeneracies(d1, d2) np.testing.assert_allclose(fused_degeneracies, np.kron(d1, d2)) def test_find_best_partition(): with pytest.raises(ValueError): _find_best_partition([5]) def test_find_best_partition_raises(): d = [5, 4, 5, 2, 6, 8] p = _find_best_partition(d) assert p == 3 #pylint: disable=too-many-return-statements def get_index(return_index, return_inverse, return_counts, which):#pylint: disable=inconsistent-return-statements if which == 'index': return 1 if return_index else -1 if which == 'inverse': if return_index: return 2 if return_inverse else -1 return 1 if return_inverse else -1 if which == 'counts': if return_index and return_inverse: return 3 if return_counts else -1 if return_index or return_inverse: return 2 if return_counts else -1 return 1 if return_counts else -1 @pytest.mark.parametrize('N, dtype, resdtype', [(1, np.int8, np.int8), (2, np.int8, np.int16), (2, np.int16, np.int32), (2, np.int32, np.int64), (3, np.int8, np.int32), (3, np.int16, np.int64), (4, np.int8, np.int32), (4, np.int16, np.int64), (5, np.int8, np.int64), (6, np.int8, np.int64)]) def test_collapse(N, dtype, resdtype): D = 10000 a = np.random.randint(-5, 5, (D, N), dtype=dtype) collapsed = collapse(a) if N in (1, 2, 4): expected = np.squeeze(a.view(resdtype)) elif N == 3: expected = np.squeeze( np.concatenate([a, np.zeros((D, 1), dtype=dtype)], axis=1).view(resdtype)) elif N > 4: expected = np.squeeze( np.concatenate([a, np.zeros((D, 8 - N), dtype=dtype)], axis=1).view(resdtype)) np.testing.assert_allclose(collapsed, expected) def test_collapse_2(): N = 2 dtype = np.int64 D = 10000 a = np.random.randint(-5, 5, (D, N), dtype=dtype) collapsed = collapse(a) np.testing.assert_allclose(collapsed, a) @pytest.mark.parametrize('N, dtype', [(1, np.int8), (1, np.int16), (1, np.int32), (1, np.int64), (2, np.int8), (2, np.int16), (2, np.int32), (3, np.int8), (3, np.int16), (4, np.int8), (4, np.int16), (4, np.int32)]) def test_collapse_expand(N, dtype): D = 10000 expected = np.random.randint(-5, 5, (D, N), dtype=dtype) collapsed = collapse(expected) actual = expand(collapsed, dtype, original_width=N, original_ndim=2) np.testing.assert_allclose(expected, actual) @pytest.mark.parametrize('N, label_dtype, D', [(1, np.int8, 1000), (1, np.int16, 1000), (2, np.int8, 100), (2, np.int16, 1000), (3, np.int8, 100), (3, np.int16, 10000), (4, np.int8, 100), (4, np.int16, 10000), (4, np.int32, 100000)]) @pytest.mark.parametrize('return_index', [True, False]) 
@pytest.mark.parametrize('return_inverse', [True, False]) @pytest.mark.parametrize('return_counts', [True, False]) def test_unique(N, label_dtype, D, return_index, return_inverse, return_counts): a = np.random.randint(-10, 10, (D, N), dtype=np.int16) expected = np.unique(a, return_index, return_inverse, return_counts, axis=0) actual = unique( a, return_index, return_inverse, return_counts, label_dtype=label_dtype) def test_array(act, exp): if N != 3: t1 = np.squeeze(exp.view(get_dtype(2 * N))) t2 = np.squeeze(act.view(get_dtype(2 * N))) ordact = np.argsort(t2) ordexp = np.argsort(t1) else: t1 = np.squeeze( np.concatenate([exp, np.zeros((exp.shape[0], 1), dtype=np.int16)], axis=1).view(get_dtype(2 * N))) t2 = np.squeeze( np.concatenate([act, np.zeros((act.shape[0], 1), dtype=np.int16)], axis=1).view(get_dtype(2 * N))) ordact = np.argsort(t2) ordexp = np.argsort(t1) np.testing.assert_allclose(t1[ordexp], t2[ordact]) return ordact, ordexp if not any([return_index, return_inverse, return_counts]): test_array(actual, expected) else: ordact, ordexp = test_array(actual[0], expected[0]) if return_index: ind = get_index(return_index, return_inverse, return_counts, 'index') np.testing.assert_allclose(actual[ind][ordact], expected[ind][ordexp]) if return_inverse: ind = get_index(return_index, return_inverse, return_counts, 'inverse') mapact = np.zeros(len(ordact), dtype=np.int16) mapact[ordact] = np.arange(len(ordact), dtype=np.int16) mapexp = np.zeros(len(ordexp), dtype=np.int16) mapexp[ordexp] = np.arange(len(ordexp), dtype=np.int16) np.testing.assert_allclose(mapact[actual[ind]], mapexp[expected[ind]]) assert actual[ind].dtype == label_dtype if return_counts: ind = get_index(return_index, return_inverse, return_counts, 'counts') np.testing.assert_allclose(actual[ind][ordact], expected[ind][ordexp]) @pytest.mark.parametrize('return_index', [True, False]) @pytest.mark.parametrize('return_inverse', [True, False]) @pytest.mark.parametrize('return_counts', [True, False]) def test_unique_2(return_index, return_inverse, return_counts): N = 5 D = 1000 a = np.random.randint(-10, 10, (D, N), dtype=np.int16) expected = np.unique(a, return_index, return_inverse, return_counts, axis=0) actual = unique(a, return_index, return_inverse, return_counts) if not any([return_index, return_inverse, return_counts]): np.testing.assert_allclose(expected, actual) else: for n, e in enumerate(expected): np.testing.assert_allclose(e, actual[n]) @pytest.mark.parametrize('return_index', [True, False]) @pytest.mark.parametrize('return_inverse', [True, False]) @pytest.mark.parametrize('return_counts', [True, False]) def test_unique_1d(return_index, return_inverse, return_counts): D = 1000 a = np.random.randint(-10, 10, D, dtype=np.int16) expected = np.unique(a, return_index, return_inverse, return_counts) actual = unique(a, return_index, return_inverse, return_counts) if not any([return_index, return_inverse, return_counts]): np.testing.assert_allclose(expected, actual) else: for n, e in enumerate(expected): np.testing.assert_allclose(e, actual[n]) @pytest.mark.parametrize('dtype', [np.int8, np.int16, np.int32, np.int64]) def test_intersect_1(dtype): a = np.array([[0, 1, 2], [2, 3, 4]], dtype=dtype) b = np.array([[0, -2, 6], [2, 3, 4]], dtype=dtype) out = intersect(a, b, axis=1) np.testing.assert_allclose(np.array([[0], [2]]), out) @pytest.mark.parametrize('dtype', [np.int8, np.int16, np.int32, np.int64]) def test_intersect_2(dtype): a = np.array([[0, 1, 2], [2, 3, 4]], dtype=dtype) b = np.array([[0, -2, 6, 2], [2, 3, 4, 4]], 
dtype=dtype) out, la, lb = intersect(a, b, axis=1, return_indices=True) np.testing.assert_allclose(np.array([[0, 2], [2, 4]]), out) np.testing.assert_allclose(la, [0, 2]) np.testing.assert_allclose(lb, [0, 3]) @pytest.mark.parametrize('dtype', [np.int8, np.int16, np.int32, np.int64]) def test_intersect_3(dtype): a = np.array([0, 1, 2, 3, 4], dtype=dtype) b = np.array([0, -1, 4], dtype=dtype) out = intersect(a, b) np.testing.assert_allclose([0, 4], out) @pytest.mark.parametrize('dtype', [np.int8, np.int16, np.int32, np.int64]) def test_intersect_4(dtype): a = np.array([0, 1, 2, 3, 4], dtype=dtype) b = np.array([0, -1, 4], dtype=dtype) out, la, lb = intersect(a, b, return_indices=True) np.testing.assert_allclose([0, 4], out) np.testing.assert_allclose(la, [0, 4]) np.testing.assert_allclose(lb, [0, 2]) def test_intersect_raises(): np.random.seed(10) a = np.random.randint(0, 10, (4, 5, 1)) b = np.random.randint(0, 10, (4, 6)) with pytest.raises(ValueError, match="array ndims"): intersect(a, b, axis=0) a = np.random.randint(0, 10, (4, 5)) b = np.random.randint(0, 10, (4, 6)) with pytest.raises(ValueError, match="array widths"): intersect(a, b, axis=0) c = np.random.randint(0, 10, (3, 7)) with pytest.raises(ValueError, match="array heights"): intersect(a, c, axis=1) with pytest.raises(NotImplementedError, match="intersection can only"): intersect(a, c, axis=2) d = np.random.randint(0, 10, (3, 7, 3)) e = np.random.randint(0, 10, (3, 7, 3)) with pytest.raises(NotImplementedError, match="_intersect_ndarray is only"): intersect(d, e, axis=1) a = np.random.randint(0, 10, (4, 5), dtype=np.int16) b = np.random.randint(0, 10, (4, 6), dtype=np.int32) with pytest.raises(ValueError, match="array dtypes"): intersect(a, b, axis=0) @pytest.mark.parametrize('dtype', [np.int8, np.int16, np.int32, np.int64]) def test_intersect_5(dtype): a = np.array([[0, 2], [1, 3], [2, 4]], dtype=dtype) b = np.array([[0, 2], [-2, 3], [6, 6]], dtype=dtype) out = intersect(a, b, axis=0) np.testing.assert_allclose(np.array([[0, 2]]), out) @pytest.mark.parametrize('dtype', [np.int8, np.int16, np.int32, np.int64]) def test_intersect_6(dtype): a = np.array([[0, 2], [1, 3], [2, 4]], dtype=dtype) b = np.array([[0, 2], [-2, 3], [6, 4], [2, 4]], dtype=dtype) out, la, lb = intersect(a, b, axis=0, return_indices=True) np.testing.assert_allclose(np.array([[0, 2], [2, 4]]), out) np.testing.assert_allclose(la, [0, 2]) np.testing.assert_allclose(lb, [0, 3]) @pytest.mark.parametrize('dtype', [np.int8, np.int16, np.int32, np.int64]) def test_intersect_1d(dtype): a = np.random.randint(-5, 5, 10, dtype=dtype) b = np.random.randint(-2, 2, 8, dtype=dtype) out, la, lb = intersect(a, b, axis=0, return_indices=True) out_, la_, lb_ = np.intersect1d(a, b, return_indices=True) np.testing.assert_allclose(out, out_) np.testing.assert_allclose(la, la_) np.testing.assert_allclose(lb, lb_) def test_intersect_ndarray_1(): a = np.array([[0, 1, 2], [2, 3, 4]]) b = np.array([[0, -2, 6], [2, 3, 4]]) out = _intersect_ndarray(a, b, axis=1) np.testing.assert_allclose(np.array([[0], [2]]), out) def test_intersect_ndarray_2(): a = np.array([[0, 1, 2], [2, 3, 4]]) b = np.array([[0, -2, 6, 2], [2, 3, 4, 4]]) out, la, lb = _intersect_ndarray(a, b, axis=1, return_indices=True) np.testing.assert_allclose(np.array([[0, 2], [2, 4]]), out) np.testing.assert_allclose(la, [0, 2]) np.testing.assert_allclose(lb, [0, 3]) def test_intersect_ndarray_3(): a = np.array([0, 1, 2, 3, 4]) b = np.array([0, -1, 4]) out = _intersect_ndarray(a, b) np.testing.assert_allclose([0, 4], out) def 
test_intersect_ndarray_4(): a = np.array([0, 1, 2, 3, 4]) b = np.array([0, -1, 4]) out, la, lb = _intersect_ndarray(a, b, return_indices=True) np.testing.assert_allclose([0, 4], out) np.testing.assert_allclose(la, [0, 4]) np.testing.assert_allclose(lb, [0, 2]) def test_intersect_ndarray_5(): a = np.array([[0, 2], [1, 3], [2, 4]]) b = np.array([[0, 2], [-2, 3], [6, 6]]) out = _intersect_ndarray(a, b, axis=0) np.testing.assert_allclose(np.array([[0, 2]]), out) def test_intersect_ndarray_6(): a = np.array([[0, 2], [1, 3], [2, 4]]) b = np.array([[0, 2], [-2, 3], [6, 4], [2, 4]]) out, la, lb = _intersect_ndarray(a, b, axis=0, return_indices=True) np.testing.assert_allclose(np.array([[0, 2], [2, 4]]), out) np.testing.assert_allclose(la, [0, 2]) np.testing.assert_allclose(lb, [0, 3]) def test_intersect_ndarray_1d(): a = np.random.randint(-5, 5, 10) b = np.random.randint(-2, 2, 8) out, la, lb = _intersect_ndarray(a, b, axis=0, return_indices=True) out_, la_, lb_ = np.intersect1d(a, b, return_indices=True) np.testing.assert_allclose(out, out_) np.testing.assert_allclose(la, la_) np.testing.assert_allclose(lb, lb_) def test_intersect_ndarray_raises(): np.random.seed(10) a = np.random.randint(0, 10, (4, 5, 1)) b = np.random.randint(0, 10, (4, 6)) with pytest.raises(ValueError, match="array ndims"): _intersect_ndarray(a, b, axis=0) a = np.random.randint(0, 10, (4, 5)) b = np.random.randint(0, 10, (4, 6)) with pytest.raises(ValueError, match="array widths"): _intersect_ndarray(a, b, axis=0) with pytest.raises(NotImplementedError, match="intersection can only"): _intersect_ndarray(a, b, axis=2) d = np.random.randint(0, 10, (3, 7, 3)) e = np.random.randint(0, 10, (3, 7, 3)) with pytest.raises(NotImplementedError, match="_intersect_ndarray is only"): _intersect_ndarray(d, e, axis=1) def test_get_real_dtype(): assert get_real_dtype(np.complex128) == np.float64 assert get_real_dtype(np.complex64) == np.float32 assert get_real_dtype(np.float64) == np.float64 assert get_real_dtype(np.float32) == np.float32 def test_get_dtype(): assert get_dtype(1) == np.int8 assert get_dtype(2) == np.int16 assert get_dtype(3) == np.int32 assert get_dtype(4) == np.int32 assert get_dtype(5) == np.int64 assert get_dtype(6) == np.int64 assert get_dtype(7) == np.int64 assert get_dtype(8) == np.int64 assert get_dtype(9) == np.int64
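# A short NumPy-only sketch of the "collapse" idea the tests above exercise:
# two int8 columns are reinterpreted as a single int16 column via
# ndarray.view, so row-wise operations such as np.unique become effectively
# one-dimensional. No block_sparse imports are needed for this illustration.
import numpy as np

a = np.array([[1, 2], [1, 2], [3, 4]], dtype=np.int8)
collapsed = np.squeeze(a.view(np.int16))  # one int16 value per original row
assert collapsed.shape == (3,)
assert len(np.unique(collapsed)) == 2     # rows [1, 2] and [3, 4]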
import collections import itertools import sys from typing import List, Tuple, Dict, Set import dedupe from dedupe.core import randomPairs, randomPairsMatch, unique from dedupe.canonical import getCanonicalRep from dedupe._typing import Data, TrainingData, RecordDict, TrainingExample, Literal, RecordID def console_label(deduper: dedupe.api.ActiveMatching) -> None: # pragma: no cover ''' Train a matcher instance (Dedupe, RecordLink, or Gazetteer) from the command line. Example .. code:: python > deduper = dedupe.Dedupe(variables) > deduper.prepare_training(data) > dedupe.console_label(deduper) ''' finished = False use_previous = False fields = unique(field.field for field in deduper.data_model.primary_fields) buffer_len = 1 # Max number of previous operations examples_buffer: List[Tuple[TrainingExample, Literal['match', 'distinct', 'uncertain']]] = [] uncertain_pairs: List[TrainingExample] = [] while not finished: if use_previous: record_pair, _ = examples_buffer.pop(0) use_previous = False else: try: if not uncertain_pairs: uncertain_pairs = deduper.uncertain_pairs() record_pair = uncertain_pairs.pop() except IndexError: break n_match = (len(deduper.training_pairs['match']) + sum(label == 'match' for _, label in examples_buffer)) n_distinct = (len(deduper.training_pairs['distinct']) + sum(label == 'distinct' for _, label in examples_buffer)) for pair in record_pair: for field in fields: line = "%s : %s" % (field, pair[field]) print(line, file=sys.stderr) print(file=sys.stderr) print("{0}/10 positive, {1}/10 negative".format(n_match, n_distinct), file=sys.stderr) print('Do these records refer to the same thing?', file=sys.stderr) valid_response = False user_input = '' while not valid_response: if examples_buffer: prompt = '(y)es / (n)o / (u)nsure / (f)inished / (p)revious' valid_responses = {'y', 'n', 'u', 'f', 'p'} else: prompt = '(y)es / (n)o / (u)nsure / (f)inished' valid_responses = {'y', 'n', 'u', 'f'} print(prompt, file=sys.stderr) user_input = input() if user_input in valid_responses: valid_response = True if user_input == 'y': examples_buffer.insert(0, (record_pair, 'match')) elif user_input == 'n': examples_buffer.insert(0, (record_pair, 'distinct')) elif user_input == 'u': examples_buffer.insert(0, (record_pair, 'uncertain')) elif user_input == 'f': print('Finished labeling', file=sys.stderr) finished = True elif user_input == 'p': use_previous = True uncertain_pairs.append(record_pair) if len(examples_buffer) > buffer_len: record_pair, label = examples_buffer.pop() if label in {'distinct', 'match'}: examples: TrainingData examples = {'distinct': [], 'match': []} examples[label].append(record_pair) # type: ignore deduper.mark_pairs(examples) for record_pair, label in examples_buffer: if label in ['distinct', 'match']: exmples: TrainingData examples = {'distinct': [], 'match': []} examples[label].append(record_pair) # type: ignore deduper.mark_pairs(examples) def training_data_link(data_1: Data, data_2: Data, common_key: str, training_size: int = 50000) -> TrainingData: # pragma: nocover ''' Construct training data for consumption by the func:`mark_pairs` method from already linked datasets. Args: data_1: Dictionary of records from first dataset, where the keys are record_ids and the values are dictionaries with the keys being field names data_2: Dictionary of records from second dataset, same form as data_1 common_key: The name of the record field that uniquely identifies a match training_size: the rough limit of the number of training examples, defaults to 50000 .. 
note:: Every match must be identified by the sharing of a common key. This function assumes that if two records do not share a common key then they are distinct records. ''' identified_records: Dict[str, Tuple[List[RecordID], List[RecordID]]] identified_records = collections.defaultdict(lambda: ([], [])) matched_pairs: Set[Tuple[RecordID, RecordID]] = set() distinct_pairs: Set[Tuple[RecordID, RecordID]] = set() for record_id, record in data_1.items(): identified_records[record[common_key]][0].append(record_id) for record_id, record in data_2.items(): identified_records[record[common_key]][1].append(record_id) for keys_1, keys_2 in identified_records.values(): if keys_1 and keys_2: matched_pairs.update(itertools.product(keys_1, keys_2)) keys_1 = list(data_1.keys()) keys_2 = list(data_2.keys()) random_pairs = [(keys_1[i], keys_2[j]) for i, j in randomPairsMatch(len(data_1), len(data_2), training_size)] distinct_pairs = { pair for pair in random_pairs if pair not in matched_pairs} matched_records = [(data_1[key_1], data_2[key_2]) for key_1, key_2 in matched_pairs] distinct_records = [(data_1[key_1], data_2[key_2]) for key_1, key_2 in distinct_pairs] training_pairs: TrainingData training_pairs = {'match': matched_records, 'distinct': distinct_records} return training_pairs def training_data_dedupe(data: Data, common_key: str, training_size: int = 50000) -> TrainingData: # pragma: nocover ''' Construct training data for consumption by the func:`mark_pairs` method from an already deduplicated dataset. Args: data: Dictionary of records where the keys are record_ids and the values are dictionaries with the keys being field names common_key: The name of the record field that uniquely identifies a match training_size: the rough limit of the number of training examples, defaults to 50000 .. note:: Every match must be identified by the sharing of a common key. This function assumes that if two records do not share a common key then they are distinct records. 
''' identified_records: Dict[str, List[RecordID]] identified_records = collections.defaultdict(list) matched_pairs: Set[Tuple[RecordID, RecordID]] = set() distinct_pairs: Set[Tuple[RecordID, RecordID]] = set() unique_record_ids: Set[RecordID] = set() # a list of record_ids associated with each common_key for record_id, record in data.items(): unique_record_ids.add(record_id) identified_records[record[common_key]].append(record_id) # all combinations of matched_pairs from each common_key group for record_ids in identified_records.values(): if len(record_ids) > 1: matched_pairs.update(itertools.combinations(sorted(record_ids), 2)) # type: ignore # calculate indices using dedupe.core.randomPairs to avoid # the memory cost of enumerating all possible pairs unique_record_ids_l = list(unique_record_ids) pair_indices = randomPairs(len(unique_record_ids), training_size) distinct_pairs = set() for i, j in pair_indices: distinct_pairs.add((unique_record_ids_l[i], unique_record_ids_l[j])) distinct_pairs -= matched_pairs matched_records = [(data[key_1], data[key_2]) for key_1, key_2 in matched_pairs] distinct_records = [(data[key_1], data[key_2]) for key_1, key_2 in distinct_pairs] training_pairs: TrainingData training_pairs = {'match': matched_records, 'distinct': distinct_records} return training_pairs def canonicalize(record_cluster: List[RecordDict]) -> RecordDict: # pragma: nocover """ Constructs a canonical representation of a duplicate cluster by finding canonical values for each field Args: record_cluster: A list of records within a duplicate cluster, where the records are dictionaries with field names as keys and field values as values """ return getCanonicalRep(record_cluster)
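# A toy sketch of the core grouping step in training_data_dedupe() above:
# record ids are grouped by a shared key and every pair inside a group is
# treated as a match. The records below are hypothetical example data.
import collections
import itertools

data = {
    1: {"name": "Acme Corp", "canonical_id": "a"},
    2: {"name": "ACME Corporation", "canonical_id": "a"},
    3: {"name": "Globex", "canonical_id": "b"},
    4: {"name": "Globex Inc", "canonical_id": "b"},
    5: {"name": "Initech", "canonical_id": "c"},
}

groups = collections.defaultdict(list)
for record_id, record in data.items():
    groups[record["canonical_id"]].append(record_id)

matched_pairs = set()
for record_ids in groups.values():
    if len(record_ids) > 1:
        matched_pairs.update(itertools.combinations(sorted(record_ids), 2))

assert matched_pairs == {(1, 2), (3, 4)}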
import time from lemur.plugins.lemur_aws.sts import sts_client @sts_client("route53") def wait_for_dns_change(change_id, client=None): _, change_id = change_id while True: response = client.get_change(Id=change_id) if response["ChangeInfo"]["Status"] == "INSYNC": return time.sleep(5) @sts_client("route53") def find_zone_id(domain, client=None): paginator = client.get_paginator("list_hosted_zones") zones = [] for page in paginator.paginate(): for zone in page["HostedZones"]: if domain.endswith(zone["Name"]) or (domain + ".").endswith(zone["Name"]): if not zone["Config"]["PrivateZone"]: zones.append((zone["Name"], zone["Id"])) if not zones: raise ValueError("Unable to find a Route53 hosted zone for {}".format(domain)) return zones[0][1] @sts_client("route53") def get_zones(client=None): paginator = client.get_paginator("list_hosted_zones") zones = [] for page in paginator.paginate(): for zone in page["HostedZones"]: if not zone["Config"]["PrivateZone"]: zones.append( zone["Name"][:-1] ) # We need [:-1] to strip out the trailing dot. return zones @sts_client("route53") def change_txt_record(action, zone_id, domain, value, client=None): current_txt_records = [] try: current_records = client.list_resource_record_sets( HostedZoneId=zone_id, StartRecordName=domain, StartRecordType="TXT", MaxItems="1", )["ResourceRecordSets"] for record in current_records: if record.get("Type") == "TXT": current_txt_records.extend(record.get("ResourceRecords", [])) except Exception as e: # Current Resource Record does not exist if "NoSuchHostedZone" not in str(type(e)): raise # For some reason TXT records need to be # manually quoted. seen = False for record in current_txt_records: for k, v in record.items(): if '"{}"'.format(value) == v: seen = True if not seen: current_txt_records.append({"Value": '"{}"'.format(value)}) if action == "DELETE" and len(current_txt_records) > 1: # If we want to delete one record out of many, we'll update the record to not include the deleted value instead. # This allows us to support concurrent issuance. current_txt_records = [ record for record in current_txt_records if not (record.get("Value") == '"{}"'.format(value)) ] action = "UPSERT" response = client.change_resource_record_sets( HostedZoneId=zone_id, ChangeBatch={ "Changes": [ { "Action": action, "ResourceRecordSet": { "Name": domain, "Type": "TXT", "TTL": 300, "ResourceRecords": current_txt_records, }, } ] }, ) return response["ChangeInfo"]["Id"] def create_txt_record(host, value, account_number): zone_id = find_zone_id(host, account_number=account_number) change_id = change_txt_record( "UPSERT", zone_id, host, value, account_number=account_number ) return zone_id, change_id def delete_txt_record(change_ids, account_number, host, value): for change_id in change_ids: zone_id, _ = change_id try: change_txt_record( "DELETE", zone_id, host, value, account_number=account_number ) except Exception as e: if "but it was not found" in e.response.get("Error", {}).get("Message"): # We tried to delete a record that doesn't exist. We'll ignore this error. pass else: raise
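# A pure-data sketch of the delete-one-of-many behaviour in
# change_txt_record() above: when several quoted values share one TXT record
# set, deleting a single value becomes an UPSERT of the remaining values. The
# token values are hypothetical; no Route53 calls are made here.
def plan_txt_change(action, current_txt_records, value):
    quoted = '"{}"'.format(value)
    if not any(record.get("Value") == quoted for record in current_txt_records):
        current_txt_records = current_txt_records + [{"Value": quoted}]
    if action == "DELETE" and len(current_txt_records) > 1:
        current_txt_records = [
            record for record in current_txt_records if record.get("Value") != quoted
        ]
        action = "UPSERT"
    return action, current_txt_records


action, records = plan_txt_change(
    "DELETE", [{"Value": '"token-1"'}, {"Value": '"token-2"'}], "token-1"
)
assert action == "UPSERT"
assert records == [{"Value": '"token-2"'}]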
from django import forms from django.contrib.auth import get_user_model from django.contrib.auth.admin import UserAdmin from django.contrib.auth.forms import UserChangeForm, UserCreationForm from django.contrib import admin from django.utils.encoding import force_str from django.utils.timezone import localtime from django.utils.translation import pgettext_lazy, gettext_lazy as _ from shop.models.customer import CustomerModel, CustomerState class CustomerInlineAdminBase(admin.StackedInline): model = CustomerModel fieldsets = [ (None, {'fields': ['get_number']}), ] readonly_fields = ['get_number'] def get_extra(self, request, obj=None, **kwargs): return 0 if obj is None else 1 def has_add_permission(self, request, obj=None): return False def has_delete_permission(self, request, obj=None): return False def get_number(self, customer): return customer.get_number() get_number.short_description = pgettext_lazy('customer', "Number") class CustomerCreationForm(UserCreationForm): class Meta(UserChangeForm.Meta): model = get_user_model() class CustomerChangeForm(UserChangeForm): email = forms.EmailField(required=False) class Meta(UserChangeForm.Meta): model = get_user_model() def __init__(self, *args, **kwargs): initial = kwargs.get('initial', {}) instance = kwargs.get('instance') initial['email'] = instance.email or '' super().__init__(initial=initial, *args, **kwargs) def clean_email(self): return self.cleaned_data.get('email').strip() class CustomerListFilter(admin.SimpleListFilter): title = _("Customer State") parameter_name = 'custate' def lookups(self, request, model_admin): return CustomerState.choices def queryset(self, request, queryset): try: queryset = queryset.filter(customer__recognized=CustomerState(int(self.value()))) finally: return queryset class CustomerAdminBase(UserAdmin): """ This ModelAdmin class must be extended and registered inside the merchant's implementation. 
Since the customer model can differ, the field `inlines` must be specified, for instance ``` @admin.register(CustomerProxy) class CustomerAdmin(CustomerAdminBase): inlines = [CustomerInlineAdminBase] ``` """ form = CustomerChangeForm add_form = CustomerCreationForm list_display = ['get_username', 'last_name', 'first_name', 'recognized', 'last_access', 'is_unexpired'] segmentation_list_display = ['get_username'] list_filter = list(UserAdmin.list_filter) + [CustomerListFilter] readonly_fields = ['last_login', 'date_joined', 'last_access', 'recognized'] ordering = ['id'] class Media: js = ['shop/js/admin/customer.js'] def get_fieldsets(self, request, obj=None): fieldsets = list(super().get_fieldsets(request, obj=obj)) if obj: fieldsets[0][1]['fields'] = ['username', 'recognized', 'password'] fieldsets[3][1]['fields'] = ['date_joined', 'last_login', 'last_access'] if not obj.has_usable_password(): fieldsets.pop(2) return fieldsets def get_username(self, user): return str(user) get_username.short_description = _("Username") get_username.admin_order_field = 'email' def recognized(self, user): if user.is_superuser: user_state = _("Administrator") elif user.is_staff: user_state = _("Staff") else: user_state = _("User") if hasattr(user, 'customer'): customer_state = force_str(user.customer.recognized) if user.is_staff or user.is_superuser: return '{}/{}'.format(customer_state, user_state) return customer_state return user_state recognized.short_description = _("State") def last_access(self, user): if hasattr(user, 'customer'): return localtime(user.customer.last_access).strftime("%d %B %Y %H:%M:%S") return _("No data") last_access.short_description = _("Last accessed") last_access.admin_order_field = 'customer__last_access' def is_unexpired(self, user): if hasattr(user, 'customer'): return not user.customer.is_expired return True is_unexpired.short_description = _("Unexpired") is_unexpired.boolean = True def save_related(self, request, form, formsets, change): if hasattr(form.instance, 'customer') and (form.instance.is_staff or form.instance.is_superuser): form.instance.customer.recognized = CustomerState.REGISTERED super().save_related(request, form, formsets, change) class CustomerProxy(get_user_model()): """ With this neat proxy model, we are able to place the Customer Model Admin into the section "MyAwesomeShop" instead of section "email_auth". """ class Meta: proxy = True verbose_name = _("Customer") verbose_name_plural = _("Customers") try: admin.site.unregister(get_user_model()) except admin.sites.NotRegistered: pass
import logging import pytest QtWebEngineWidgets = pytest.importorskip("PyQt5.QtWebEngineWidgets") QWebEnginePage = QtWebEngineWidgets.QWebEnginePage QWebEngineScriptCollection = QtWebEngineWidgets.QWebEngineScriptCollection QWebEngineScript = QtWebEngineWidgets.QWebEngineScript from qutebrowser.browser import greasemonkey from qutebrowser.utils import usertypes webenginetab = pytest.importorskip( "qutebrowser.browser.webengine.webenginetab") pytestmark = pytest.mark.usefixtures('greasemonkey_manager') class TestWebengineScripts: """Test the _WebEngineScripts utility class.""" @pytest.fixture def webengine_scripts(self, webengine_tab): return webengine_tab._scripts def test_greasemonkey_undefined_world(self, webengine_scripts, caplog): """Make sure scripts with non-existent worlds are rejected.""" scripts = [ greasemonkey.GreasemonkeyScript( [('qute-js-world', 'Mars'), ('name', 'test')], None) ] with caplog.at_level(logging.ERROR, 'greasemonkey'): webengine_scripts._inject_greasemonkey_scripts(scripts) assert len(caplog.records) == 1 msg = caplog.messages[0] assert "has invalid value for '@qute-js-world': Mars" in msg collection = webengine_scripts._widget.page().scripts().toList() assert not any(script.name().startswith('GM-') for script in collection) @pytest.mark.parametrize("worldid", [-1, 257]) def test_greasemonkey_out_of_range_world(self, worldid, webengine_scripts, caplog): """Make sure scripts with out-of-range worlds are rejected.""" scripts = [ greasemonkey.GreasemonkeyScript( [('qute-js-world', worldid), ('name', 'test')], None) ] with caplog.at_level(logging.ERROR, 'greasemonkey'): webengine_scripts._inject_greasemonkey_scripts(scripts) assert len(caplog.records) == 1 msg = caplog.messages[0] assert "has invalid value for '@qute-js-world': " in msg assert "should be between 0 and" in msg collection = webengine_scripts._widget.page().scripts().toList() assert not any(script.name().startswith('GM-') for script in collection) @pytest.mark.parametrize("worldid", [0, 10]) def test_greasemonkey_good_worlds_are_passed(self, worldid, webengine_scripts, caplog): """Make sure scripts with valid worlds have it set.""" scripts = [ greasemonkey.GreasemonkeyScript( [('name', 'foo'), ('qute-js-world', worldid)], None ) ] with caplog.at_level(logging.ERROR, 'greasemonkey'): webengine_scripts._inject_greasemonkey_scripts(scripts) collection = webengine_scripts._widget.page().scripts() assert collection.toList()[-1].worldId() == worldid def test_greasemonkey_document_end_workaround(self, monkeypatch, webengine_scripts): """Make sure document-end is forced when needed.""" monkeypatch.setattr(greasemonkey.objects, 'backend', usertypes.Backend.QtWebEngine) scripts = [ greasemonkey.GreasemonkeyScript([ ('name', 'Iridium'), ('namespace', 'https://github.com/ParticleCore'), ('run-at', 'document-start'), ], None) ] webengine_scripts._inject_greasemonkey_scripts(scripts) collection = webengine_scripts._widget.page().scripts() script = collection.toList()[-1] assert script.injectionPoint() == QWebEngineScript.DocumentReady def test_notification_permission_workaround(): """Make sure the value for QWebEnginePage::Notifications is correct.""" try: notifications = QWebEnginePage.Notifications except AttributeError: pytest.skip("No Notifications member") permissions = webenginetab._WebEnginePermissions assert permissions._options[notifications] == 'content.notifications' assert permissions._messages[notifications] == 'show notifications'
from elkm1_lib.const import ( SettingFormat, ZoneLogicalStatus, ZonePhysicalStatus, ZoneType, ) from elkm1_lib.util import pretty_const, username import voluptuous as vol from homeassistant.const import VOLT from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_platform from . import ElkAttachedEntity, create_elk_entities from .const import ATTR_VALUE, DOMAIN, ELK_USER_CODE_SERVICE_SCHEMA SERVICE_SENSOR_COUNTER_REFRESH = "sensor_counter_refresh" SERVICE_SENSOR_COUNTER_SET = "sensor_counter_set" SERVICE_SENSOR_ZONE_BYPASS = "sensor_zone_bypass" SERVICE_SENSOR_ZONE_TRIGGER = "sensor_zone_trigger" UNDEFINED_TEMPATURE = -40 ELK_SET_COUNTER_SERVICE_SCHEMA = { vol.Required(ATTR_VALUE): vol.All(vol.Coerce(int), vol.Range(0, 65535)) } async def async_setup_entry(hass, config_entry, async_add_entities): """Create the Elk-M1 sensor platform.""" elk_data = hass.data[DOMAIN][config_entry.entry_id] entities = [] elk = elk_data["elk"] create_elk_entities(elk_data, elk.counters, "counter", ElkCounter, entities) create_elk_entities(elk_data, elk.keypads, "keypad", ElkKeypad, entities) create_elk_entities(elk_data, [elk.panel], "panel", ElkPanel, entities) create_elk_entities(elk_data, elk.settings, "setting", ElkSetting, entities) create_elk_entities(elk_data, elk.zones, "zone", ElkZone, entities) async_add_entities(entities, True) platform = entity_platform.current_platform.get() platform.async_register_entity_service( SERVICE_SENSOR_COUNTER_REFRESH, {}, "async_counter_refresh", ) platform.async_register_entity_service( SERVICE_SENSOR_COUNTER_SET, ELK_SET_COUNTER_SERVICE_SCHEMA, "async_counter_set", ) platform.async_register_entity_service( SERVICE_SENSOR_ZONE_BYPASS, ELK_USER_CODE_SERVICE_SCHEMA, "async_zone_bypass", ) platform.async_register_entity_service( SERVICE_SENSOR_ZONE_TRIGGER, {}, "async_zone_trigger", ) def temperature_to_state(temperature, undefined_temperature): """Convert temperature to a state.""" return temperature if temperature > undefined_temperature else None class ElkSensor(ElkAttachedEntity): """Base representation of Elk-M1 sensor.""" def __init__(self, element, elk, elk_data): """Initialize the base of all Elk sensors.""" super().__init__(element, elk, elk_data) self._state = None @property def state(self): """Return the state of the sensor.""" return self._state async def async_counter_refresh(self): """Refresh the value of a counter from the panel.""" if not isinstance(self, ElkCounter): raise HomeAssistantError("supported only on ElkM1 Counter sensors") self._element.get() async def async_counter_set(self, value=None): """Set the value of a counter on the panel.""" if not isinstance(self, ElkCounter): raise HomeAssistantError("supported only on ElkM1 Counter sensors") self._element.set(value) async def async_zone_bypass(self, code=None): """Bypass zone.""" if not isinstance(self, ElkZone): raise HomeAssistantError("supported only on ElkM1 Zone sensors") self._element.bypass(code) async def async_zone_trigger(self): """Trigger zone.""" if not isinstance(self, ElkZone): raise HomeAssistantError("supported only on ElkM1 Zone sensors") self._element.trigger() class ElkCounter(ElkSensor): """Representation of an Elk-M1 Counter.""" @property def icon(self): """Icon to use in the frontend.""" return "mdi:numeric" def _element_changed(self, element, changeset): self._state = self._element.value class ElkKeypad(ElkSensor): """Representation of an Elk-M1 Keypad.""" @property def temperature_unit(self): """Return the temperature unit.""" return 
self._temperature_unit @property def unit_of_measurement(self): """Return the unit of measurement.""" return self._temperature_unit @property def icon(self): """Icon to use in the frontend.""" return "mdi:thermometer-lines" @property def device_state_attributes(self): """Attributes of the sensor.""" attrs = self.initial_attrs() attrs["area"] = self._element.area + 1 attrs["temperature"] = self._state attrs["last_user_time"] = self._element.last_user_time.isoformat() attrs["last_user"] = self._element.last_user + 1 attrs["code"] = self._element.code attrs["last_user_name"] = username(self._elk, self._element.last_user) attrs["last_keypress"] = self._element.last_keypress return attrs def _element_changed(self, element, changeset): self._state = temperature_to_state( self._element.temperature, UNDEFINED_TEMPATURE ) class ElkPanel(ElkSensor): """Representation of an Elk-M1 Panel.""" @property def icon(self): """Icon to use in the frontend.""" return "mdi:home" @property def device_state_attributes(self): """Attributes of the sensor.""" attrs = self.initial_attrs() attrs["system_trouble_status"] = self._element.system_trouble_status return attrs def _element_changed(self, element, changeset): if self._elk.is_connected(): self._state = ( "Paused" if self._element.remote_programming_status else "Connected" ) else: self._state = "Disconnected" class ElkSetting(ElkSensor): """Representation of an Elk-M1 Setting.""" @property def icon(self): """Icon to use in the frontend.""" return "mdi:numeric" def _element_changed(self, element, changeset): self._state = self._element.value @property def device_state_attributes(self): """Attributes of the sensor.""" attrs = self.initial_attrs() attrs["value_format"] = SettingFormat(self._element.value_format).name.lower() return attrs class ElkZone(ElkSensor): """Representation of an Elk-M1 Zone.""" @property def icon(self): """Icon to use in the frontend.""" zone_icons = { ZoneType.FIRE_ALARM.value: "fire", ZoneType.FIRE_VERIFIED.value: "fire", ZoneType.FIRE_SUPERVISORY.value: "fire", ZoneType.KEYFOB.value: "key", ZoneType.NON_ALARM.value: "alarm-off", ZoneType.MEDICAL_ALARM.value: "medical-bag", ZoneType.POLICE_ALARM.value: "alarm-light", ZoneType.POLICE_NO_INDICATION.value: "alarm-light", ZoneType.KEY_MOMENTARY_ARM_DISARM.value: "power", ZoneType.KEY_MOMENTARY_ARM_AWAY.value: "power", ZoneType.KEY_MOMENTARY_ARM_STAY.value: "power", ZoneType.KEY_MOMENTARY_DISARM.value: "power", ZoneType.KEY_ON_OFF.value: "toggle-switch", ZoneType.MUTE_AUDIBLES.value: "volume-mute", ZoneType.POWER_SUPERVISORY.value: "power-plug", ZoneType.TEMPERATURE.value: "thermometer-lines", ZoneType.ANALOG_ZONE.value: "speedometer", ZoneType.PHONE_KEY.value: "phone-classic", ZoneType.INTERCOM_KEY.value: "deskphone", } return f"mdi:{zone_icons.get(self._element.definition, 'alarm-bell')}" @property def device_state_attributes(self): """Attributes of the sensor.""" attrs = self.initial_attrs() attrs["physical_status"] = ZonePhysicalStatus( self._element.physical_status ).name.lower() attrs["logical_status"] = ZoneLogicalStatus( self._element.logical_status ).name.lower() attrs["definition"] = ZoneType(self._element.definition).name.lower() attrs["area"] = self._element.area + 1 attrs["triggered_alarm"] = self._element.triggered_alarm return attrs @property def temperature_unit(self): """Return the temperature unit.""" if self._element.definition == ZoneType.TEMPERATURE.value: return self._temperature_unit return None @property def unit_of_measurement(self): """Return the unit of 
measurement.""" if self._element.definition == ZoneType.TEMPERATURE.value: return self._temperature_unit if self._element.definition == ZoneType.ANALOG_ZONE.value: return VOLT return None def _element_changed(self, element, changeset): if self._element.definition == ZoneType.TEMPERATURE.value: self._state = temperature_to_state( self._element.temperature, UNDEFINED_TEMPATURE ) elif self._element.definition == ZoneType.ANALOG_ZONE.value: self._state = self._element.voltage else: self._state = pretty_const( ZoneLogicalStatus(self._element.logical_status).name )
import unittest import dedupe import dedupe.sampling import dedupe.predicates import dedupe.api from collections import deque data_dict = {'1': {'name': 'Bob', 'age': '51'}, '2': {'name': 'Linda', 'age': '50'}, '3': {'name': 'Gene', 'age': '12'}, '4': {'name': 'Tina', 'age': '15'}, '5': {'name': 'Bob B.', 'age': '51'}, '6': {'name': 'bob belcher', 'age': '51'}, '7': {'name': 'linda ', 'age': '50'}} class DedupeSampling(unittest.TestCase): def setUp(self): field_definition = [{'field': 'name', 'type': 'String'}, {'field': 'age', 'type': 'String'}] self.deduper = dedupe.Dedupe(field_definition) def test_even_split(self): assert sum(dedupe.sampling.evenSplits(10, 10)) == 10 assert sum(dedupe.sampling.evenSplits(10, 1)) == 10 assert sum(dedupe.sampling.evenSplits(10, 4)) == 10 def test_sample_predicate(self): items = data_dict.items() pred = dedupe.predicates.SimplePredicate(dedupe.predicates.sameThreeCharStartPredicate, 'name') assert dedupe.sampling.dedupeSamplePredicate(10, pred, items) == [('1', '5')] def test_sample_predicates(self): items = deque(data_dict.items()) pred = dedupe.predicates.SimplePredicate(dedupe.predicates.sameThreeCharStartPredicate, 'name') assert list(dedupe.sampling.dedupeSamplePredicates(10, [pred], items)) == [[('1', '5')]] def test_blockedSample(self): pred = dedupe.predicates.SimplePredicate(dedupe.predicates.sameThreeCharStartPredicate, 'name') assert len(dedupe.sampling.dedupeBlockedSample(10, [pred], deque(data_dict.items()))) == 1
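These tests exercise blocking predicates such as sameThreeCharStartPredicate, which only generates candidate pairs for records that agree on the first three characters of a field. A standalone sketch of that blocking idea in plain Python (not the dedupe implementation) that reproduces the [('1', '5')] expectation from the test data above:

```python
import collections
import itertools

data = {
    '1': {'name': 'Bob', 'age': '51'},
    '5': {'name': 'Bob B.', 'age': '51'},
    '6': {'name': 'bob belcher', 'age': '51'},
    '2': {'name': 'Linda', 'age': '50'},
}


def same_three_char_start(value):
    """Blocking key: the first three characters of the field value."""
    return value[:3]


# Bucket record ids by blocking key, then only pair ids within a bucket.
blocks = collections.defaultdict(list)
for record_id, record in data.items():
    blocks[same_three_char_start(record['name'])].append(record_id)

candidate_pairs = [
    pair
    for ids in blocks.values()
    for pair in itertools.combinations(sorted(ids), 2)
]
print(candidate_pairs)  # [('1', '5')]; 'bob belcher' differs in case
```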
from datetime import timedelta import logging from pysabnzbd import SabnzbdApi, SabnzbdApiException import voluptuous as vol from homeassistant.components.discovery import SERVICE_SABNZBD from homeassistant.const import ( CONF_API_KEY, CONF_HOST, CONF_NAME, CONF_PATH, CONF_PORT, CONF_SENSORS, CONF_SSL, DATA_GIGABYTES, DATA_MEGABYTES, DATA_RATE_MEGABYTES_PER_SECOND, ) from homeassistant.core import callback from homeassistant.helpers import discovery from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.event import async_track_time_interval from homeassistant.util.json import load_json, save_json _LOGGER = logging.getLogger(__name__) DOMAIN = "sabnzbd" DATA_SABNZBD = "sabznbd" _CONFIGURING = {} ATTR_SPEED = "speed" BASE_URL_FORMAT = "{}://{}:{}/" CONFIG_FILE = "sabnzbd.conf" DEFAULT_HOST = "localhost" DEFAULT_NAME = "SABnzbd" DEFAULT_PORT = 8080 DEFAULT_SPEED_LIMIT = "100" DEFAULT_SSL = False UPDATE_INTERVAL = timedelta(seconds=30) SERVICE_PAUSE = "pause" SERVICE_RESUME = "resume" SERVICE_SET_SPEED = "set_speed" SIGNAL_SABNZBD_UPDATED = "sabnzbd_updated" SENSOR_TYPES = { "current_status": ["Status", None, "status"], "speed": ["Speed", DATA_RATE_MEGABYTES_PER_SECOND, "kbpersec"], "queue_size": ["Queue", DATA_MEGABYTES, "mb"], "queue_remaining": ["Left", DATA_MEGABYTES, "mbleft"], "disk_size": ["Disk", DATA_GIGABYTES, "diskspacetotal1"], "disk_free": ["Disk Free", DATA_GIGABYTES, "diskspace1"], "queue_count": ["Queue Count", None, "noofslots_total"], "day_size": ["Daily Total", DATA_GIGABYTES, "day_size"], "week_size": ["Weekly Total", DATA_GIGABYTES, "week_size"], "month_size": ["Monthly Total", DATA_GIGABYTES, "month_size"], "total_size": ["Total", DATA_GIGABYTES, "total_size"], } SPEED_LIMIT_SCHEMA = vol.Schema( {vol.Optional(ATTR_SPEED, default=DEFAULT_SPEED_LIMIT): cv.string} ) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_API_KEY): cv.string, vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string, vol.Optional(CONF_PATH): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(CONF_SENSORS): vol.All( cv.ensure_list, [vol.In(SENSOR_TYPES)] ), vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean, } ) }, extra=vol.ALLOW_EXTRA, ) async def async_check_sabnzbd(sab_api): """Check if we can reach SABnzbd.""" try: await sab_api.check_available() return True except SabnzbdApiException: _LOGGER.error("Connection to SABnzbd API failed") return False async def async_configure_sabnzbd( hass, config, use_ssl, name=DEFAULT_NAME, api_key=None ): """Try to configure Sabnzbd and request api key if configuration fails.""" host = config[CONF_HOST] port = config[CONF_PORT] web_root = config.get(CONF_PATH) uri_scheme = "https" if use_ssl else "http" base_url = BASE_URL_FORMAT.format(uri_scheme, host, port) if api_key is None: conf = await hass.async_add_executor_job( load_json, hass.config.path(CONFIG_FILE) ) api_key = conf.get(base_url, {}).get(CONF_API_KEY, "") sab_api = SabnzbdApi( base_url, api_key, web_root=web_root, session=async_get_clientsession(hass) ) if await async_check_sabnzbd(sab_api): async_setup_sabnzbd(hass, sab_api, config, name) else: async_request_configuration(hass, config, base_url, web_root) async def async_setup(hass, config): """Set up the SABnzbd component.""" async def sabnzbd_discovered(service, info): 
"""Handle service discovery.""" ssl = info.get("properties", {}).get("https", "0") == "1" await async_configure_sabnzbd(hass, info, ssl) discovery.async_listen(hass, SERVICE_SABNZBD, sabnzbd_discovered) conf = config.get(DOMAIN) if conf is not None: use_ssl = conf[CONF_SSL] name = conf.get(CONF_NAME) api_key = conf.get(CONF_API_KEY) await async_configure_sabnzbd(hass, conf, use_ssl, name, api_key) return True @callback def async_setup_sabnzbd(hass, sab_api, config, name): """Set up SABnzbd sensors and services.""" sab_api_data = SabnzbdApiData(sab_api, name, config.get(CONF_SENSORS, {})) if config.get(CONF_SENSORS): hass.data[DATA_SABNZBD] = sab_api_data hass.async_create_task( discovery.async_load_platform(hass, "sensor", DOMAIN, {}, config) ) async def async_service_handler(service): """Handle service calls.""" if service.service == SERVICE_PAUSE: await sab_api_data.async_pause_queue() elif service.service == SERVICE_RESUME: await sab_api_data.async_resume_queue() elif service.service == SERVICE_SET_SPEED: speed = service.data.get(ATTR_SPEED) await sab_api_data.async_set_queue_speed(speed) hass.services.async_register( DOMAIN, SERVICE_PAUSE, async_service_handler, schema=vol.Schema({}) ) hass.services.async_register( DOMAIN, SERVICE_RESUME, async_service_handler, schema=vol.Schema({}) ) hass.services.async_register( DOMAIN, SERVICE_SET_SPEED, async_service_handler, schema=SPEED_LIMIT_SCHEMA ) async def async_update_sabnzbd(now): """Refresh SABnzbd queue data.""" try: await sab_api.refresh_data() async_dispatcher_send(hass, SIGNAL_SABNZBD_UPDATED, None) except SabnzbdApiException as err: _LOGGER.error(err) async_track_time_interval(hass, async_update_sabnzbd, UPDATE_INTERVAL) @callback def async_request_configuration(hass, config, host, web_root): """Request configuration steps from the user.""" configurator = hass.components.configurator # We got an error if this method is called while we are configuring if host in _CONFIGURING: configurator.async_notify_errors( _CONFIGURING[host], "Failed to register, please try again." 
) return async def async_configuration_callback(data): """Handle configuration changes.""" api_key = data.get(CONF_API_KEY) sab_api = SabnzbdApi( host, api_key, web_root=web_root, session=async_get_clientsession(hass) ) if not await async_check_sabnzbd(sab_api): return def success(): """Signal successful setup.""" conf = load_json(hass.config.path(CONFIG_FILE)) conf[host] = {CONF_API_KEY: api_key} save_json(hass.config.path(CONFIG_FILE), conf) req_config = _CONFIGURING.pop(host) configurator.request_done(req_config) hass.async_add_job(success) async_setup_sabnzbd(hass, sab_api, config, config.get(CONF_NAME, DEFAULT_NAME)) _CONFIGURING[host] = configurator.async_request_config( DEFAULT_NAME, async_configuration_callback, description="Enter the API Key", submit_caption="Confirm", fields=[{"id": CONF_API_KEY, "name": "API Key", "type": ""}], ) class SabnzbdApiData: """Class for storing/refreshing sabnzbd api queue data.""" def __init__(self, sab_api, name, sensors): """Initialize component.""" self.sab_api = sab_api self.name = name self.sensors = sensors async def async_pause_queue(self): """Pause Sabnzbd queue.""" try: return await self.sab_api.pause_queue() except SabnzbdApiException as err: _LOGGER.error(err) return False async def async_resume_queue(self): """Resume Sabnzbd queue.""" try: return await self.sab_api.resume_queue() except SabnzbdApiException as err: _LOGGER.error(err) return False async def async_set_queue_speed(self, limit): """Set speed limit for the Sabnzbd queue.""" try: return await self.sab_api.set_speed_limit(limit) except SabnzbdApiException as err: _LOGGER.error(err) return False def get_queue_field(self, field): """Return the value for the given field from the Sabnzbd queue.""" return self.sab_api.queue.get(field)
from test import CollectorTestCase from test import get_collector_config from test import unittest from mock import Mock from mock import call from mock import patch from diamond.collector import Collector from smart import SmartCollector ########################################################################## class TestSmartCollector(CollectorTestCase): def setUp(self): config = get_collector_config('SmartCollector', { 'interval': 10, 'bin': 'true', }) self.collector = SmartCollector(config, None) def test_import(self): self.assertTrue(SmartCollector) @patch('os.access', Mock(return_value=True)) @patch.object(Collector, 'publish') def test_should_work_with_real_data_osx_missing(self, publish_mock): patch_listdir = patch('os.listdir', Mock(return_value=['disk0'])) patch_communicate = patch( 'subprocess.Popen.communicate', Mock(return_value=( self.getFixture('osx_missing').getvalue(), ''))) patch_listdir.start() patch_communicate.start() self.collector.collect() patch_listdir.stop() patch_communicate.stop() self.assertPublishedMany(publish_mock, {}) @patch('os.access', Mock(return_value=True)) @patch.object(Collector, 'publish') def test_should_work_with_real_data_osx_ssd(self, publish_mock): patch_listdir = patch('os.listdir', Mock(return_value=['disk0'])) patch_communicate = patch( 'subprocess.Popen.communicate', Mock(return_value=( self.getFixture('osx_ssd').getvalue(), ''))) patch_listdir.start() patch_communicate.start() self.collector.collect() patch_listdir.stop() patch_communicate.stop() self.assertPublishedMany(publish_mock, { 'disk0.172': 0, 'disk0.Head_Amplitude': 100, 'disk0.Reallocated_Sector_Ct': 0, 'disk0.Temperature_Celsius': 128, 'disk0.174': 3, 'disk0.Reported_Uncorrect': 0, 'disk0.Raw_Read_Error_Rate': 5849487, 'disk0.Power_On_Hours': 199389561752279, 'disk0.Total_LBAs_Read': 17985, 'disk0.Power_Cycle_Count': 381, 'disk0.Hardware_ECC_Recovered': 5849487, 'disk0.171': 0, 'disk0.Soft_Read_Error_Rate': 5849487, 'disk0.234': 2447, 'disk0.Program_Fail_Cnt_Total': 0, 'disk0.Media_Wearout_Indicator': 4881, 'disk0.Erase_Fail_Count_Total': 0, 'disk0.Wear_Leveling_Count': 2, 'disk0.Reallocated_Event_Count': 0, 'disk0.Total_LBAs_Written': 2447, 'disk0.Soft_ECC_Correction': 5849487, }) @patch('os.access', Mock(return_value=True)) @patch.object(Collector, 'publish') def test_should_work_with_real_data_centos55_hdd(self, publish_mock): patch_listdir = patch('os.listdir', Mock(return_value=['sda'])) patch_communicate = patch( 'subprocess.Popen.communicate', Mock(return_value=( self.getFixture('centos5.5_hdd').getvalue(), ''))) patch_listdir.start() patch_communicate.start() self.collector.collect() patch_listdir.stop() patch_communicate.stop() metrics = { 'sda.Temperature_Celsius': 28, 'sda.Power_On_Hours': 6827, 'sda.Power_Cycle_Count': 7, 'sda.Power-Off_Retract_Count': 5, 'sda.UDMA_CRC_Error_Count': 0, 'sda.Load_Cycle_Count': 2, 'sda.Calibration_Retry_Count': 0, 'sda.Spin_Up_Time': 3991, 'sda.Spin_Retry_Count': 0, 'sda.Multi_Zone_Error_Rate': 0, 'sda.Raw_Read_Error_Rate': 0, 'sda.Reallocated_Event_Count': 0, 'sda.Start_Stop_Count': 8, 'sda.Offline_Uncorrectable': 0, 'sda.Current_Pending_Sector': 0, 'sda.Reallocated_Sector_Ct': 0, 'sda.Seek_Error_Rate': 0, 'sda.Thermal_Throttle_0': 0, 'sda.Thermal_Throttle_1': 0, } self.setDocExample(collector=self.collector.__class__.__name__, metrics=metrics, defaultpath=self.collector.config['path']) self.assertPublishedMany(publish_mock, metrics) @patch('os.access', Mock(return_value=True)) @patch.object(Collector, 'publish') def 
test_should_work_with_real_data_debian_invalid_checksum_warning( self, publish_mock): fixture_data = self.getFixture( 'debian_invalid_checksum_warning').getvalue() patch_listdir = patch('os.listdir', Mock(return_value=['sda'])) patch_communicate = patch('subprocess.Popen.communicate', Mock(return_value=(fixture_data, ''))) patch_listdir.start() patch_communicate.start() self.collector.collect() patch_listdir.stop() patch_communicate.stop() metrics = { 'sda.Raw_Read_Error_Rate': 0, 'sda.Spin_Up_Time': 4225, 'sda.Start_Stop_Count': 13, 'sda.Reallocated_Sector_Ct': 0, 'sda.Seek_Error_Rate': 0, 'sda.Power_On_Hours': 88, 'sda.Spin_Retry_Count': 0, 'sda.Calibration_Retry_Count': 0, 'sda.Power_Cycle_Count': 13, 'sda.Power-Off_Retract_Count': 7, 'sda.Load_Cycle_Count': 5, 'sda.Temperature_Celsius': 35, 'sda.Reallocated_Event_Count': 0, 'sda.Current_Pending_Sector': 0, 'sda.Offline_Uncorrectable': 0, 'sda.UDMA_CRC_Error_Count': 0, 'sda.Multi_Zone_Error_Rate': 0, } header_call = call('sda.ATTRIBUTE_NAME', 'RAW_VALUE') published_metric_header = header_call in publish_mock.mock_calls assert not published_metric_header, "published metric for header row" self.assertPublishedMany(publish_mock, metrics) def test_find_attr_start_line(self): def get_fixture_lines(fixture): return self.getFixture(fixture).getvalue().strip().splitlines() def assert_attrs_start_at(expected, fixture): lines = get_fixture_lines(fixture) self.assertEqual(expected, self.collector.find_attr_start_line(lines)) lines = get_fixture_lines('osx_missing') self.assertEqual(5, self.collector.find_attr_start_line(lines, 2, 4)) assert_attrs_start_at(7, 'osx_ssd') assert_attrs_start_at(7, 'centos5.5_hdd') assert_attrs_start_at(8, 'debian_invalid_checksum_warning') ########################################################################## if __name__ == "__main__": unittest.main()
import os import sys import mne def run(): """Run command.""" from mne.commands.utils import get_optparser parser = get_optparser(__file__) parser.add_option("-i", "--in", dest="raw_in", help="Input raw FIF file", metavar="FILE") parser.add_option("--tmin", dest="tmin", type="float", help="Time before event in seconds", default=-0.2) parser.add_option("--tmax", dest="tmax", type="float", help="Time after event in seconds", default=0.2) parser.add_option("-g", "--n-grad", dest="n_grad", type="int", help="Number of SSP vectors for gradiometers", default=2) parser.add_option("-m", "--n-mag", dest="n_mag", type="int", help="Number of SSP vectors for magnetometers", default=2) parser.add_option("-e", "--n-eeg", dest="n_eeg", type="int", help="Number of SSP vectors for EEG", default=2) parser.add_option("--l-freq", dest="l_freq", type="float", help="Filter low cut-off frequency in Hz", default=1) parser.add_option("--h-freq", dest="h_freq", type="float", help="Filter high cut-off frequency in Hz", default=35) parser.add_option("--eog-l-freq", dest="eog_l_freq", type="float", help="Filter low cut-off frequency in Hz used for " "EOG event detection", default=1) parser.add_option("--eog-h-freq", dest="eog_h_freq", type="float", help="Filter high cut-off frequency in Hz used for " "EOG event detection", default=10) parser.add_option("-p", "--preload", dest="preload", help="Temporary file used during computation (to " "save memory)", default=True) parser.add_option("-a", "--average", dest="average", action="store_true", help="Compute SSP after averaging", default=False) # XXX: change to default=True in 0.17 parser.add_option("--proj", dest="proj", help="Use SSP projections from a fif file.", default=None) parser.add_option("--filtersize", dest="filter_length", type="int", help="Number of taps to use for filtering", default=2048) parser.add_option("-j", "--n-jobs", dest="n_jobs", type="int", help="Number of jobs to run in parallel", default=1) parser.add_option("--rej-grad", dest="rej_grad", type="float", help="Gradiometers rejection parameter in fT/cm (peak " "to peak amplitude)", default=2000) parser.add_option("--rej-mag", dest="rej_mag", type="float", help="Magnetometers rejection parameter in fT (peak to " "peak amplitude)", default=3000) parser.add_option("--rej-eeg", dest="rej_eeg", type="float", help="EEG rejection parameter in µV (peak to peak " "amplitude)", default=50) parser.add_option("--rej-eog", dest="rej_eog", type="float", help="EOG rejection parameter in µV (peak to peak " "amplitude)", default=1e9) parser.add_option("--avg-ref", dest="avg_ref", action="store_true", help="Add EEG average reference proj", default=False) parser.add_option("--no-proj", dest="no_proj", action="store_true", help="Exclude the SSP projectors currently in the " "fiff file", default=False) parser.add_option("--bad", dest="bad_fname", help="Text file containing bad channels list " "(one per line)", default=None) parser.add_option("--event-id", dest="event_id", type="int", help="ID to use for events", default=998) parser.add_option("--event-raw", dest="raw_event_fname", help="raw file to use for event detection", default=None) parser.add_option("--tstart", dest="tstart", type="float", help="Start artifact detection after tstart seconds", default=0.) 
parser.add_option("-c", "--channel", dest="ch_name", type="string", help="Custom EOG channel(s), comma separated", default=None) options, args = parser.parse_args() raw_in = options.raw_in if raw_in is None: parser.print_help() sys.exit(1) tmin = options.tmin tmax = options.tmax n_grad = options.n_grad n_mag = options.n_mag n_eeg = options.n_eeg l_freq = options.l_freq h_freq = options.h_freq eog_l_freq = options.eog_l_freq eog_h_freq = options.eog_h_freq average = options.average preload = options.preload filter_length = options.filter_length n_jobs = options.n_jobs reject = dict(grad=1e-13 * float(options.rej_grad), mag=1e-15 * float(options.rej_mag), eeg=1e-6 * float(options.rej_eeg), eog=1e-6 * float(options.rej_eog)) avg_ref = options.avg_ref no_proj = options.no_proj bad_fname = options.bad_fname event_id = options.event_id proj_fname = options.proj raw_event_fname = options.raw_event_fname tstart = options.tstart ch_name = options.ch_name if bad_fname is not None: with open(bad_fname, 'r') as fid: bads = [w.rstrip() for w in fid.readlines()] print('Bad channels read : %s' % bads) else: bads = [] if raw_in.endswith('_raw.fif') or raw_in.endswith('-raw.fif'): prefix = raw_in[:-8] else: prefix = raw_in[:-4] eog_event_fname = prefix + '_eog-eve.fif' if average: eog_proj_fname = prefix + '_eog_avg-proj.fif' else: eog_proj_fname = prefix + '_eog-proj.fif' raw = mne.io.read_raw_fif(raw_in, preload=preload) if raw_event_fname is not None: raw_event = mne.io.read_raw_fif(raw_event_fname) else: raw_event = raw flat = None # XXX : not exposed to the user projs, events = mne.preprocessing.compute_proj_eog( raw=raw, raw_event=raw_event, tmin=tmin, tmax=tmax, n_grad=n_grad, n_mag=n_mag, n_eeg=n_eeg, l_freq=l_freq, h_freq=h_freq, average=average, filter_length=filter_length, n_jobs=n_jobs, reject=reject, flat=flat, bads=bads, avg_ref=avg_ref, no_proj=no_proj, event_id=event_id, eog_l_freq=eog_l_freq, eog_h_freq=eog_h_freq, tstart=tstart, ch_name=ch_name, copy=False) raw.close() if raw_event_fname is not None: raw_event.close() if proj_fname is not None: print('Including SSP projections from : %s' % proj_fname) # append the eog projs, so they are last in the list projs = mne.read_proj(proj_fname) + projs if isinstance(preload, str) and os.path.exists(preload): os.remove(preload) print("Writing EOG projections in %s" % eog_proj_fname) mne.write_proj(eog_proj_fname, projs) print("Writing EOG events in %s" % eog_event_fname) mne.write_events(eog_event_fname, events) is_main = (__name__ == '__main__') if is_main: run()
from .unit import Unit
from matchzoo.utils.bert_utils import \
    is_whitespace, is_control, run_strip_accents


class BertClean(Unit):
    """Clean unit for raw text."""

    def transform(self, input_: str) -> str:
        """
        Process input data from raw terms to cleaned text.

        :param input_: raw textual input.
        :return: cleaned text.
        """
        output = []
        for char in input_:
            cp = ord(char)
            # Drop NUL bytes, the Unicode replacement character and other
            # control characters; normalize whitespace to a single space.
            if cp == 0 or cp == 0xfffd or is_control(char):
                continue
            if is_whitespace(char):
                output.append(" ")
            else:
                output.append(char)
        cleaned_text = "".join(output)
        return cleaned_text


class StripAccent(Unit):
    """Process unit for stripping accents from tokens."""

    def transform(self, input_: list) -> list:
        """
        Strip accents from each token.

        :param input_: list of tokens.
        :return: accent-stripped list of tokens.
        """
        return [run_strip_accents(token) for token in input_]
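Assuming the units above are importable (the exact import path below is an assumption), they are applied independently: BertClean to raw strings, StripAccent to already-tokenized lists.

```python
# The import path is an assumption; adjust to wherever this module lives.
from matchzoo.preprocessors.units import BertClean, StripAccent

cleaner = BertClean()
stripper = StripAccent()

raw = "caf\u00e9\x00 au lait"                 # a NUL control char plus an accent
cleaned = cleaner.transform(raw)              # control char dropped
tokens = stripper.transform(cleaned.split())  # accents removed per token
print(tokens)                                 # expected: ['cafe', 'au', 'lait']
```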
from core.models import Category from django.test.testcases import TestCase from django.urls import reverse class ExportViewMixinTest(TestCase): def setUp(self): self.url = reverse('export-category') self.cat1 = Category.objects.create(name='Cat 1') self.cat2 = Category.objects.create(name='Cat 2') def test_get(self): response = self.client.get(self.url) self.assertContains(response, self.cat1.name, status_code=200) self.assertEqual(response['Content-Type'], 'text/html; charset=utf-8') def test_post(self): data = { 'file_format': '0', } response = self.client.post(self.url, data) self.assertContains(response, self.cat1.name, status_code=200) self.assertTrue(response.has_header("Content-Disposition")) self.assertEqual(response['Content-Type'], 'text/csv')
import argparse import logging import sys import traceback import pysensu_yelp from paasta_tools import bounce_lib from paasta_tools import marathon_tools from paasta_tools.monitoring_tools import send_event from paasta_tools.utils import _log from paasta_tools.utils import DEFAULT_SOA_DIR from paasta_tools.utils import get_services_for_cluster from paasta_tools.utils import InvalidJobNameError from paasta_tools.utils import load_system_paasta_config log = logging.getLogger(__name__) class DontKillEverythingError(Exception): pass def parse_args(argv): parser = argparse.ArgumentParser(description="Cleans up stale marathon jobs.") parser.add_argument( "-d", "--soa-dir", dest="soa_dir", metavar="SOA_DIR", default=DEFAULT_SOA_DIR, help="define a different soa config directory", ) parser.add_argument( "-t", "--kill-threshold", dest="kill_threshold", default=0.5, help="The decimal fraction of apps we think is " "sane to kill when this job runs", ) parser.add_argument( "-v", "--verbose", action="store_true", dest="verbose", default=False ) parser.add_argument( "-f", "--force", action="store_true", dest="force", default=False, help="Force the cleanup if we are above the " "kill_threshold", ) return parser.parse_args(argv) def delete_app(app_id, client, soa_dir): """Deletes a marathon app safely and logs to notify the user that it happened""" log.warn("%s appears to be old; attempting to delete" % app_id) service, instance, _, __ = marathon_tools.deformat_job_id(app_id) cluster = load_system_paasta_config().get_cluster() try: short_app_id = marathon_tools.compose_job_id(service, instance) with bounce_lib.bounce_lock_zookeeper(short_app_id): bounce_lib.delete_marathon_app(app_id, client) send_event( service=service, check_name="check_marathon_services_replication.%s" % short_app_id, soa_dir=soa_dir, status=pysensu_yelp.Status.OK, overrides={}, output="This instance was removed and is no longer running", ) send_event( service=service, check_name="setup_marathon_job.%s" % short_app_id, soa_dir=soa_dir, status=pysensu_yelp.Status.OK, overrides={}, output="This instance was removed and is no longer running", ) log_line = "Deleted stale marathon job that looks lost: %s" % app_id _log( service=service, component="deploy", level="event", cluster=cluster, instance=instance, line=log_line, ) except IOError: log.debug("%s is being bounced, skipping" % app_id) except Exception: loglines = ["Exception raised during cleanup of service %s:" % service] loglines.extend(traceback.format_exc().rstrip().split("\n")) for logline in loglines: _log( service=service, component="deploy", level="debug", cluster=load_system_paasta_config().get_cluster(), instance=instance, line=logline, ) raise def cleanup_apps(soa_dir, kill_threshold=0.5, force=False): """Clean up old or invalid jobs/apps from marathon. Retrieves both a list of apps currently in marathon and a list of valid app ids in order to determine what to kill. :param soa_dir: The SOA config directory to read from :param kill_threshold: The decimal fraction of apps we think is sane to kill when this job runs. 
:param force: Force the cleanup if we are above the kill_threshold""" log.info("Loading marathon configuration") system_paasta_config = load_system_paasta_config() log.info("Connecting to marathon") clients = marathon_tools.get_marathon_clients( marathon_tools.get_marathon_servers(system_paasta_config) ) valid_services = get_services_for_cluster(instance_type="marathon", soa_dir=soa_dir) all_apps_with_clients = marathon_tools.get_marathon_apps_with_clients( clients.get_all_clients() ) app_ids_with_clients = [] for (app, client) in all_apps_with_clients: try: app_id = marathon_tools.deformat_job_id(app.id.lstrip("/")) except InvalidJobNameError: log.warn( "%s doesn't conform to paasta naming conventions? Skipping." % app.id ) continue app_ids_with_clients.append((app_id, client)) apps_to_kill = [ ((service, instance, git_sha, config_sha), client) for (service, instance, git_sha, config_sha), client in app_ids_with_clients if (service, instance) not in valid_services ] log.debug("Running apps: %s" % app_ids_with_clients) log.debug("Valid apps: %s" % valid_services) log.debug("Terminating: %s" % apps_to_kill) if app_ids_with_clients: above_kill_threshold = float(len(apps_to_kill)) / float( len(app_ids_with_clients) ) > float(kill_threshold) if above_kill_threshold and not force: log.critical( "Paasta was about to kill more than %s of the running services, this " "is probably a BAD mistake!, run again with --force if you " "really need to destroy everything" % kill_threshold ) raise DontKillEverythingError for id_tuple, client in apps_to_kill: app_id = marathon_tools.format_job_id(*id_tuple) delete_app(app_id=app_id, client=client, soa_dir=soa_dir) def main(argv=None): args = parse_args(argv) soa_dir = args.soa_dir kill_threshold = args.kill_threshold force = args.force if args.verbose: logging.basicConfig(level=logging.DEBUG) else: logging.basicConfig(level=logging.WARNING) try: cleanup_apps(soa_dir, kill_threshold=kill_threshold, force=force) except DontKillEverythingError: sys.exit(1) if __name__ == "__main__": main()
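cleanup_apps refuses to act when it would delete more than kill_threshold of the running apps, unless --force is given. The guard condensed into a small pure function with a worked example; the function name is illustrative, not part of paasta_tools.

```python
def should_abort_cleanup(num_running, num_to_kill, kill_threshold, force):
    """Return True if the cleanup should stop instead of deleting apps.

    Mirrors the guard in cleanup_apps: abort when more than kill_threshold
    of the running apps would be removed and --force was not given.
    """
    if num_running == 0:
        return False
    above_threshold = float(num_to_kill) / float(num_running) > float(kill_threshold)
    return above_threshold and not force


# 6 of 10 apps look stale: above the default 0.5 threshold, so abort...
assert should_abort_cleanup(10, 6, 0.5, force=False) is True
# ...unless the operator explicitly forces the cleanup.
assert should_abort_cleanup(10, 6, 0.5, force=True) is False
```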
import argparse import os import os.path import re import sys import shlex import subprocess def get_friends(py_file): for ext in ('.py', '.py.md5', '.rst', '.ipynb'): friend = re.sub(r'\.py$', ext, py_file) if os.path.isfile(friend): yield friend def is_under_version_control(path): command = ['git', 'ls-files', '--error-unmatch', path] popen = subprocess.Popen( command, cwd=os.path.dirname(path), stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) popen.communicate() popen.wait() return popen.returncode == 0 def find_unbuilt_examples(gallery_subdir): """Returns True if there are any examples that have not been built yet.""" for root, dirs, files in os.walk(gallery_subdir): in_files = [os.path.join(root, f) for f in files if f.endswith('.py')] for in_file in in_files: out_file = in_file.replace('/gallery/', '/auto_examples/') friends = list(get_friends(out_file)) if any([not os.path.isfile(f) for f in friends]): yield in_file def diff(f1, f2): """Returns True if the files are different.""" with open(f1) as fin: f1_contents = fin.read() with open(f2) as fin: f2_contents = fin.read() return f1_contents != f2_contents def find_py_files(subdir): for root, dirs, files in os.walk(subdir): for f in files: if f.endswith('.py'): yield os.path.join(root, f) def main(): parser = argparse.ArgumentParser() parser.add_argument( '--apply', action='store_true', help='Apply any suggestions made by this script', ) args = parser.parse_args() curr_dir = os.path.dirname(os.path.abspath(__file__)) output_dir = os.path.abspath(os.path.join(curr_dir, '../auto_examples/')) retval = 0 rebuild = False suggestions = [] # # Check for stale output. # for out_file in find_py_files(output_dir): in_file = out_file.replace('/auto_examples/', '/gallery/') if not os.path.isfile(in_file): print('%s is stale, consider removing it and its friends.' % in_file) for friend in get_friends(out_file): suggestions.append('git rm -f %s' % friend) retval = 1 continue for friend in get_friends(out_file): if not is_under_version_control(friend): print('%s is not under version control, consider adding it.' % friend) suggestions.append('git add %s' % friend) if diff(in_file, out_file): print('%s is stale.' % in_file) rebuild = True retval = 1 gallery_dir = output_dir.replace('/auto_examples', '/gallery') unbuilt = list(find_unbuilt_examples(gallery_dir)) if unbuilt: for u in unbuilt: print('%s has not been built yet' % u) rebuild = True retval = 1 if rebuild: src_dir = os.path.abspath(os.path.join(gallery_dir, '..')) print('consider rebuilding the gallery:') print('\tmake -C %s html' % src_dir) if suggestions: print('consider running the following commands (or rerun this script with --apply option):') for command in suggestions: print('\t' + command) if args.apply: for command in suggestions: subprocess.check_call(shlex.split(command)) return retval if __name__ == '__main__': sys.exit(main())
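get_friends above derives the sibling artifacts expected next to each built example. A quick worked example of the naming scheme it checks, with a hypothetical path; the real helper additionally filters the candidates through os.path.isfile.

```python
import re


def candidate_friends(py_file):
    """All sibling artifact names checked for an example script."""
    return [re.sub(r'\.py$', ext, py_file)
            for ext in ('.py', '.py.md5', '.rst', '.ipynb')]


print(candidate_friends('auto_examples/tutorials/run_example.py'))
# ['auto_examples/tutorials/run_example.py',
#  'auto_examples/tutorials/run_example.py.md5',
#  'auto_examples/tutorials/run_example.rst',
#  'auto_examples/tutorials/run_example.ipynb']
```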
import unittest from unit_tests.myStringIO import StringIO from mock import Mock, ANY from .files import require_empty_dir, make_file from trashcli.rm import RmCmd, ListTrashinfos from .fake_trash_dir import a_trashinfo_with_path, a_trashinfo_without_path from trashcli.fs import FileSystemReader class TestTrashRm(unittest.TestCase): def test_issue69(self): self.add_invalid_trashinfo_without_path(1) self.trash_rm.run(['trash-rm', 'any-pattern (ignored)']) assert ('trash-rm: ' 'sandbox/xdh/Trash/info/1.trashinfo: ' 'unable to parse \'Path\'' '\n' == self.stderr.getvalue()) def test_integration(self): self.add_trashinfo_for(1, 'to/be/deleted') self.add_trashinfo_for(2, 'to/be/kept') self.trash_rm.run(['trash-rm', 'delete*']) self.assert_trashinfo_has_been_deleted(1) def setUp(self): require_empty_dir('sandbox/xdh') self.stderr = StringIO() self.trash_rm = RmCmd(environ = {'XDG_DATA_HOME':'sandbox/xdh'} , getuid = 123 , list_volumes = lambda:[] , stderr = self.stderr , file_reader = FileSystemReader()) def add_trashinfo_for(self, index, path): make_file(self.trashinfo_from_index(index), a_trashinfo_with_path(path)) def add_invalid_trashinfo_without_path(self, index): make_file(self.trashinfo_from_index(index), a_trashinfo_without_path()) def trashinfo_from_index(self, index): return 'sandbox/xdh/Trash/info/%s.trashinfo' % index def assert_trashinfo_has_been_deleted(self, index): import os filename = self.trashinfo_from_index(index) assert not os.path.exists(filename), 'File "%s" still exists' % filename class TestListing(unittest.TestCase): def setUp(self): require_empty_dir('sandbox') self.out = Mock() self.listing = ListTrashinfos(self.out, FileSystemReader(), None) self.index = 0 def test_should_report_original_location(self): self.add_trashinfo('/foo') self.listing.list_from_volume_trashdir('sandbox/Trash', '/') self.out.assert_called_with('/foo', ANY) def test_should_report_trashinfo_path(self): self.add_trashinfo(trashinfo_path='sandbox/Trash/info/a.trashinfo') self.listing.list_from_volume_trashdir('sandbox/Trash', '/') self.out.assert_called_with(ANY, 'sandbox/Trash/info/a.trashinfo') def test_should_handle_volume_trashdir(self): self.add_trashinfo(trashinfo_path='sandbox/.Trash/123/info/a.trashinfo') self.listing.list_from_volume_trashdir('sandbox/.Trash/123', '/fake/vol') self.out.assert_called_with(ANY, 'sandbox/.Trash/123/info/a.trashinfo') def test_should_absolutize_relative_path_for_volume_trashdir(self): self.add_trashinfo(path='foo/bar', trashdir='sandbox/.Trash/501') self.listing.list_from_volume_trashdir('sandbox/.Trash/501', '/fake/vol') self.out.assert_called_with('/fake/vol/foo/bar', ANY) def add_trashinfo(self, path='unspecified/original/location', trashinfo_path=None, trashdir='sandbox/Trash'): trashinfo_path = trashinfo_path or self._trashinfo_path(trashdir) make_file(trashinfo_path, a_trashinfo_with_path(path)) def _trashinfo_path(self, trashdir): path = '%s/info/%s.trashinfo' % (trashdir, self.index) self.index +=1 return path
from django.utils.translation import gettext_lazy as _ from weblate.addons.base import BaseAddon from weblate.addons.events import EVENT_COMPONENT_UPDATE, EVENT_UNIT_PRE_CREATE from weblate.addons.forms import BulkEditAddonForm from weblate.trans.bulk import bulk_perform from weblate.trans.models import Unit from weblate.utils.state import STATE_FUZZY, STATE_TRANSLATED class FlagBase(BaseAddon): events = (EVENT_UNIT_PRE_CREATE,) icon = "flag.svg" @classmethod def can_install(cls, component, user): if not component.has_template(): return False # Following formats support fuzzy flag, so avoid messing up with them if component.file_format in {"ts", "po", "po-mono"}: return False return super().can_install(component, user) class SourceEditAddon(FlagBase): name = "weblate.flags.source_edit" verbose = _('Flag new source strings as "Needs editing"') description = _( "Whenever a new source string is imported from the VCS, it is " "flagged as needing editing in Weblate. This way you can easily " "filter and edit source strings written by the developers." ) def unit_pre_create(self, unit): if unit.translation.is_template and unit.state >= STATE_TRANSLATED: unit.state = STATE_FUZZY class TargetEditAddon(FlagBase): name = "weblate.flags.target_edit" verbose = _('Flag new translations as "Needs editing"') description = _( "Whenever a new translatable string is imported from the VCS, it is " "flagged as needing editing in Weblate. This way you can easily " "filter and edit translations created by the developers." ) def unit_pre_create(self, unit): if not unit.translation.is_template and unit.state >= STATE_TRANSLATED: unit.state = STATE_FUZZY class SameEditAddon(FlagBase): name = "weblate.flags.same_edit" verbose = _('Flag unchanged translations as "Needs editing"') description = _( "Whenever a new translatable string is imported from the VCS and it matches a " "source string, it is flagged as needing editing in Weblate. This is " "especially useful for file formats that include all strings even if not " "translated." ) def unit_pre_create(self, unit): if ( not unit.translation.is_template and unit.source == unit.target and "ignore-same" not in unit.all_flags and unit.state >= STATE_TRANSLATED ): unit.state = STATE_FUZZY class BulkEditAddon(BaseAddon): events = (EVENT_COMPONENT_UPDATE,) name = "weblate.flags.bulk" verbose = _("Bulk edit") description = _("Bulk edit flags, labels or state for strings.") settings_form = BulkEditAddonForm multiple = True def component_update(self, component): label_set = component.project.label_set bulk_perform( None, Unit.objects.filter(translation__component=component), query=self.instance.configuration["q"], target_state=self.instance.configuration["state"], add_flags=self.instance.configuration["add_flags"], remove_flags=self.instance.configuration["remove_flags"], add_labels=label_set.filter( name__in=self.instance.configuration["add_labels"] ), remove_labels=label_set.filter( name__in=self.instance.configuration["remove_labels"] ), )
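The three flag add-ons differ only in the predicate applied in unit_pre_create. A condensed sketch of that decision table as a plain function, using simplified stand-ins for Weblate's unit fields and state constants:

```python
STATE_TRANSLATED = 20  # simplified stand-in for weblate.utils.state


def needs_editing(rule, is_template, source, target, flags, state):
    """Return True if a newly imported unit should be flagged 'Needs editing'."""
    if state < STATE_TRANSLATED:
        return False
    if rule == "source_edit":      # new source strings in the template
        return is_template
    if rule == "target_edit":      # new translations outside the template
        return not is_template
    if rule == "same_edit":        # translations identical to their source
        return (not is_template and source == target
                and "ignore-same" not in flags)
    return False


# A translation equal to its source trips the "same_edit" rule...
assert needs_editing("same_edit", False, "OK", "OK", set(), STATE_TRANSLATED)
# ...unless the string carries the ignore-same flag.
assert not needs_editing("same_edit", False, "OK", "OK", {"ignore-same"}, STATE_TRANSLATED)
```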
from django.conf.urls import url from zinnia.tests.implementations.urls.default import ( urlpatterns as test_urlpatterns) from zinnia.views.authors import AuthorDetail from zinnia.views.categories import CategoryDetail from zinnia.views.tags import TagDetail class CustomModelDetailMixin(object): """ Mixin for changing the template_name and overriding the context. """ template_name = 'zinnia/entry_custom_list.html' def get_context_data(self, **kwargs): context = super(CustomModelDetailMixin, self).get_context_data(**kwargs) context.update({'extra': 'context'}) return context class CustomTagDetail(CustomModelDetailMixin, TagDetail): pass class CustomAuthorDetail(CustomModelDetailMixin, AuthorDetail): pass class CustomCategoryDetail(CustomModelDetailMixin, CategoryDetail): pass urlpatterns = [ url(r'^authors/(?P<username>[.+-@\w]+)/$', CustomAuthorDetail.as_view(), name='zinnia_author_detail'), url(r'^authors/(?P<username>[.+-@\w]+)/page/(?P<page>\d+)/$', CustomAuthorDetail.as_view(), name='zinnia_author_detail_paginated'), url(r'^categories/(?P<path>[-\/\w]+)/page/(?P<page>\d+)/$', CustomCategoryDetail.as_view(), name='zinnia_category_detail_paginated'), url(r'^categories/(?P<path>[-\/\w]+)/$', CustomCategoryDetail.as_view(), name='zinnia_category_detail'), url(r'^tags/(?P<tag>[^/]+)/$', CustomTagDetail.as_view(), name='zinnia_tag_detail'), url(r'^tags/(?P<tag>[^/]+)/page/(?P<page>\d+)/$', CustomTagDetail.as_view(), name='zinnia_tag_detail_paginated'), ] + test_urlpatterns
import logging from absl import flags import numpy from perfkitbenchmarker import configs from perfkitbenchmarker import sample FLAGS = flags.FLAGS BENCHMARK_NAME = 'stress_ng' BENCHMARK_CONFIG = """ stress_ng: description: Runs stress-ng vm_groups: default: vm_spec: *default_single_core disk_spec: *default_50_gb """ STRESS_NG_DIR = '~/stress_ng' GIT_REPO = 'https://github.com/ColinIanKing/stress-ng' GIT_TAG_MAP = { '0.05.23': '54722768329c9f8184c1c98db63435f201377df1', # ubuntu1604 '0.09.25': '2db2812edf99ec80c08edf98ee88806a3662031c', # ubuntu1804 } VALID_CPU_METHODS = { 'all', 'ackermann', 'bitops', 'callfunc', 'cdouble', 'cfloat', 'clongdouble', 'correlate', 'crc16', 'decimal32', 'decimal64', 'decimal128', 'dither', 'djb2a', 'double', 'euler', 'explog', 'fft', 'fibonacci', 'float', 'fnv1a', 'gamma', 'gcd', 'gray', 'hamming', 'hanoi', 'hyperbolic', 'idct', 'int128', 'int64', 'int32', 'int16', 'int8', 'int128float', 'int128double', 'int128longdouble', 'int128decimal32', 'int128decimal64', 'int128decimal128', 'int64float', 'int64double', 'int64longdouble', 'int32float', 'int32double', 'int32longdouble', 'jenkin', 'jmp', 'ln2', 'longdouble', 'loop', 'matrixprod', 'nsqrt', 'omega', 'parity', 'phi', 'pi', 'pjw', 'prime', 'psi', 'queens', 'rand', 'rand48', 'rgb', 'sdbm', 'sieve', 'sqrt', 'trig', 'union', 'zeta' } VALID_STRESSORS = { 'affinity', 'af-alg', 'aio', 'aio-linux', 'apparmor', 'bigheap', 'brk', 'bsearch', 'cache', 'chdir', 'chmod', 'clock', 'clone', 'context', 'cpu', 'cpu-online', 'crypt', 'daemon', 'dentry', 'dir', 'dup', 'epoll', 'eventfd', 'exec', 'fallocate', 'fault', 'fcntl', 'fiemap', 'fifo', 'filename', 'flock', 'fork', 'fp-error', 'fstat', 'futex', 'get', 'getrandom', 'getdent', 'handle', 'hdd', 'heapsort', 'hsearch', 'icache', 'iosync', 'inotify', 'itimer', 'kcmp', 'key', 'kill', 'klog', 'lease', 'link', 'lockbus', 'lockf', 'longjmp', 'lsearch', 'malloc', 'matrix', 'membarrier', 'memcpy', 'memfd', 'mergesort', 'mincore', 'mknod', 'mlock', 'mmap', 'mmapfork', 'mmapmany', 'mremap', 'msg', 'mq', 'nice', 'null', 'numa', 'oom-pipe', 'open', 'personality', 'pipe', 'poll', 'procfs', 'pthread', 'ptrace', 'qsort', 'quota', 'rdrand', 'readahead', 'remap-file-pages', 'rename', 'rlimit', 'seccomp', 'seek', 'sem-posix', 'sem-sysv', 'shm-posix', 'shm-sysv', 'sendfile', 'sigfd', 'sigfpe', 'sigpending', 'sigq', 'sigsegv', 'sigsuspend', 'sleep', 'socket', 'socket-fd', 'socket-pair', 'spawn', 'splice', 'stack', 'str', 'stream', 'switch', 'symlink', 'sync-file', 'sysinfo', 'sysfs', 'tee', 'timer', 'timerfd', 'tsc', 'tsearch', 'udp', 'udp-flood', 'unshare', 'urandom', 'userfaultfd', 'utime', 'vecmath', 'vfork', 'vm', 'vm-rw', 'vm-splice', 'wait', 'wcs', 'xattr', 'yield', 'zero', 'zlib', 'zombie' } CPU_SUITE = { 'af-alg', 'bsearch', 'context', 'cpu', 'cpu-online', 'crypt', 'fp-error', 'getrandom', 'heapsort', 'hsearch', 'longjmp', 'lsearch', 'matrix', 'mergesort', 'numa', 'qsort', 'rdrand', 'str', 'stream', 'tsc', 'tsearch', 'vecmath', 'wcs', 'zlib' } CPU_CACHE_SUITE = { 'bsearch', 'cache', 'heapsort', 'hsearch', 'icache', 'lockbus', 'lsearch', 'malloc', 'matrix', 'membarrier', 'memcpy', 'mergesort', 'qsort', 'str', 'stream', 'tsearch', 'vecmath', 'wcs', 'zlib' } MEMORY_SUITE = { 'bsearch', 'context', 'heapsort', 'hsearch', 'lockbus', 'lsearch', 'malloc', 'matrix', 'membarrier', 'memcpy', 'memfd', 'mergesort', 'mincore', 'null', 'numa', 'oom-pipe', 'pipe', 'qsort', 'stack', 'str', 'stream', 'tsearch', 'vm', 'vm-rw', 'wcs', 'zero', 'zlib' } # Run the stressors that are each part of all of the 
compute related stress-ng # classes: cpu, cpu-cache, and memory. DEFAULT_STRESSORS = sorted( CPU_SUITE.intersection(CPU_CACHE_SUITE).intersection(MEMORY_SUITE)) flags.DEFINE_integer('stress_ng_duration', 10, 'Number of seconds to run the test.') flags.DEFINE_boolean('stress_ng_calc_geomean', True, 'Whether to calculate geomean or not.') flags.DEFINE_list('stress_ng_custom_stressors', DEFAULT_STRESSORS, 'List of stressors to run against. Default combines cpu,' 'cpu-cache, and memory suites') flags.DEFINE_list('stress_ng_cpu_methods', [], 'List of cpu methods to run with. By default none are ran.') ALL_WORKLOADS = ['small', 'medium', 'large'] flags.DEFINE_list( 'stress_ng_thread_workloads', ['large'], 'List of threads sizes to run against. Options are' 'small (1 thread total), medium (1 thread per 2 cpus), and ' 'large (1 thread per cpu).') flags.register_validator( 'stress_ng_thread_workloads', lambda workloads: workloads and set(workloads).issubset(ALL_WORKLOADS)) ALL_VERSIONS = ['0.05.23', '0.09.25'] flags.DEFINE_enum( 'stress_ng_version', '0.09.25', ALL_VERSIONS, 'Stress-ng version to use. Default is 0.09.25 which ' 'is the default package on Ubuntu 1804.') def _GeoMeanOverflow(iterable): """Returns the geometric mean. See https://en.wikipedia.org/wiki/Geometric_mean#Relationship_with_logarithms Args: iterable: a list of positive floats to take the geometric mean of. Returns: The geometric mean of the list. """ a = numpy.log(iterable) return numpy.exp(a.sum() / len(a)) def StressngCustomStressorsValidator(stressors): """Returns whether or not the list of custom stressors is valid.""" return VALID_STRESSORS.issuperset(set(stressors)) def StressngCpuMethodsValidator(cpu_methods): """Returns whether or not the list of cpu methods is valid.""" return ('all_cpu_methods' in cpu_methods or VALID_CPU_METHODS.issuperset(set(cpu_methods))) flags.register_validator('stress_ng_custom_stressors', StressngCustomStressorsValidator) flags.register_validator('stress_ng_cpu_methods', StressngCpuMethodsValidator) def GetConfig(user_config): return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME) def Prepare(benchmark_spec): """Installs stress-ng on the target vm. Args: benchmark_spec: The benchmark specification. Contains all data that is required to run the benchmark. """ vm = benchmark_spec.vms[0] vm.InstallPackages( 'build-essential libaio-dev libapparmor-dev libattr1-dev libbsd-dev libcap-dev libgcrypt11-dev libkeyutils-dev libsctp-dev zlib1g-dev' ) vm.RemoteCommand('git clone {0} {1}'.format(GIT_REPO, STRESS_NG_DIR)) vm.RemoteCommand('cd {0} && git checkout {1}'.format( STRESS_NG_DIR, GIT_TAG_MAP[FLAGS.stress_ng_version])) vm.RemoteCommand('cd {0} && make && sudo make install'.format(STRESS_NG_DIR)) def _ParseStressngResult(metadata, output, cpu_method=None): """Returns stress-ng data as a sample. Sample output eg: stress-ng: info: [2566] dispatching hogs: 2 context stress-ng: info: [2566] successful run completed in 5.00s stress-ng: info: [2566] stressor bogo ops real time usr time sys time bogo ops/s bogo ops/s stress-ng: info: [2566] (secs) (secs) (secs) (real time) (usr+sys time) stress-ng: info: [2566] context 22429 5.00 5.49 4.48 4485.82 2249.65 Args: metadata: metadata of the sample. output: the output of the stress-ng benchmark. cpu_method: an optional flag for the cpu method for the cpu stressor. 
""" output_list = output.splitlines() output_matrix = [i.split() for i in output_list] if len(output_matrix) != 5: logging.error('output is missing') return '' assert output_matrix[2][-4] == 'bogo' and output_matrix[2][-3] == 'ops/s' assert output_matrix[3][-4] == '(real' and output_matrix[3][-3] == 'time)' line = output_matrix[4] name = line[3] value = float(line[-2]) # parse bogo ops/s (real time) if name == 'cpu' and cpu_method: return sample.Sample( metric=cpu_method, value=value, unit='bogus_ops_sec', # bogus operations per second metadata=metadata) return sample.Sample( metric=name, value=value, unit='bogus_ops_sec', # bogus operations per second metadata=metadata) def _RunWorkload(vm, num_threads): """Runs stress-ng on the target vm. Args: vm: The target vm to run on. num_threads: Number of instances of stressors to launch. Returns: A list of sample.Sample objects. """ metadata = { 'duration_sec': FLAGS.stress_ng_duration, 'threads': num_threads, 'version': FLAGS.stress_ng_version, } samples = [] values_to_geomean_list = [] stressors = FLAGS.stress_ng_custom_stressors for stressor in stressors: cmd = ('stress-ng --{stressor} {numthreads} --metrics-brief ' '-t {duration}'.format( stressor=stressor, numthreads=num_threads, duration=FLAGS.stress_ng_duration)) stdout, stderr = vm.RemoteCommand(cmd) # TODO(user): Find the actual stress-ng version that changes output to # stderr instead of stdout if FLAGS.stress_ng_version > '0.05.23': stdout = stderr stressng_sample = _ParseStressngResult(metadata, stdout) if stressng_sample: samples.append(stressng_sample) values_to_geomean_list.append(stressng_sample.value) cpu_methods = (VALID_CPU_METHODS if 'all_cpu_methods' in FLAGS.stress_ng_cpu_methods else FLAGS.stress_ng_cpu_methods) for cpu_method in cpu_methods: cmd = ('stress-ng --cpu {numthreads} --metrics-brief ' '-t {duration} --cpu-method {cpu_method}'.format( numthreads=num_threads, duration=FLAGS.stress_ng_duration, cpu_method=cpu_method)) stdout, _ = vm.RemoteCommand(cmd) stressng_sample = _ParseStressngResult(metadata, stdout, cpu_method) if stressng_sample: samples.append(stressng_sample) values_to_geomean_list.append(stressng_sample.value) if FLAGS.stress_ng_calc_geomean: geomean_metadata = metadata.copy() geomean_metadata['stressors'] = stressors # True only if each stressor provided a value geomean_metadata['valid_run'] = ( len(values_to_geomean_list) == len(stressors) + len(cpu_methods)) geomean_sample = sample.Sample( metric='STRESS_NG_GEOMEAN', value=_GeoMeanOverflow(values_to_geomean_list), unit='bogus_ops_sec', metadata=geomean_metadata) samples.append(geomean_sample) return samples def Run(benchmark_spec): """Runs stress-ng on the target vm. Args: benchmark_spec: The benchmark specification. Contains all data that is required to run the benchmark. Returns: A list of sample.Sample objects. """ vm = benchmark_spec.vms[0] samples = [] for workload in FLAGS.stress_ng_thread_workloads: if workload == 'small': samples.extend(_RunWorkload(vm, 1)) elif workload == 'medium': samples.extend(_RunWorkload(vm, vm.NumCpusForBenchmark() / 2)) elif workload == 'large': samples.extend(_RunWorkload(vm, vm.NumCpusForBenchmark())) return samples def Cleanup(benchmark_spec): """Cleans up stress-ng from the target vm. Args: benchmark_spec: The benchmark specification. Contains all data that is required to run the benchmark. """ vm = benchmark_spec.vms[0] vm.RemoteCommand('cd {0} && sudo make uninstall'.format(STRESS_NG_DIR))
import os import os.path import logging import jinja2.exceptions import pytest from PyQt5.QtCore import QUrl from qutebrowser.utils import utils, jinja from qutebrowser.config import configexc @pytest.fixture(autouse=True) def patch_read_file(monkeypatch): """pytest fixture to patch utils.read_file.""" real_resource_filename = utils.resource_filename def _read_file(path, binary=False): """A read_file which returns a simple template if the path is right.""" if path == os.path.join('html', 'test.html'): assert not binary return """Hello {{var}}""" elif path == os.path.join('html', 'test2.html'): assert not binary return """{{ resource_url('utils/testfile') }}""" elif path == os.path.join('html', 'test3.html'): assert not binary return """{{ data_url('testfile.txt') }}""" elif path == 'testfile.txt': assert binary return b'foo' elif path == os.path.join('html', 'undef.html'): assert not binary return """{{ does_not_exist() }}""" elif path == os.path.join('html', 'attributeerror.html'): assert not binary return """{{ obj.foobar }}""" else: raise IOError("Invalid path {}!".format(path)) def _resource_filename(path): if path == 'utils/testfile': return real_resource_filename(path) elif path == 'testfile.txt': return path else: raise IOError("Invalid path {}!".format(path)) monkeypatch.setattr(jinja.utils, 'read_file', _read_file) monkeypatch.setattr(jinja.utils, 'resource_filename', _resource_filename) def test_simple_template(): """Test with a simple template.""" data = jinja.render('test.html', var='World') assert data == "Hello World" def test_resource_url(): """Test resource_url() which can be used from templates.""" data = jinja.render('test2.html') print(data) url = QUrl(data) assert url.isValid() assert url.scheme() == 'file' path = url.path() if utils.is_windows: path = path.lstrip('/') path = path.replace('/', os.sep) with open(path, 'r', encoding='utf-8') as f: assert f.read().splitlines()[0] == "Hello World!" def test_data_url(): """Test data_url() which can be used from templates.""" data = jinja.render('test3.html') print(data) url = QUrl(data) assert url.isValid() assert data == 'data:text/plain;base64,Zm9v' # 'foo' def test_not_found(caplog): """Test with a template which does not exist.""" with caplog.at_level(logging.ERROR): data = jinja.render('does_not_exist.html') assert "The does_not_exist.html template could not be found!" in data assert caplog.messages[0].startswith("The does_not_exist.html template" " could not be loaded from") def test_utf8(): """Test rendering with a UTF8 template. This was an attempt to get a failing test case for #127 but it seems the issue is elsewhere. 
https://github.com/qutebrowser/qutebrowser/issues/127 """ data = jinja.render('test.html', var='\u2603') assert data == "Hello \u2603" def test_undefined_function(caplog): """Make sure undefined attributes crash since we preload resources..""" with pytest.raises(jinja2.exceptions.UndefinedError): jinja.render('undef.html') def test_attribute_error(): """Make sure accessing an unknown attribute fails.""" with pytest.raises(AttributeError): jinja.render('attributeerror.html', obj=object()) @pytest.mark.parametrize('escape', [True, False]) def test_autoescape(escape): if not escape: with jinja.environment.no_autoescape(): template = jinja.environment.from_string("{{ v }}") assert template.render(v='<foo') == '<foo' template = jinja.environment.from_string("{{ v }}") assert template.render(v='<foo') == '&lt;foo' @pytest.mark.parametrize('template, expected', [ ('{{ func1(conf.aliases) }} {{ func2(conf.backend) }}', ['aliases', 'backend']), ('{{ conf.aliases["a"].propname }}', ['aliases']), ('{{ conf.auto_save.interval + conf.hints.min_chars }}', ['auto_save.interval', 'hints.min_chars']), ('{{ notconf.a.b.c }}', []), ]) def test_template_config_variables(template, expected, config_stub): assert jinja.template_config_variables(template) == frozenset(expected) @pytest.mark.parametrize('template', [ '{{ func1(conf.aaa) }}', '{{ conf.bbb["a"].propname }}', '{{ conf.ccc + 1 }}', ]) def test_template_config_variables_no_option(template, config_stub): with pytest.raises(configexc.NoOptionError): jinja.template_config_variables(template)
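# --- Hedged illustration (plain jinja2, not qutebrowser.utils.jinja) ---
# A standalone sketch of the two behaviours the tests above assert: HTML autoescaping of
# rendered variables and an UndefinedError when a template calls an unknown name. The
# environment configuration below is an assumption, not qutebrowser's actual setup.
import jinja2

env = jinja2.Environment(autoescape=True, undefined=jinja2.StrictUndefined)

# '<' is escaped, matching test_autoescape's escape=True branch.
assert env.from_string("{{ v }}").render(v='<foo') == '&lt;foo'

# Calling an undefined name raises, matching test_undefined_function.
try:
    env.from_string("{{ does_not_exist() }}").render()
except jinja2.exceptions.UndefinedError as err:
    print("raised as expected:", err)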
import copy import logging from aiohttp import web_response import plexapi.exceptions from plexapi.gdm import GDM from plexauth import PlexAuth import requests.exceptions import voluptuous as vol from homeassistant import config_entries from homeassistant.components.http.view import HomeAssistantView from homeassistant.components.media_player import DOMAIN as MP_DOMAIN from homeassistant.const import ( CONF_CLIENT_ID, CONF_HOST, CONF_PORT, CONF_SOURCE, CONF_SSL, CONF_TOKEN, CONF_URL, CONF_VERIFY_SSL, ) from homeassistant.core import callback from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.network import get_url from .const import ( # pylint: disable=unused-import AUTH_CALLBACK_NAME, AUTH_CALLBACK_PATH, AUTOMATIC_SETUP_STRING, CONF_IGNORE_NEW_SHARED_USERS, CONF_IGNORE_PLEX_WEB_CLIENTS, CONF_MONITORED_USERS, CONF_SERVER, CONF_SERVER_IDENTIFIER, CONF_USE_EPISODE_ART, DEFAULT_PORT, DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN, MANUAL_SETUP_STRING, PLEX_SERVER_CONFIG, SERVERS, X_PLEX_DEVICE_NAME, X_PLEX_PLATFORM, X_PLEX_PRODUCT, X_PLEX_VERSION, ) from .errors import NoServersFound, ServerNotSpecified from .server import PlexServer _LOGGER = logging.getLogger(__package__) @callback def configured_servers(hass): """Return a set of the configured Plex servers.""" return { entry.data[CONF_SERVER_IDENTIFIER] for entry in hass.config_entries.async_entries(DOMAIN) } async def async_discover(hass): """Scan for available Plex servers.""" gdm = GDM() await hass.async_add_executor_job(gdm.scan) for server_data in gdm.entries: await hass.config_entries.flow.async_init( DOMAIN, context={CONF_SOURCE: config_entries.SOURCE_INTEGRATION_DISCOVERY}, data=server_data, ) class PlexFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle a Plex config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_PUSH @staticmethod @callback def async_get_options_flow(config_entry): """Get the options flow for this handler.""" return PlexOptionsFlowHandler(config_entry) def __init__(self): """Initialize the Plex flow.""" self.current_login = {} self.available_servers = None self.plexauth = None self.token = None self.client_id = None self._manual = False async def async_step_user( self, user_input=None, errors=None ): # pylint: disable=arguments-differ """Handle a flow initialized by the user.""" if user_input is not None: return await self.async_step_plex_website_auth() if self.show_advanced_options: return await self.async_step_user_advanced(errors=errors) return self.async_show_form(step_id="user", errors=errors) async def async_step_user_advanced(self, user_input=None, errors=None): """Handle an advanced mode flow initialized by the user.""" if user_input is not None: if user_input.get("setup_method") == MANUAL_SETUP_STRING: self._manual = True return await self.async_step_manual_setup() return await self.async_step_plex_website_auth() data_schema = vol.Schema( { vol.Required("setup_method", default=AUTOMATIC_SETUP_STRING): vol.In( [AUTOMATIC_SETUP_STRING, MANUAL_SETUP_STRING] ) } ) return self.async_show_form( step_id="user_advanced", data_schema=data_schema, errors=errors ) async def async_step_manual_setup(self, user_input=None, errors=None): """Begin manual configuration.""" if user_input is not None and errors is None: user_input.pop(CONF_URL, None) host = user_input.get(CONF_HOST) if host: port = user_input[CONF_PORT] prefix = "https" if user_input.get(CONF_SSL) else "http" 
user_input[CONF_URL] = f"{prefix}://{host}:{port}" elif CONF_TOKEN not in user_input: return await self.async_step_manual_setup( user_input=user_input, errors={"base": "host_or_token"} ) return await self.async_step_server_validate(user_input) previous_input = user_input or {} data_schema = vol.Schema( { vol.Optional( CONF_HOST, description={"suggested_value": previous_input.get(CONF_HOST)}, ): str, vol.Required( CONF_PORT, default=previous_input.get(CONF_PORT, DEFAULT_PORT) ): int, vol.Required( CONF_SSL, default=previous_input.get(CONF_SSL, DEFAULT_SSL) ): bool, vol.Required( CONF_VERIFY_SSL, default=previous_input.get(CONF_VERIFY_SSL, DEFAULT_VERIFY_SSL), ): bool, vol.Optional( CONF_TOKEN, description={"suggested_value": previous_input.get(CONF_TOKEN)}, ): str, } ) return self.async_show_form( step_id="manual_setup", data_schema=data_schema, errors=errors ) async def async_step_server_validate(self, server_config): """Validate a provided configuration.""" errors = {} self.current_login = server_config plex_server = PlexServer(self.hass, server_config) try: await self.hass.async_add_executor_job(plex_server.connect) except NoServersFound: _LOGGER.error("No servers linked to Plex account") errors["base"] = "no_servers" except (plexapi.exceptions.BadRequest, plexapi.exceptions.Unauthorized): _LOGGER.error("Invalid credentials provided, config not created") errors[CONF_TOKEN] = "faulty_credentials" except requests.exceptions.SSLError as error: _LOGGER.error("SSL certificate error: [%s]", error) errors["base"] = "ssl_error" except (plexapi.exceptions.NotFound, requests.exceptions.ConnectionError): server_identifier = ( server_config.get(CONF_URL) or plex_server.server_choice or "Unknown" ) _LOGGER.error("Plex server could not be reached: %s", server_identifier) errors[CONF_HOST] = "not_found" except ServerNotSpecified as available_servers: self.available_servers = available_servers.args[0] return await self.async_step_select_server() except Exception as error: # pylint: disable=broad-except _LOGGER.exception("Unknown error connecting to Plex server: %s", error) return self.async_abort(reason="unknown") if errors: if self._manual: return await self.async_step_manual_setup( user_input=server_config, errors=errors ) return await self.async_step_user(errors=errors) server_id = plex_server.machine_identifier url = plex_server.url_in_use token = server_config.get(CONF_TOKEN) entry_config = {CONF_URL: url} if self.client_id: entry_config[CONF_CLIENT_ID] = self.client_id if token: entry_config[CONF_TOKEN] = token if url.startswith("https"): entry_config[CONF_VERIFY_SSL] = server_config.get( CONF_VERIFY_SSL, DEFAULT_VERIFY_SSL ) data = { CONF_SERVER: plex_server.friendly_name, CONF_SERVER_IDENTIFIER: server_id, PLEX_SERVER_CONFIG: entry_config, } entry = await self.async_set_unique_id(server_id) if ( self.context[CONF_SOURCE] # pylint: disable=no-member == config_entries.SOURCE_REAUTH ): self.hass.config_entries.async_update_entry(entry, data=data) _LOGGER.debug("Updated config entry for %s", plex_server.friendly_name) await self.hass.config_entries.async_reload(entry.entry_id) return self.async_abort(reason="reauth_successful") self._abort_if_unique_id_configured() _LOGGER.debug("Valid config created for %s", plex_server.friendly_name) return self.async_create_entry(title=plex_server.friendly_name, data=data) async def async_step_select_server(self, user_input=None): """Use selected Plex server.""" config = dict(self.current_login) if user_input is not None: config[CONF_SERVER] = 
user_input[CONF_SERVER] return await self.async_step_server_validate(config) configured = configured_servers(self.hass) available_servers = [ name for (name, server_id) in self.available_servers if server_id not in configured ] if not available_servers: return self.async_abort(reason="all_configured") if len(available_servers) == 1: config[CONF_SERVER] = available_servers[0] return await self.async_step_server_validate(config) return self.async_show_form( step_id="select_server", data_schema=vol.Schema( {vol.Required(CONF_SERVER): vol.In(available_servers)} ), errors={}, ) async def async_step_integration_discovery(self, discovery_info): """Handle GDM discovery.""" machine_identifier = discovery_info["data"]["Resource-Identifier"] await self.async_set_unique_id(machine_identifier) self._abort_if_unique_id_configured() host = f"{discovery_info['from'][0]}:{discovery_info['data']['Port']}" name = discovery_info["data"]["Name"] self.context["title_placeholders"] = { # pylint: disable=no-member "host": host, "name": name, } return await self.async_step_user() async def async_step_plex_website_auth(self): """Begin external auth flow on Plex website.""" self.hass.http.register_view(PlexAuthorizationCallbackView) payload = { "X-Plex-Device-Name": X_PLEX_DEVICE_NAME, "X-Plex-Version": X_PLEX_VERSION, "X-Plex-Product": X_PLEX_PRODUCT, "X-Plex-Device": self.hass.config.location_name, "X-Plex-Platform": X_PLEX_PLATFORM, "X-Plex-Model": "Plex OAuth", } session = async_get_clientsession(self.hass) self.plexauth = PlexAuth(payload, session) await self.plexauth.initiate_auth() forward_url = f"{get_url(self.hass)}{AUTH_CALLBACK_PATH}?flow_id={self.flow_id}" auth_url = self.plexauth.auth_url(forward_url) return self.async_external_step(step_id="obtain_token", url=auth_url) async def async_step_obtain_token(self, user_input=None): """Obtain token after external auth completed.""" token = await self.plexauth.token(10) if not token: return self.async_external_step_done(next_step_id="timed_out") self.token = token self.client_id = self.plexauth.client_identifier return self.async_external_step_done(next_step_id="use_external_token") async def async_step_timed_out(self, user_input=None): """Abort flow when time expires.""" return self.async_abort(reason="token_request_timeout") async def async_step_use_external_token(self, user_input=None): """Continue server validation with external token.""" server_config = {CONF_TOKEN: self.token} return await self.async_step_server_validate(server_config) async def async_step_reauth(self, data): """Handle a reauthorization flow request.""" self.current_login = dict(data) return await self.async_step_user() class PlexOptionsFlowHandler(config_entries.OptionsFlow): """Handle Plex options.""" def __init__(self, config_entry): """Initialize Plex options flow.""" self.options = copy.deepcopy(dict(config_entry.options)) self.server_id = config_entry.data[CONF_SERVER_IDENTIFIER] async def async_step_init(self, user_input=None): """Manage the Plex options.""" return await self.async_step_plex_mp_settings() async def async_step_plex_mp_settings(self, user_input=None): """Manage the Plex media_player options.""" plex_server = self.hass.data[DOMAIN][SERVERS][self.server_id] if user_input is not None: self.options[MP_DOMAIN][CONF_USE_EPISODE_ART] = user_input[ CONF_USE_EPISODE_ART ] self.options[MP_DOMAIN][CONF_IGNORE_NEW_SHARED_USERS] = user_input[ CONF_IGNORE_NEW_SHARED_USERS ] self.options[MP_DOMAIN][CONF_IGNORE_PLEX_WEB_CLIENTS] = user_input[ CONF_IGNORE_PLEX_WEB_CLIENTS ] 
account_data = { user: {"enabled": bool(user in user_input[CONF_MONITORED_USERS])} for user in plex_server.accounts } self.options[MP_DOMAIN][CONF_MONITORED_USERS] = account_data return self.async_create_entry(title="", data=self.options) available_accounts = {name: name for name in plex_server.accounts} available_accounts[plex_server.owner] += " [Owner]" default_accounts = plex_server.accounts known_accounts = set(plex_server.option_monitored_users) if known_accounts: default_accounts = { user for user in plex_server.option_monitored_users if plex_server.option_monitored_users[user]["enabled"] } for user in plex_server.accounts: if user not in known_accounts: available_accounts[user] += " [New]" if not plex_server.option_ignore_new_shared_users: for new_user in plex_server.accounts - known_accounts: default_accounts.add(new_user) return self.async_show_form( step_id="plex_mp_settings", data_schema=vol.Schema( { vol.Required( CONF_USE_EPISODE_ART, default=plex_server.option_use_episode_art, ): bool, vol.Optional( CONF_MONITORED_USERS, default=default_accounts ): cv.multi_select(available_accounts), vol.Required( CONF_IGNORE_NEW_SHARED_USERS, default=plex_server.option_ignore_new_shared_users, ): bool, vol.Required( CONF_IGNORE_PLEX_WEB_CLIENTS, default=plex_server.option_ignore_plexweb_clients, ): bool, } ), ) class PlexAuthorizationCallbackView(HomeAssistantView): """Handle callback from external auth.""" url = AUTH_CALLBACK_PATH name = AUTH_CALLBACK_NAME requires_auth = False async def get(self, request): """Receive authorization confirmation.""" hass = request.app["hass"] await hass.config_entries.flow.async_configure( flow_id=request.query["flow_id"], user_input=None ) return web_response.Response( headers={"content-type": "text/html"}, text="<script>window.close()</script>Success! This window can be closed", )
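# --- Hedged illustration (standalone, not Home Assistant code) ---
# A small sketch of the URL rule applied in async_step_manual_setup above: the scheme is
# chosen from the SSL toggle and joined with host and port into the Plex base URL. The
# helper name is an assumption made for this example.
def build_plex_base_url(host: str, port: int, use_ssl: bool) -> str:
    prefix = "https" if use_ssl else "http"
    return f"{prefix}://{host}:{port}"


assert build_plex_base_url("192.168.1.10", 32400, use_ssl=False) == "http://192.168.1.10:32400"
assert build_plex_base_url("plex.example.org", 32400, use_ssl=True) == "https://plex.example.org:32400"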
from datetime import timedelta import logging from opensensemap_api import OpenSenseMap from opensensemap_api.exceptions import OpenSenseMapError import voluptuous as vol from homeassistant.components.air_quality import PLATFORM_SCHEMA, AirQualityEntity from homeassistant.const import CONF_NAME from homeassistant.exceptions import PlatformNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.util import Throttle _LOGGER = logging.getLogger(__name__) ATTRIBUTION = "Data provided by openSenseMap" CONF_STATION_ID = "station_id" SCAN_INTERVAL = timedelta(minutes=10) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Required(CONF_STATION_ID): cv.string, vol.Optional(CONF_NAME): cv.string} ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the openSenseMap air quality platform.""" name = config.get(CONF_NAME) station_id = config[CONF_STATION_ID] session = async_get_clientsession(hass) osm_api = OpenSenseMapData(OpenSenseMap(station_id, hass.loop, session)) await osm_api.async_update() if "name" not in osm_api.api.data: _LOGGER.error("Station %s is not available", station_id) raise PlatformNotReady station_name = osm_api.api.data["name"] if name is None else name async_add_entities([OpenSenseMapQuality(station_name, osm_api)], True) class OpenSenseMapQuality(AirQualityEntity): """Implementation of an openSenseMap air quality entity.""" def __init__(self, name, osm): """Initialize the air quality entity.""" self._name = name self._osm = osm @property def name(self): """Return the name of the air quality entity.""" return self._name @property def particulate_matter_2_5(self): """Return the particulate matter 2.5 level.""" return self._osm.api.pm2_5 @property def particulate_matter_10(self): """Return the particulate matter 10 level.""" return self._osm.api.pm10 @property def attribution(self): """Return the attribution.""" return ATTRIBUTION async def async_update(self): """Get the latest data from the openSenseMap API.""" await self._osm.async_update() class OpenSenseMapData: """Get the latest data and update the states.""" def __init__(self, api): """Initialize the data object.""" self.api = api @Throttle(SCAN_INTERVAL) async def async_update(self): """Get the latest data from the Pi-hole.""" try: await self.api.get_data() except OpenSenseMapError as err: _LOGGER.error("Unable to fetch data: %s", err)
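# --- Hedged illustration (standalone, not homeassistant.util.Throttle) ---
# A minimal sketch of the throttling idea used by OpenSenseMapData.async_update above:
# calls made within the interval are skipped, so the openSenseMap API is queried at most
# once per SCAN_INTERVAL. Names and the placeholder payload are assumptions.
from datetime import datetime, timedelta


def throttle(interval: timedelta):
    def decorator(func):
        last_call = None

        def wrapper(*args, **kwargs):
            nonlocal last_call
            now = datetime.now()
            if last_call is not None and now - last_call < interval:
                return None  # too soon: skip the update
            last_call = now
            return func(*args, **kwargs)

        return wrapper

    return decorator


@throttle(timedelta(minutes=10))
def fetch_station_data():
    return {"pm2_5": 4.2, "pm10": 7.1}  # placeholder payload


print(fetch_station_data())  # dict on the first call
print(fetch_station_data())  # None: throttled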
from datetime import timedelta import logging from typing import Callable, List, Optional from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_NAME, DATA_MEGABYTES, DATA_RATE_MEGABYTES_PER_SECOND, DEVICE_CLASS_TIMESTAMP, ) from homeassistant.helpers.entity import Entity from homeassistant.helpers.typing import HomeAssistantType from homeassistant.util.dt import utcnow from . import NZBGetEntity from .const import DATA_COORDINATOR, DOMAIN from .coordinator import NZBGetDataUpdateCoordinator _LOGGER = logging.getLogger(__name__) SENSOR_TYPES = { "article_cache": ["ArticleCacheMB", "Article Cache", DATA_MEGABYTES], "average_download_rate": [ "AverageDownloadRate", "Average Speed", DATA_RATE_MEGABYTES_PER_SECOND, ], "download_paused": ["DownloadPaused", "Download Paused", None], "download_rate": ["DownloadRate", "Speed", DATA_RATE_MEGABYTES_PER_SECOND], "download_size": ["DownloadedSizeMB", "Size", DATA_MEGABYTES], "free_disk_space": ["FreeDiskSpaceMB", "Disk Free", DATA_MEGABYTES], "post_job_count": ["PostJobCount", "Post Processing Jobs", "Jobs"], "post_paused": ["PostPaused", "Post Processing Paused", None], "remaining_size": ["RemainingSizeMB", "Queue Size", DATA_MEGABYTES], "uptime": ["UpTimeSec", "Uptime", None], } async def async_setup_entry( hass: HomeAssistantType, entry: ConfigEntry, async_add_entities: Callable[[List[Entity], bool], None], ) -> None: """Set up NZBGet sensor based on a config entry.""" coordinator: NZBGetDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id][ DATA_COORDINATOR ] sensors = [] for sensor_config in SENSOR_TYPES.values(): sensors.append( NZBGetSensor( coordinator, entry.entry_id, entry.data[CONF_NAME], sensor_config[0], sensor_config[1], sensor_config[2], ) ) async_add_entities(sensors) class NZBGetSensor(NZBGetEntity): """Representation of a NZBGet sensor.""" def __init__( self, coordinator: NZBGetDataUpdateCoordinator, entry_id: str, entry_name: str, sensor_type: str, sensor_name: str, unit_of_measurement: Optional[str] = None, ): """Initialize a new NZBGet sensor.""" self._sensor_type = sensor_type self._unique_id = f"{entry_id}_{sensor_type}" self._unit_of_measurement = unit_of_measurement super().__init__( coordinator=coordinator, entry_id=entry_id, name=f"{entry_name} {sensor_name}", ) @property def device_class(self): """Return the device class.""" if "UpTimeSec" in self._sensor_type: return DEVICE_CLASS_TIMESTAMP return None @property def unique_id(self) -> str: """Return the unique ID of the sensor.""" return self._unique_id @property def unit_of_measurement(self) -> str: """Return the unit that the state of sensor is expressed in.""" return self._unit_of_measurement @property def state(self): """Return the state of the sensor.""" value = self.coordinator.data["status"].get(self._sensor_type) if value is None: _LOGGER.warning("Unable to locate value for %s", self._sensor_type) return None if "DownloadRate" in self._sensor_type and value > 0: # Convert download rate from Bytes/s to MBytes/s return round(value / 2 ** 20, 2) if "UpTimeSec" in self._sensor_type and value > 0: uptime = utcnow() - timedelta(seconds=value) return uptime.replace(microsecond=0).isoformat() return value
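# --- Hedged illustration (standalone helpers, names assumed) ---
# A sketch of the two conversions NZBGetSensor.state performs above: DownloadRate is
# reported by NZBGet in bytes/s and shown as MB/s, and UpTimeSec becomes an ISO 8601
# timestamp for when the daemon started (now minus uptime).
from datetime import datetime, timedelta, timezone


def bytes_per_sec_to_mb_per_sec(value: float) -> float:
    return round(value / 2 ** 20, 2)


def uptime_to_started_at(uptime_sec: int) -> str:
    started = datetime.now(timezone.utc) - timedelta(seconds=uptime_sec)
    return started.replace(microsecond=0).isoformat()


assert bytes_per_sec_to_mb_per_sec(5 * 2 ** 20) == 5.0
print(uptime_to_started_at(3600))  # e.g. "2024-05-01T11:00:00+00:00"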
import logging import os.path import discord import voluptuous as vol from homeassistant.components.notify import ( ATTR_DATA, ATTR_TARGET, PLATFORM_SCHEMA, BaseNotificationService, ) from homeassistant.const import CONF_TOKEN import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Required(CONF_TOKEN): cv.string}) ATTR_IMAGES = "images" def get_service(hass, config, discovery_info=None): """Get the Discord notification service.""" token = config[CONF_TOKEN] return DiscordNotificationService(hass, token) class DiscordNotificationService(BaseNotificationService): """Implement the notification service for Discord.""" def __init__(self, hass, token): """Initialize the service.""" self.token = token self.hass = hass def file_exists(self, filename): """Check if a file exists on disk and is in authorized path.""" if not self.hass.config.is_allowed_path(filename): return False return os.path.isfile(filename) async def async_send_message(self, message, **kwargs): """Login to Discord, send message to channel(s) and log out.""" discord.VoiceClient.warn_nacl = False discord_bot = discord.Client() images = None if ATTR_TARGET not in kwargs: _LOGGER.error("No target specified") return None data = kwargs.get(ATTR_DATA) or {} if ATTR_IMAGES in data: images = [] for image in data.get(ATTR_IMAGES): image_exists = await self.hass.async_add_executor_job( self.file_exists, image ) if image_exists: images.append(image) else: _LOGGER.warning("Image not found: %s", image) # pylint: disable=unused-variable @discord_bot.event async def on_ready(): """Send the messages when the bot is ready.""" try: for channelid in kwargs[ATTR_TARGET]: channelid = int(channelid) channel = discord_bot.get_channel( channelid ) or discord_bot.get_user(channelid) if channel is None: _LOGGER.warning("Channel not found for id: %s", channelid) continue # Must create new instances of File for each channel. files = None if images: files = [] for image in images: files.append(discord.File(image)) await channel.send(message, files=files) except (discord.errors.HTTPException, discord.errors.NotFound) as error: _LOGGER.warning("Communication error: %s", error) await discord_bot.logout() await discord_bot.close() # Using reconnect=False prevents multiple ready events to be fired. await discord_bot.start(self.token, reconnect=False)
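# --- Hedged illustration (standalone, not the notification service above) ---
# A sketch of the image-filtering step: only files that actually exist and live inside an
# allowed directory are attached, everything else is skipped with a warning. The function
# name and the example paths are assumptions for this illustration.
import os.path


def filter_images(paths, allowed_dirs):
    allowed = []
    for path in paths:
        inside_allowed = any(
            os.path.abspath(path).startswith(os.path.abspath(base) + os.sep)
            for base in allowed_dirs
        )
        if inside_allowed and os.path.isfile(path):
            allowed.append(path)
        else:
            print(f"Image not found or not allowed: {path}")
    return allowed


print(filter_images(["/config/www/cat.png", "/etc/passwd"], ["/config/www"]))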
import asyncio import logging from typing import Any import aiohttp from aiohttp.hdrs import AUTHORIZATION import async_timeout import voluptuous as vol from homeassistant.components.scene import Scene from homeassistant.const import ( CONF_PLATFORM, CONF_TIMEOUT, CONF_TOKEN, HTTP_OK, HTTP_UNAUTHORIZED, ) from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) DEFAULT_TIMEOUT = 10 PLATFORM_SCHEMA = vol.Schema( { vol.Required(CONF_PLATFORM): "lifx_cloud", vol.Required(CONF_TOKEN): cv.string, vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int, } ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the scenes stored in the LIFX Cloud.""" token = config.get(CONF_TOKEN) timeout = config.get(CONF_TIMEOUT) headers = {AUTHORIZATION: f"Bearer {token}"} url = "https://api.lifx.com/v1/scenes" try: httpsession = async_get_clientsession(hass) with async_timeout.timeout(timeout): scenes_resp = await httpsession.get(url, headers=headers) except (asyncio.TimeoutError, aiohttp.ClientError): _LOGGER.exception("Error on %s", url) return False status = scenes_resp.status if status == HTTP_OK: data = await scenes_resp.json() devices = [LifxCloudScene(hass, headers, timeout, scene) for scene in data] async_add_entities(devices) return True if status == HTTP_UNAUTHORIZED: _LOGGER.error("Unauthorized (bad token?) on %s", url) return False _LOGGER.error("HTTP error %d on %s", scenes_resp.status, url) return False class LifxCloudScene(Scene): """Representation of a LIFX Cloud scene.""" def __init__(self, hass, headers, timeout, scene_data): """Initialize the scene.""" self.hass = hass self._headers = headers self._timeout = timeout self._name = scene_data["name"] self._uuid = scene_data["uuid"] @property def name(self): """Return the name of the scene.""" return self._name async def async_activate(self, **kwargs: Any) -> None: """Activate the scene.""" url = f"https://api.lifx.com/v1/scenes/scene_id:{self._uuid}/activate" try: httpsession = async_get_clientsession(self.hass) with async_timeout.timeout(self._timeout): await httpsession.put(url, headers=self._headers) except (asyncio.TimeoutError, aiohttp.ClientError): _LOGGER.exception("Error on %s", url)
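# --- Hedged illustration (plain string handling, names assumed) ---
# A sketch of how the LIFX Cloud requests above are shaped: a bearer-token Authorization
# header and a per-scene activation URL built from the scene UUID.
def lifx_headers(token: str) -> dict:
    return {"Authorization": f"Bearer {token}"}


def lifx_scene_activate_url(scene_uuid: str) -> str:
    return f"https://api.lifx.com/v1/scenes/scene_id:{scene_uuid}/activate"


assert lifx_scene_activate_url("abc-123") == (
    "https://api.lifx.com/v1/scenes/scene_id:abc-123/activate"
)
print(lifx_headers("example-token"))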
from unittest import mock from nikola import Nikola from nikola.plugins.misc.taxonomies_classifier import TaxonomiesClassifier from nikola.plugins.task.authors import ClassifyAuthors from nikola.plugins.task.categories import ClassifyCategories from nikola.plugins.task.tags import ClassifyTags from nikola.plugins.task.taxonomies import RenderTaxonomies import pytest @pytest.fixture(params=[ClassifyAuthors, ClassifyCategories, ClassifyTags], ids=["authors", "categories", "tags"]) def taxonomy(request): return request.param() @pytest.fixture(params=[ "base:", "base:blog", "base:path/with/trailing/slash/", "base:/path/with/leading/slash", "index:tags.html", "index:blog/tags.html", "index:path/to/tags.html", "index:/path/with/leading/slash.html", ]) def path(request): return request.param @pytest.fixture def fixture(taxonomy, path): scheme, _, path = path.partition(':') append_index = scheme == 'base' if isinstance(taxonomy, ClassifyAuthors) and append_index: site = Nikola(TRANSLATIONS={"en": ""}, AUTHOR_PATH=path) elif isinstance(taxonomy, ClassifyAuthors) and not append_index: pytest.skip("There is no AUTHORS_INDEX_PATH setting") elif isinstance(taxonomy, ClassifyCategories) and append_index: site = Nikola(TRANSLATIONS={"en": ""}, CATEGORY_PATH=path) elif isinstance(taxonomy, ClassifyCategories) and not append_index: site = Nikola(TRANSLATIONS={"en": ""}, CATEGORIES_INDEX_PATH=path) elif isinstance(taxonomy, ClassifyTags) and append_index: site = Nikola(TRANSLATIONS={"en": ""}, TAG_PATH=path) elif isinstance(taxonomy, ClassifyTags) and not append_index: site = Nikola(TRANSLATIONS={"en": ""}, TAGS_INDEX_PATH=path) else: raise TypeError("Unknown taxonomy %r" % type(taxonomy)) site._template_system = mock.MagicMock() site._template_system.template_deps.return_value = [] site._template_system.name = "dummy" site.hierarchy_per_classification = {taxonomy.classification_name: {"en": []}} site.posts_per_classification = {taxonomy.classification_name: {"en": {}}} site.taxonomy_plugins = {taxonomy.classification_name: taxonomy} taxonomy.set_site(site) classifier = TaxonomiesClassifier() classifier.set_site(site) expected = path.strip("/") if append_index: expected += "/" if not expected.startswith("/"): expected = "/" + expected return site, classifier, taxonomy, append_index, expected def test_render_taxonomies_permalink(fixture): # Arrange site, _, taxonomy, _, expected = fixture renderer = RenderTaxonomies() renderer.set_site(site) # Act tasks = list(renderer._generate_classification_overview(taxonomy, "en")) # Assert action, args = tasks[0]["actions"][0] context = args[2] assert context["permalink"] == expected def test_taxonomy_index_path_helper(fixture): # Arrange site, _, taxonomy, _, expected = fixture # Act path = site.path(taxonomy.classification_name + "_index", "name", "en", is_link=True) # Assert assert path == expected def test_taxonomy_classifier_index_path(fixture): # Arrange site, classifier, taxonomy, append_index, expected = fixture if append_index: expected += "index.html" # Act path = classifier._taxonomy_index_path("name", "en", taxonomy) # Assert assert path == [x for x in expected.split('/') if x] def test_taxonomy_overview_path(fixture): # Arrange _, _, taxonomy, append_index, expected = fixture # Act result = taxonomy.get_overview_path("en") # Assert assert result == ([x for x in expected.split('/') if x], "always" if append_index else "never")
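# --- Hedged illustration (standalone helper, name assumed) ---
# A sketch of the expected-path rule the fixture above encodes: surrounding slashes are
# stripped, a leading slash is always restored, and a trailing slash is appended only for
# "base:" style settings where an index page gets added.
def expected_taxonomy_path(path: str, append_index: bool) -> str:
    expected = path.strip("/")
    if append_index:
        expected += "/"
    if not expected.startswith("/"):
        expected = "/" + expected
    return expected


assert expected_taxonomy_path("blog", append_index=True) == "/blog/"
assert expected_taxonomy_path("", append_index=True) == "/"
assert expected_taxonomy_path("/path/with/leading/slash.html", append_index=False) == \
    "/path/with/leading/slash.html"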
import asyncio from datetime import timedelta from functools import partial import logging import re import sys from typing import Any, Dict from icmplib import SocketPermissionError, ping as icmp_ping import voluptuous as vol from homeassistant.components.binary_sensor import ( DEVICE_CLASS_CONNECTIVITY, PLATFORM_SCHEMA, BinarySensorEntity, ) from homeassistant.const import CONF_HOST, CONF_NAME import homeassistant.helpers.config_validation as cv from homeassistant.helpers.reload import setup_reload_service from . import DOMAIN, PLATFORMS, async_get_next_ping_id from .const import PING_TIMEOUT _LOGGER = logging.getLogger(__name__) ATTR_ROUND_TRIP_TIME_AVG = "round_trip_time_avg" ATTR_ROUND_TRIP_TIME_MAX = "round_trip_time_max" ATTR_ROUND_TRIP_TIME_MDEV = "round_trip_time_mdev" ATTR_ROUND_TRIP_TIME_MIN = "round_trip_time_min" CONF_PING_COUNT = "count" DEFAULT_NAME = "Ping" DEFAULT_PING_COUNT = 5 SCAN_INTERVAL = timedelta(minutes=5) PARALLEL_UPDATES = 0 PING_MATCHER = re.compile( r"(?P<min>\d+.\d+)\/(?P<avg>\d+.\d+)\/(?P<max>\d+.\d+)\/(?P<mdev>\d+.\d+)" ) PING_MATCHER_BUSYBOX = re.compile( r"(?P<min>\d+.\d+)\/(?P<avg>\d+.\d+)\/(?P<max>\d+.\d+)" ) WIN32_PING_MATCHER = re.compile(r"(?P<min>\d+)ms.+(?P<max>\d+)ms.+(?P<avg>\d+)ms") PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_PING_COUNT, default=DEFAULT_PING_COUNT): vol.Range( min=1, max=100 ), } ) def setup_platform(hass, config, add_entities, discovery_info=None) -> None: """Set up the Ping Binary sensor.""" setup_reload_service(hass, DOMAIN, PLATFORMS) host = config[CONF_HOST] count = config[CONF_PING_COUNT] name = config.get(CONF_NAME, f"{DEFAULT_NAME} {host}") try: # Verify we can create a raw socket, or # fallback to using a subprocess icmp_ping("127.0.0.1", count=0, timeout=0) ping_cls = PingDataICMPLib except SocketPermissionError: ping_cls = PingDataSubProcess ping_data = ping_cls(hass, host, count) add_entities([PingBinarySensor(name, ping_data)], True) class PingBinarySensor(BinarySensorEntity): """Representation of a Ping Binary sensor.""" def __init__(self, name: str, ping) -> None: """Initialize the Ping Binary sensor.""" self._name = name self._ping = ping @property def name(self) -> str: """Return the name of the device.""" return self._name @property def device_class(self) -> str: """Return the class of this sensor.""" return DEVICE_CLASS_CONNECTIVITY @property def is_on(self) -> bool: """Return true if the binary sensor is on.""" return self._ping.available @property def device_state_attributes(self) -> Dict[str, Any]: """Return the state attributes of the ICMP checo request.""" if self._ping.data is not False: return { ATTR_ROUND_TRIP_TIME_AVG: self._ping.data["avg"], ATTR_ROUND_TRIP_TIME_MAX: self._ping.data["max"], ATTR_ROUND_TRIP_TIME_MDEV: self._ping.data["mdev"], ATTR_ROUND_TRIP_TIME_MIN: self._ping.data["min"], } async def async_update(self) -> None: """Get the latest data.""" await self._ping.async_update() class PingData: """The base class for handling the data retrieval.""" def __init__(self, hass, host, count) -> None: """Initialize the data object.""" self.hass = hass self._ip_address = host self._count = count self.data = {} self.available = False class PingDataICMPLib(PingData): """The Class for handling the data retrieval using icmplib.""" async def async_update(self) -> None: """Retrieve the latest details from the host.""" _LOGGER.debug("ping address: %s", self._ip_address) data = await self.hass.async_add_executor_job( 
partial( icmp_ping, self._ip_address, count=self._count, timeout=1, id=async_get_next_ping_id(self.hass), ) ) self.available = data.is_alive if not self.available: self.data = False return self.data = { "min": data.min_rtt, "max": data.max_rtt, "avg": data.avg_rtt, "mdev": "", } class PingDataSubProcess(PingData): """The Class for handling the data retrieval using the ping binary.""" def __init__(self, hass, host, count) -> None: """Initialize the data object.""" super().__init__(hass, host, count) if sys.platform == "win32": self._ping_cmd = [ "ping", "-n", str(self._count), "-w", "1000", self._ip_address, ] else: self._ping_cmd = [ "ping", "-n", "-q", "-c", str(self._count), "-W1", self._ip_address, ] async def async_ping(self): """Send ICMP echo request and return details if success.""" pinger = await asyncio.create_subprocess_exec( *self._ping_cmd, stdin=None, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE, ) try: out_data, out_error = await asyncio.wait_for( pinger.communicate(), self._count + PING_TIMEOUT ) if out_data: _LOGGER.debug( "Output of command: `%s`, return code: %s:\n%s", " ".join(self._ping_cmd), pinger.returncode, out_data, ) if out_error: _LOGGER.debug( "Error of command: `%s`, return code: %s:\n%s", " ".join(self._ping_cmd), pinger.returncode, out_error, ) if pinger.returncode > 1: # returncode of 1 means the host is unreachable _LOGGER.exception( "Error running command: `%s`, return code: %s", " ".join(self._ping_cmd), pinger.returncode, ) if sys.platform == "win32": match = WIN32_PING_MATCHER.search(str(out_data).split("\n")[-1]) rtt_min, rtt_avg, rtt_max = match.groups() return {"min": rtt_min, "avg": rtt_avg, "max": rtt_max, "mdev": ""} if "max/" not in str(out_data): match = PING_MATCHER_BUSYBOX.search(str(out_data).split("\n")[-1]) rtt_min, rtt_avg, rtt_max = match.groups() return {"min": rtt_min, "avg": rtt_avg, "max": rtt_max, "mdev": ""} match = PING_MATCHER.search(str(out_data).split("\n")[-1]) rtt_min, rtt_avg, rtt_max, rtt_mdev = match.groups() return {"min": rtt_min, "avg": rtt_avg, "max": rtt_max, "mdev": rtt_mdev} except asyncio.TimeoutError: _LOGGER.exception( "Timed out running command: `%s`, after: %ss", self._ping_cmd, self._count + PING_TIMEOUT, ) if pinger: try: await pinger.kill() except TypeError: pass del pinger return False except AttributeError: return False async def async_update(self) -> None: """Retrieve the latest details from the host.""" self.data = await self.async_ping() self.available = bool(self.data)
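# --- Hedged illustration (standalone, sample output assumed) ---
# Applying the same min/avg/max/mdev expression used above to an illustrative Linux
# `ping` summary line; the matcher yields the four round-trip-time statistics as strings.
import re

PING_MATCHER = re.compile(
    r"(?P<min>\d+.\d+)\/(?P<avg>\d+.\d+)\/(?P<max>\d+.\d+)\/(?P<mdev>\d+.\d+)"
)

sample_line = "rtt min/avg/max/mdev = 0.043/0.055/0.070/0.011 ms"
match = PING_MATCHER.search(sample_line)
print(match.groupdict())  # {'min': '0.043', 'avg': '0.055', 'max': '0.070', 'mdev': '0.011'}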
import numpy as np import functools from tensornetwork.network_components import Node, contract_between from tensornetwork.backends import backend_factory from tensornetwork.linalg.node_linalg import conj from typing import Any, List, Optional, Text, Type, Union, Dict, Sequence from tensornetwork.matrixproductstates.base_mps import BaseMPS from tensornetwork.backends.abstract_backend import AbstractBackend Tensor = Any class FiniteMPS(BaseMPS): """An MPS class for finite systems. MPS tensors are stored as a list. `FiniteMPS` has a central site, also called orthogonality center. The position of this central site is stored in `FiniteMPS.center_position`, and it can be be shifted using the `FiniteMPS.position` method. `FiniteMPS.position` uses QR and RQ methods to shift `center_position`. `FiniteMPS` can be initialized either from a `list` of tensors, or by calling the classmethod `FiniteMPS.random`. By default, `FiniteMPS` is initialized in *canonical* form, i.e. the state is normalized, and all tensors to the left of `center_position` are left orthogonal, and all tensors to the right of `center_position` are right orthogonal. The tensor at `FiniteMPS.center_position` is neither left nor right orthogonal. Note that canonicalization can be computationally relatively costly and scales :math:`\\propto ND^3`. """ def __init__(self, tensors: List[Tensor], center_position: Optional[int] = None, canonicalize: Optional[bool] = True, backend: Optional[Union[AbstractBackend, Text]] = None) -> None: """Initialize a `FiniteMPS`. If `canonicalize` is `True` the state is brought into canonical form, with `BaseMPS.center_position` at `center_position`. if `center_position` is `None` and `canonicalize = True`, `BaseMPS.center_position` is set to 0. Args: tensors: A list of `Tensor` objects. center_position: The initial position of the center site. canonicalize: If `True` the mps is canonicalized at initialization. backend: The name of the backend that should be used to perform contractions. Available backends are currently 'numpy', 'tensorflow', 'pytorch', 'jax' """ super().__init__( tensors=tensors, center_position=center_position, connector_matrix=None, backend=backend) if (center_position is not None) and (not canonicalize): raise ValueError("can only set center_position of canonical mps") if canonicalize: if center_position is None: center_position = 0 if center_position == len(self) - 1: self.center_position = len(self) - 1 self.position(0) self.position(len(self) - 1) else: self.center_position = 0 self.position(len(self) - 1) self.position(center_position) @classmethod def random( cls, d: List[int], D: List[int], dtype: Type[np.number], canonicalize: bool = True, backend: Optional[Union[AbstractBackend, Text]] = None) -> "FiniteMPS": """Initialize a random `FiniteMPS`. The resulting state is normalized. Its center-position is at 0. Args: d: A list of physical dimensions. D: A list of bond dimensions. dtype: A numpy dtype. backend: An optional backend. 
Returns: `FiniteMPS` """ #use numpy backend for tensor initialization be = backend_factory.get_backend('numpy') if len(D) != len(d) - 1: raise ValueError('len(D) = {} is different from len(d) - 1 = {}'.format( len(D), len(d) - 1)) D = [1] + D + [1] tensors = [ be.randn((D[n], d[n], D[n + 1]), dtype=dtype) for n in range(len(d)) ] return cls( tensors=tensors, center_position=None, canonicalize=canonicalize, backend=backend) # pylint: disable=arguments-differ def canonicalize(self, normalize: bool = True) -> np.number: """Bring the MPS into canonical form according to `center_position`. If `center_position` is `None`, the MPS is canonicalized with `center_position = 0`. Args: normalize: If `True`, normalize matrices when shifting the orthogonality center. Returns: `Tensor`: The norm of the MPS. """ N = len(self.tensors) if self.center_position is not None: pos = self.center_position if pos >= N // 2: self.center_position = 0 self.position(N - 1, normalize=normalize) else: self.center_position = len(self.tensors) - 1 self.position(0, normalize=normalize) return self.position(pos, normalize=normalize) self.center_position = len(self.tensors) - 1 return self.position(0, normalize=normalize) def check_canonical(self) -> Tensor: """Check whether the MPS is in the expected canonical form. Returns: The L2 norm of the vector of local deviations. """ if self.center_position is None: raise ValueError( "FiniteMPS.center_positions is `None`. Cannot check canonical form.") deviations = [] for site in range(len(self.tensors)): if site < self.center_position: deviation = self.check_orthonormality('l', site) elif site > self.center_position: deviation = self.check_orthonormality('r', site) else: continue deviations.append(deviation**2) return self.backend.sqrt(sum(deviations[1:], deviations[0])) def left_envs(self, sites: Sequence[int]) -> Dict: """Compute left reduced density matrices for site `sites`. This returns a dict `left_envs` mapping sites (int) to Tensors. `left_envs[site]` is the left-reduced density matrix to the left of site `site`. Args: sites (list of int): A list of sites of the MPS. Returns: `dict` mapping `int` to `Tensor`: The left-reduced density matrices at each site in `sites`. 
""" sites = np.array(sites) #enable logical indexing if len(sites) == 0: return {} if self.center_position is not None: center_position = self.center_position else: center_position = 0 n2 = np.max(sites) #check if all elements of `sites` are within allowed range if not np.all(sites <= len(self)): raise ValueError('all elements of `sites` have to be <= N = {}'.format( len(self))) if not np.all(sites >= 0): raise ValueError('all elements of `sites` have to be positive') # left-reduced density matrices to the left of `center_position` # (including center_position) are all identities left_sites = sites[sites <= center_position] left_envs = {} for site in left_sites: left_envs[site] = Node( self.backend.eye( N=self.backend.sparse_shape( self.backend.conj(self.tensors[site]))[0], dtype=self.dtype), backend=self.backend) # left reduced density matrices at sites > center_position # have to be calculated from a network contraction if n2 > center_position: nodes = {} conj_nodes = {} for site in range(center_position, n2): nodes[site] = Node(self.tensors[site], backend=self.backend) conj_nodes[site] = conj(nodes[site]) nodes[center_position][0] ^ conj_nodes[center_position][0] nodes[center_position][1] ^ conj_nodes[center_position][1] for site in range(center_position + 1, n2): nodes[site][0] ^ nodes[site - 1][2] conj_nodes[site][0] ^ conj_nodes[site - 1][2] nodes[site][1] ^ conj_nodes[site][1] edges = {site: node[2] for site, node in nodes.items()} conj_edges = {site: node[2] for site, node in conj_nodes.items()} left_env = contract_between(nodes[center_position], conj_nodes[center_position]) left_env.reorder_edges( [edges[center_position], conj_edges[center_position]]) if center_position + 1 in sites: left_envs[center_position + 1] = left_env for site in range(center_position + 1, n2): left_env = contract_between(left_env, nodes[site]) left_env = contract_between(left_env, conj_nodes[site]) if site + 1 in sites: left_env.reorder_edges([edges[site], conj_edges[site]]) left_envs[site + 1] = left_env return {k: v.tensor for k, v in left_envs.items()} def right_envs(self, sites: Sequence[int]) -> Dict: """Compute right reduced density matrices for site `sites. This returns a dict `right_envs` mapping sites (int) to Tensors. `right_envs[site]` is the right-reduced density matrix to the right of site `site`. Args: sites (list of int): A list of sites of the MPS. Returns: `dict` mapping `int` to `Tensor`: The right-reduced density matrices at each site in `sites`. 
""" sites = np.array(sites) if len(sites) == 0: return {} if self.center_position is not None: center_position = self.center_position else: center_position = len(self.tensors) - 1 n1 = np.min(sites) #check if all elements of `sites` are within allowed range if not np.all(sites < len(self)): raise ValueError('all elements of `sites` have to be < N = {}'.format( len(self))) if not np.all(sites >= -1): raise ValueError('all elements of `sites` have to be >= -1') # right-reduced density matrices to the right of `center_position` # (including center_position) are all identities right_sites = sites[sites >= center_position] right_envs = {} for site in right_sites: right_envs[site] = Node( self.backend.eye( N=self.backend.sparse_shape( self.backend.conj(self.tensors[site]))[2], dtype=self.dtype), backend=self.backend) # right reduced density matrices at sites < center_position # have to be calculated from a network contraction if n1 < center_position: nodes = {} conj_nodes = {} for site in reversed(range(n1 + 1, center_position + 1)): nodes[site] = Node(self.tensors[site], backend=self.backend) conj_nodes[site] = conj(nodes[site]) nodes[center_position][2] ^ conj_nodes[center_position][2] nodes[center_position][1] ^ conj_nodes[center_position][1] for site in reversed(range(n1 + 1, center_position)): nodes[site][2] ^ nodes[site + 1][0] conj_nodes[site][2] ^ conj_nodes[site + 1][0] nodes[site][1] ^ conj_nodes[site][1] edges = {site: node[0] for site, node in nodes.items()} conj_edges = {site: node[0] for site, node in conj_nodes.items()} right_env = contract_between(nodes[center_position], conj_nodes[center_position]) if center_position - 1 in sites: right_env.reorder_edges( [edges[center_position], conj_edges[center_position]]) right_envs[center_position - 1] = right_env for site in reversed(range(n1 + 1, center_position)): right_env = contract_between(right_env, nodes[site]) right_env = contract_between(right_env, conj_nodes[site]) if site - 1 in sites: right_env.reorder_edges([edges[site], conj_edges[site]]) right_envs[site - 1] = right_env return {k: v.tensor for k, v in right_envs.items()} def save(self, path: str): raise NotImplementedError()
from itertools import chain import logging from homeassistant.components.binary_sensor import ( DEVICE_CLASS_CONNECTIVITY, DEVICE_CLASS_MOTION, DEVICE_CLASS_OCCUPANCY, DEVICE_CLASS_SOUND, BinarySensorEntity, ) from homeassistant.const import CONF_MONITORED_CONDITIONS from . import CONF_BINARY_SENSORS, DATA_NEST, DATA_NEST_CONFIG, NestSensorDevice _LOGGER = logging.getLogger(__name__) BINARY_TYPES = {"online": DEVICE_CLASS_CONNECTIVITY} CLIMATE_BINARY_TYPES = { "fan": None, "is_using_emergency_heat": "heat", "is_locked": None, "has_leaf": None, } CAMERA_BINARY_TYPES = { "motion_detected": DEVICE_CLASS_MOTION, "sound_detected": DEVICE_CLASS_SOUND, "person_detected": DEVICE_CLASS_OCCUPANCY, } STRUCTURE_BINARY_TYPES = {"away": None} STRUCTURE_BINARY_STATE_MAP = {"away": {"away": True, "home": False}} _BINARY_TYPES_DEPRECATED = [ "hvac_ac_state", "hvac_aux_heater_state", "hvac_heater_state", "hvac_heat_x2_state", "hvac_heat_x3_state", "hvac_alt_heat_state", "hvac_alt_heat_x2_state", "hvac_emer_heat_state", ] _VALID_BINARY_SENSOR_TYPES = { **BINARY_TYPES, **CLIMATE_BINARY_TYPES, **CAMERA_BINARY_TYPES, **STRUCTURE_BINARY_TYPES, } def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Nest binary sensors. No longer used. """ async def async_setup_entry(hass, entry, async_add_entities): """Set up a Nest binary sensor based on a config entry.""" nest = hass.data[DATA_NEST] discovery_info = hass.data.get(DATA_NEST_CONFIG, {}).get(CONF_BINARY_SENSORS, {}) # Add all available binary sensors if no Nest binary sensor config is set if discovery_info == {}: conditions = _VALID_BINARY_SENSOR_TYPES else: conditions = discovery_info.get(CONF_MONITORED_CONDITIONS, {}) for variable in conditions: if variable in _BINARY_TYPES_DEPRECATED: wstr = ( f"{variable} is no a longer supported " "monitored_conditions. See " "https://www.home-assistant.io/integrations/binary_sensor.nest/ " "for valid options." 
) _LOGGER.error(wstr) def get_binary_sensors(): """Get the Nest binary sensors.""" sensors = [] for structure in nest.structures(): sensors += [ NestBinarySensor(structure, None, variable) for variable in conditions if variable in STRUCTURE_BINARY_TYPES ] device_chain = chain(nest.thermostats(), nest.smoke_co_alarms(), nest.cameras()) for structure, device in device_chain: sensors += [ NestBinarySensor(structure, device, variable) for variable in conditions if variable in BINARY_TYPES ] sensors += [ NestBinarySensor(structure, device, variable) for variable in conditions if variable in CLIMATE_BINARY_TYPES and device.is_thermostat ] if device.is_camera: sensors += [ NestBinarySensor(structure, device, variable) for variable in conditions if variable in CAMERA_BINARY_TYPES ] for activity_zone in device.activity_zones: sensors += [ NestActivityZoneSensor(structure, device, activity_zone) ] return sensors async_add_entities(await hass.async_add_executor_job(get_binary_sensors), True) class NestBinarySensor(NestSensorDevice, BinarySensorEntity): """Represents a Nest binary sensor.""" @property def is_on(self): """Return true if the binary sensor is on.""" return self._state @property def device_class(self): """Return the device class of the binary sensor.""" return _VALID_BINARY_SENSOR_TYPES.get(self.variable) def update(self): """Retrieve latest state.""" value = getattr(self.device, self.variable) if self.variable in STRUCTURE_BINARY_TYPES: self._state = bool(STRUCTURE_BINARY_STATE_MAP[self.variable].get(value)) else: self._state = bool(value) class NestActivityZoneSensor(NestBinarySensor): """Represents a Nest binary sensor for activity in a zone.""" def __init__(self, structure, device, zone): """Initialize the sensor.""" super().__init__(structure, device, "") self.zone = zone self._name = f"{self._name} {self.zone.name} activity" @property def unique_id(self): """Return unique id based on camera serial and zone id.""" return f"{self.device.serial}-{self.zone.zone_id}" @property def device_class(self): """Return the device class of the binary sensor.""" return DEVICE_CLASS_MOTION def update(self): """Retrieve latest state.""" self._state = self.device.has_ongoing_motion_in_zone(self.zone.zone_id)
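# --- Hedged illustration (standalone helper, name assumed) ---
# A sketch of the state mapping done in NestBinarySensor.update above: structure-level
# variables such as "away" translate a string value into a boolean via
# STRUCTURE_BINARY_STATE_MAP, while every other variable is simply truth-tested.
STRUCTURE_BINARY_STATE_MAP = {"away": {"away": True, "home": False}}


def to_binary_state(variable: str, value) -> bool:
    if variable in STRUCTURE_BINARY_STATE_MAP:
        return bool(STRUCTURE_BINARY_STATE_MAP[variable].get(value))
    return bool(value)


assert to_binary_state("away", "away") is True
assert to_binary_state("away", "home") is False
assert to_binary_state("online", 1) is True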
import datetime import io import os import urllib.robotparser as robotparser from urllib.parse import urljoin, urlparse import dateutil.tz from nikola.plugin_categories import LateTask from nikola.utils import apply_filters, config_changed, encodelink urlset_header = """<?xml version="1.0" encoding="UTF-8"?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.sitemaps.org/schemas/sitemap/0.9 http://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd"> """ loc_format = """ <url> <loc>{0}</loc> <lastmod>{1}</lastmod>{2} </url> """ urlset_footer = "</urlset>" sitemapindex_header = """<?xml version="1.0" encoding="UTF-8"?> <sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.sitemaps.org/schemas/sitemap/0.9 http://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd"> """ sitemap_format = """ <sitemap> <loc>{0}</loc> <lastmod>{1}</lastmod> </sitemap> """ alternates_format = """\n <xhtml:link rel="alternate" hreflang="{0}" href="{1}" />""" sitemapindex_footer = "</sitemapindex>" def get_base_path(base): """Return the path of a base URL if it contains one. >>> get_base_path('http://some.site') == '/' True >>> get_base_path('http://some.site/') == '/' True >>> get_base_path('http://some.site/some/sub-path') == '/some/sub-path/' True >>> get_base_path('http://some.site/some/sub-path/') == '/some/sub-path/' True """ # first parse the base_url for some path base_parsed = urlparse(base) if not base_parsed.path: sub_path = '' else: sub_path = base_parsed.path if sub_path.endswith('/'): return sub_path else: return sub_path + '/' class Sitemap(LateTask): """Generate a sitemap.""" name = "sitemap" def gen_tasks(self): """Generate a sitemap.""" kw = { "base_url": self.site.config["BASE_URL"], "site_url": self.site.config["SITE_URL"], "output_folder": self.site.config["OUTPUT_FOLDER"], "strip_indexes": self.site.config["STRIP_INDEXES"], "index_file": self.site.config["INDEX_FILE"], "mapped_extensions": self.site.config.get('MAPPED_EXTENSIONS', ['.atom', '.html', '.htm', '.php', '.xml', '.rss']), "robots_exclusions": self.site.config["ROBOTS_EXCLUSIONS"], "filters": self.site.config["FILTERS"], "translations": self.site.config["TRANSLATIONS"], "tzinfo": self.site.config['__tzinfo__'], "sitemap_plugin_revision": 1, } output = kw['output_folder'] base_url = kw['base_url'] mapped_exts = kw['mapped_extensions'] output_path = kw['output_folder'] sitemapindex_path = os.path.join(output_path, "sitemapindex.xml") sitemap_path = os.path.join(output_path, "sitemap.xml") base_path = get_base_path(kw['base_url']) sitemapindex = {} urlset = {} def scan_locs(): """Scan site locations.""" for root, dirs, files in os.walk(output, followlinks=True): if not dirs and not files: continue # Totally empty, not on sitemap path = os.path.relpath(root, output) # ignore the current directory. 
if path == '.': path = syspath = '' else: syspath = path + os.sep path = path.replace(os.sep, '/') + '/' lastmod = self.get_lastmod(root) loc = urljoin(base_url, base_path + path) if kw['index_file'] in files and kw['strip_indexes']: # ignore folders when not stripping urls post = self.site.post_per_file.get(syspath + kw['index_file']) if post and (post.is_draft or post.is_private or post.publish_later): continue alternates = [] if post: for lang in post.translated_to: alt_url = post.permalink(lang=lang, absolute=True) if encodelink(loc) == alt_url: continue alternates.append(alternates_format.format(lang, alt_url)) urlset[loc] = loc_format.format(encodelink(loc), lastmod, ''.join(alternates)) for fname in files: if kw['strip_indexes'] and fname == kw['index_file']: continue # We already mapped the folder if os.path.splitext(fname)[-1] in mapped_exts: real_path = os.path.join(root, fname) path = syspath = os.path.relpath(real_path, output) if path.endswith(kw['index_file']) and kw['strip_indexes']: # ignore index files when stripping urls continue if not robot_fetch(path): continue # read in binary mode to make ancient files work with open(real_path, 'rb') as fh: filehead = fh.read(1024) if path.endswith('.html') or path.endswith('.htm') or path.endswith('.php'): # Ignores "html" files without doctype if b'<!doctype html' not in filehead.lower(): continue # Ignores "html" files with noindex robot directives robots_directives = [b'<meta content=noindex name=robots', b'<meta content=none name=robots', b'<meta name=robots content=noindex', b'<meta name=robots content=none'] lowquothead = filehead.lower().decode('utf-8', 'ignore').replace('"', '').encode('utf-8') if any([robot_directive in lowquothead for robot_directive in robots_directives]): continue # put Atom and RSS in sitemapindex[] instead of in urlset[], # sitemap_path is included after it is generated if path.endswith('.xml') or path.endswith('.atom') or path.endswith('.rss'): known_elm_roots = (b'<feed', b'<rss', b'<urlset') if any([elm_root in filehead.lower() for elm_root in known_elm_roots]) and path != sitemap_path: path = path.replace(os.sep, '/') lastmod = self.get_lastmod(real_path) loc = urljoin(base_url, base_path + path) sitemapindex[loc] = sitemap_format.format(encodelink(loc), lastmod) continue else: continue # ignores all XML files except those presumed to be RSS post = self.site.post_per_file.get(syspath) if post and (post.is_draft or post.is_private or post.publish_later): continue path = path.replace(os.sep, '/') lastmod = self.get_lastmod(real_path) loc = urljoin(base_url, base_path + path) alternates = [] if post: for lang in post.translated_to: alt_url = post.permalink(lang=lang, absolute=True) if encodelink(loc) == alt_url: continue alternates.append(alternates_format.format(lang, alt_url)) urlset[loc] = loc_format.format(encodelink(loc), lastmod, '\n'.join(alternates)) def robot_fetch(path): """Check if robots can fetch a file.""" for rule in kw["robots_exclusions"]: robot = robotparser.RobotFileParser() robot.parse(["User-Agent: *", "Disallow: {0}".format(rule)]) if not robot.can_fetch("*", '/' + path): return False # not robot food return True def write_sitemap(): """Write sitemap to file.""" # Have to rescan, because files may have been added between # task dep scanning and task execution with io.open(sitemap_path, 'w+', encoding='utf8') as outf: outf.write(urlset_header) for k in sorted(urlset.keys()): outf.write(urlset[k]) outf.write(urlset_footer) sitemap_url = urljoin(base_url, base_path + "sitemap.xml") 
            sitemapindex[sitemap_url] = sitemap_format.format(sitemap_url, self.get_lastmod(sitemap_path))

        def write_sitemapindex():
            """Write sitemap index."""
            with io.open(sitemapindex_path, 'w+', encoding='utf8') as outf:
                outf.write(sitemapindex_header)
                for k in sorted(sitemapindex.keys()):
                    outf.write(sitemapindex[k])
                outf.write(sitemapindex_footer)

        def scan_locs_task():
            """Yield a task to calculate the dependencies of the sitemap.

            Other tasks can depend on this output, instead of having to
            scan locations.
            """
            scan_locs()

            # Generate a list of file dependencies for the actual generation
            # task, so rebuilds are triggered. (Issue #1032)
            output = kw["output_folder"]
            file_dep = []

            for i in urlset.keys():
                p = os.path.join(output, urlparse(i).path.replace(base_path, '', 1))
                if not p.endswith('sitemap.xml') and not os.path.isdir(p):
                    file_dep.append(p)
                if os.path.isdir(p) and os.path.exists(os.path.join(p, 'index.html')):
                    file_dep.append(p + 'index.html')

            for i in sitemapindex.keys():
                p = os.path.join(output, urlparse(i).path.replace(base_path, '', 1))
                if not p.endswith('sitemap.xml') and not os.path.isdir(p):
                    file_dep.append(p)
                if os.path.isdir(p) and os.path.exists(os.path.join(p, 'index.html')):
                    file_dep.append(p + 'index.html')

            return {'file_dep': file_dep}

        yield {
            "basename": "_scan_locs",
            "name": "sitemap",
            "actions": [(scan_locs_task)]
        }
        yield self.group_task()
        yield apply_filters({
            "basename": "sitemap",
            "name": sitemap_path,
            "targets": [sitemap_path],
            "actions": [(write_sitemap,)],
            "uptodate": [config_changed(kw, 'nikola.plugins.task.sitemap:write')],
            "clean": True,
            "task_dep": ["render_site"],
            "calc_dep": ["_scan_locs:sitemap"],
        }, kw['filters'])
        yield apply_filters({
            "basename": "sitemap",
            "name": sitemapindex_path,
            "targets": [sitemapindex_path],
            "actions": [(write_sitemapindex,)],
            "uptodate": [config_changed(kw, 'nikola.plugins.task.sitemap:write_index')],
            "clean": True,
            "file_dep": [sitemap_path]
        }, kw['filters'])

    def get_lastmod(self, p):
        """Get last modification date."""
        if self.site.invariant:
            return '2038-01-01'
        else:
            # RFC 3339 (the web ISO 8601 profile) represented in UTC with the
            # Zulu zone designator, as recommended for sitemaps. Second and
            # microsecond precision is stripped for compatibility.
            lastmod = datetime.datetime.utcfromtimestamp(os.stat(p).st_mtime).replace(tzinfo=dateutil.tz.gettz('UTC'), second=0, microsecond=0).isoformat().replace('+00:00', 'Z')
            return lastmod


if __name__ == '__main__':
    import doctest
    doctest.testmod()
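# --- Standalone sketch of the robots-exclusion check used in robot_fetch() ---
# Each ROBOTS_EXCLUSIONS rule is turned into a one-line Disallow robots.txt,
# and a path is left out of the sitemap if any rule forbids fetching it. The
# helper name and the example rules below are placeholders, not Nikola API.

import urllib.robotparser as robotparser


def allowed_in_sitemap(path, exclusions):
    """Return True if no exclusion rule disallows fetching /path."""
    for rule in exclusions:
        robot = robotparser.RobotFileParser()
        robot.parse(["User-Agent: *", "Disallow: {0}".format(rule)])
        if not robot.can_fetch("*", '/' + path):
            return False
    return True


if __name__ == "__main__":
    rules = ['/drafts/', '/private/']
    print(allowed_in_sitemap('posts/hello.html', rules))    # True
    print(allowed_in_sitemap('private/notes.html', rules))  # False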
import re # noqa: F401 import sys # noqa: F401 import nulltype # noqa: F401 from paasta_tools.paastaapi.model_utils import ( # noqa: F401 ApiTypeError, ModelComposed, ModelNormal, ModelSimple, cached_property, change_keys_js_to_python, convert_js_args_to_python_args, date, datetime, file_type, none_type, validate_get_composed_info, ) def lazy_import(): from paasta_tools.paastaapi.model.marathon_mesos_nonrunning_task import MarathonMesosNonrunningTask from paasta_tools.paastaapi.model.marathon_mesos_running_task import MarathonMesosRunningTask globals()['MarathonMesosNonrunningTask'] = MarathonMesosNonrunningTask globals()['MarathonMesosRunningTask'] = MarathonMesosRunningTask class MarathonMesosStatus(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. Attributes: allowed_values (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict with a capitalized key describing the allowed value and an allowed value. These dicts store the allowed enum values. attribute_map (dict): The key is attribute name and the value is json key in definition. discriminator_value_class_map (dict): A dict to go from the discriminator variable value to the discriminator class name. validations (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict that stores validations for max_length, min_length, max_items, min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, inclusive_minimum, and regex. additional_properties_type (tuple): A tuple of classes accepted as additional properties values. """ allowed_values = { } validations = { } additional_properties_type = None _nullable = False @cached_property def openapi_types(): """ This must be a method because a model may have properties that are of type self, this must run after the class is loaded Returns openapi_types (dict): The key is attribute name and the value is attribute type. """ lazy_import() return { 'error_message': (str,), # noqa: E501 'non_running_tasks': ([MarathonMesosNonrunningTask],), # noqa: E501 'running_task_count': (int,), # noqa: E501 'running_tasks': ([MarathonMesosRunningTask],), # noqa: E501 } @cached_property def discriminator(): return None attribute_map = { 'error_message': 'error_message', # noqa: E501 'non_running_tasks': 'non_running_tasks', # noqa: E501 'running_task_count': 'running_task_count', # noqa: E501 'running_tasks': 'running_tasks', # noqa: E501 } _composed_schemas = {} required_properties = set([ '_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', ]) @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 """MarathonMesosStatus - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. Defaults to True _path_to_item (tuple/list): This is a list of keys or values to drill down to the model in received_data when deserializing a response _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _configuration (Configuration): the instance to use when deserializing a file_type parameter. 
If passed, type conversion is attempted If omitted no type conversion is done. _visited_composed_classes (tuple): This stores a tuple of classes that we have traveled through so that if we see that class again we will not use its discriminator again. When traveling through a discriminator, the composed schema that is is traveled through is added to this set. For example if Animal has a discriminator petType and we pass in "Dog", and the class Dog allOf includes Animal, we move through Animal once using the discriminator, and pick Dog. Then in Dog, we will make an instance of the Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) error_message (str): Error message when Mesos tasks cannot be queried. [optional] # noqa: E501 non_running_tasks ([MarathonMesosNonrunningTask]): Non-running tasks associated to this service. [optional] # noqa: E501 running_task_count (int): The number of running Mesos tasks associated to this service. [optional] # noqa: E501 running_tasks ([MarathonMesosRunningTask]): Currently running tasks associated to this service. [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) _spec_property_naming = kwargs.pop('_spec_property_naming', False) _path_to_item = kwargs.pop('_path_to_item', ()) _configuration = kwargs.pop('_configuration', None) _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) if args: raise ApiTypeError( "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( args, self.__class__.__name__, ), path_to_item=_path_to_item, valid_classes=(self.__class__,), ) self._data_store = {} self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): if var_name not in self.attribute_map and \ self._configuration is not None and \ self._configuration.discard_unknown_keys and \ self.additional_properties_type is None: # discard variable. continue setattr(self, var_name, var_value)
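# --- Standalone sketch of the lazy-import pattern used by the model above ---
# openapi_types is a cached property whose body calls lazy_import(), which puts
# the referenced model classes into globals() so type references resolve
# without circular imports. Below, functools.lru_cache stands in for the
# generator's cached_property helper and RunningTask is a placeholder class.

import functools


def lazy_import():
    class RunningTask:  # placeholder for a referenced sibling model
        pass

    globals()['RunningTask'] = RunningTask


@functools.lru_cache(maxsize=None)
def openapi_types():
    """Return the attribute->type map, importing referenced types on first use."""
    lazy_import()
    return {
        'error_message': (str,),
        'running_tasks': ([globals()['RunningTask']],),
    }


if __name__ == "__main__":
    print(sorted(openapi_types().keys()))  # ['error_message', 'running_tasks']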
import asyncio import logging from goalzero import Yeti, exceptions from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.update_coordinator import ( CoordinatorEntity, DataUpdateCoordinator, UpdateFailed, ) from .const import DATA_KEY_API, DATA_KEY_COORDINATOR, DOMAIN, MIN_TIME_BETWEEN_UPDATES _LOGGER = logging.getLogger(__name__) PLATFORMS = ["binary_sensor"] async def async_setup(hass: HomeAssistant, config): """Set up the Goal Zero Yeti component.""" hass.data[DOMAIN] = {} return True async def async_setup_entry(hass, entry): """Set up Goal Zero Yeti from a config entry.""" name = entry.data[CONF_NAME] host = entry.data[CONF_HOST] session = async_get_clientsession(hass) api = Yeti(host, hass.loop, session) try: await api.get_state() except exceptions.ConnectError as ex: _LOGGER.warning("Failed to connect: %s", ex) raise ConfigEntryNotReady from ex async def async_update_data(): """Fetch data from API endpoint.""" try: await api.get_state() except exceptions.ConnectError as err: raise UpdateFailed(f"Failed to communicating with API: {err}") from err coordinator = DataUpdateCoordinator( hass, _LOGGER, name=name, update_method=async_update_data, update_interval=MIN_TIME_BETWEEN_UPDATES, ) hass.data[DOMAIN][entry.entry_id] = { DATA_KEY_API: api, DATA_KEY_COORDINATOR: coordinator, } for platform in PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, platform) ) return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry): """Unload a config entry.""" unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(entry, component) for component in PLATFORMS ] ) ) if unload_ok: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok class YetiEntity(CoordinatorEntity): """Representation of a Goal Zero Yeti entity.""" def __init__(self, api, coordinator, name, server_unique_id): """Initialize a Goal Zero Yeti entity.""" super().__init__(coordinator) self.api = api self._name = name self._server_unique_id = server_unique_id self._device_class = None @property def device_info(self): """Return the device information of the entity.""" return { "identifiers": {(DOMAIN, self._server_unique_id)}, "name": self._name, "manufacturer": "Goal Zero", } @property def device_class(self): """Return the class of this device.""" return self._device_class
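# --- Bare asyncio sketch of the coordinator update contract above ---
# The integration delegates polling to a DataUpdateCoordinator whose
# update_method re-raises connection problems as UpdateFailed. The classes
# below are invented stand-ins for goalzero.Yeti and the Home Assistant
# exceptions; only the control flow is the same.

import asyncio


class FakeConnectError(Exception):
    """Stand-in for goalzero.exceptions.ConnectError."""


class FakeUpdateFailed(Exception):
    """Stand-in for update_coordinator.UpdateFailed."""


class FakeYeti:
    async def get_state(self):
        await asyncio.sleep(0)  # pretend to poll the device
        return {"socketPowerW": 42}


async def async_update_data(api):
    """Fetch once, translating transport errors into the coordinator's error type."""
    try:
        return await api.get_state()
    except FakeConnectError as err:
        raise FakeUpdateFailed(f"Failed to communicate with API: {err}") from err


if __name__ == "__main__":
    print(asyncio.run(async_update_data(FakeYeti())))  # {'socketPowerW': 42}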
import datetime import numpy as np import pandas as pd import pytest import xarray as xr from xarray.core.resample_cftime import CFTimeGrouper pytest.importorskip("cftime") # Create a list of pairs of similar-length initial and resample frequencies # that cover: # - Resampling from shorter to longer frequencies # - Resampling from longer to shorter frequencies # - Resampling from one initial frequency to another. # These are used to test the cftime version of resample against pandas # with a standard calendar. FREQS = [ ("8003D", "4001D"), ("8003D", "16006D"), ("8003D", "21AS"), ("6H", "3H"), ("6H", "12H"), ("6H", "400T"), ("3D", "D"), ("3D", "6D"), ("11D", "MS"), ("3MS", "MS"), ("3MS", "6MS"), ("3MS", "85D"), ("7M", "3M"), ("7M", "14M"), ("7M", "2QS-APR"), ("43QS-AUG", "21QS-AUG"), ("43QS-AUG", "86QS-AUG"), ("43QS-AUG", "11A-JUN"), ("11Q-JUN", "5Q-JUN"), ("11Q-JUN", "22Q-JUN"), ("11Q-JUN", "51MS"), ("3AS-MAR", "AS-MAR"), ("3AS-MAR", "6AS-MAR"), ("3AS-MAR", "14Q-FEB"), ("7A-MAY", "3A-MAY"), ("7A-MAY", "14A-MAY"), ("7A-MAY", "85M"), ] def da(index): return xr.DataArray( np.arange(100.0, 100.0 + index.size), coords=[index], dims=["time"] ) @pytest.mark.parametrize("freqs", FREQS, ids=lambda x: "{}->{}".format(*x)) @pytest.mark.parametrize("closed", [None, "left", "right"]) @pytest.mark.parametrize("label", [None, "left", "right"]) @pytest.mark.parametrize("base", [24, 31]) def test_resample(freqs, closed, label, base): initial_freq, resample_freq = freqs start = "2000-01-01T12:07:01" index_kwargs = dict(start=start, periods=5, freq=initial_freq) datetime_index = pd.date_range(**index_kwargs) cftime_index = xr.cftime_range(**index_kwargs) loffset = "12H" try: da_datetime = ( da(datetime_index) .resample( time=resample_freq, closed=closed, label=label, base=base, loffset=loffset, ) .mean() ) except ValueError: with pytest.raises(ValueError): da(cftime_index).resample( time=resample_freq, closed=closed, label=label, base=base, loffset=loffset, ).mean() else: da_cftime = ( da(cftime_index) .resample( time=resample_freq, closed=closed, label=label, base=base, loffset=loffset, ) .mean() ) da_cftime["time"] = da_cftime.indexes["time"].to_datetimeindex() xr.testing.assert_identical(da_cftime, da_datetime) @pytest.mark.parametrize( ("freq", "expected"), [ ("S", "left"), ("T", "left"), ("H", "left"), ("D", "left"), ("M", "right"), ("MS", "left"), ("Q", "right"), ("QS", "left"), ("A", "right"), ("AS", "left"), ], ) def test_closed_label_defaults(freq, expected): assert CFTimeGrouper(freq=freq).closed == expected assert CFTimeGrouper(freq=freq).label == expected @pytest.mark.filterwarnings("ignore:Converting a CFTimeIndex") @pytest.mark.parametrize( "calendar", ["gregorian", "noleap", "all_leap", "360_day", "julian"] ) def test_calendars(calendar): # Limited testing for non-standard calendars freq, closed, label, base = "8001T", None, None, 17 loffset = datetime.timedelta(hours=12) xr_index = xr.cftime_range( start="2004-01-01T12:07:01", periods=7, freq="3D", calendar=calendar ) pd_index = pd.date_range(start="2004-01-01T12:07:01", periods=7, freq="3D") da_cftime = ( da(xr_index) .resample(time=freq, closed=closed, label=label, base=base, loffset=loffset) .mean() ) da_datetime = ( da(pd_index) .resample(time=freq, closed=closed, label=label, base=base, loffset=loffset) .mean() ) da_cftime["time"] = da_cftime.indexes["time"].to_datetimeindex() xr.testing.assert_identical(da_cftime, da_datetime)
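# --- Minimal cftime resample usage, separate from the test module above ---
# A single resample over a cftime-indexed DataArray, the operation the tests
# compare against pandas. Assumes xarray and cftime are installed; the
# calendar and frequencies here are arbitrary choices.

import numpy as np
import xarray as xr

if __name__ == "__main__":
    times = xr.cftime_range(start="2000-01-01", periods=6, freq="MS", calendar="noleap")
    arr = xr.DataArray(np.arange(6.0), coords=[times], dims=["time"])
    # Downsample monthly values to 3-month (quarter-like) means.
    print(arr.resample(time="3MS").mean())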
import logging import numbers import os import unittest import numpy as np from numpy.testing import assert_allclose from gensim.corpora import mmcorpus, Dictionary from gensim.models import ldamodel, ldamulticore from gensim import matutils, utils from gensim.test import basetmtests from gensim.test.utils import datapath, get_tmpfile, common_texts AZURE = bool(os.environ.get('PIPELINE_WORKSPACE')) dictionary = Dictionary(common_texts) corpus = [dictionary.doc2bow(text) for text in common_texts] def testRandomState(): testcases = [np.random.seed(0), None, np.random.RandomState(0), 0] for testcase in testcases: assert(isinstance(utils.get_random_state(testcase), np.random.RandomState)) class TestLdaModel(unittest.TestCase, basetmtests.TestBaseTopicModel): def setUp(self): self.corpus = mmcorpus.MmCorpus(datapath('testcorpus.mm')) self.class_ = ldamodel.LdaModel self.model = self.class_(corpus, id2word=dictionary, num_topics=2, passes=100) def testTransform(self): passed = False # sometimes, LDA training gets stuck at a local minimum # in that case try re-training the model from scratch, hoping for a # better random initialization for i in range(25): # restart at most 5 times # create the transformation model model = self.class_(id2word=dictionary, num_topics=2, passes=100) model.update(self.corpus) # transform one document doc = list(corpus)[0] transformed = model[doc] vec = matutils.sparse2full(transformed, 2) # convert to dense vector, for easier equality tests expected = [0.13, 0.87] # must contain the same values, up to re-ordering passed = np.allclose(sorted(vec), sorted(expected), atol=1e-1) if passed: break logging.warning( "LDA failed to converge on attempt %i (got %s, expected %s)", i, sorted(vec), sorted(expected) ) self.assertTrue(passed) def testAlphaAuto(self): model1 = self.class_(corpus, id2word=dictionary, alpha='symmetric', passes=10) modelauto = self.class_(corpus, id2word=dictionary, alpha='auto', passes=10) # did we learn something? 
self.assertFalse(all(np.equal(model1.alpha, modelauto.alpha))) def testAlpha(self): kwargs = dict( id2word=dictionary, num_topics=2, alpha=None ) expected_shape = (2,) # should not raise anything self.class_(**kwargs) kwargs['alpha'] = 'symmetric' model = self.class_(**kwargs) self.assertEqual(model.alpha.shape, expected_shape) assert_allclose(model.alpha, np.array([0.5, 0.5])) kwargs['alpha'] = 'asymmetric' model = self.class_(**kwargs) self.assertEqual(model.alpha.shape, expected_shape) assert_allclose(model.alpha, [0.630602, 0.369398], rtol=1e-5) kwargs['alpha'] = 0.3 model = self.class_(**kwargs) self.assertEqual(model.alpha.shape, expected_shape) assert_allclose(model.alpha, np.array([0.3, 0.3])) kwargs['alpha'] = 3 model = self.class_(**kwargs) self.assertEqual(model.alpha.shape, expected_shape) assert_allclose(model.alpha, np.array([3, 3])) kwargs['alpha'] = [0.3, 0.3] model = self.class_(**kwargs) self.assertEqual(model.alpha.shape, expected_shape) assert_allclose(model.alpha, np.array([0.3, 0.3])) kwargs['alpha'] = np.array([0.3, 0.3]) model = self.class_(**kwargs) self.assertEqual(model.alpha.shape, expected_shape) assert_allclose(model.alpha, np.array([0.3, 0.3])) # all should raise an exception for being wrong shape kwargs['alpha'] = [0.3, 0.3, 0.3] self.assertRaises(AssertionError, self.class_, **kwargs) kwargs['alpha'] = [[0.3], [0.3]] self.assertRaises(AssertionError, self.class_, **kwargs) kwargs['alpha'] = [0.3] self.assertRaises(AssertionError, self.class_, **kwargs) kwargs['alpha'] = "gensim is cool" self.assertRaises(ValueError, self.class_, **kwargs) def testEtaAuto(self): model1 = self.class_(corpus, id2word=dictionary, eta='symmetric', passes=10) modelauto = self.class_(corpus, id2word=dictionary, eta='auto', passes=10) # did we learn something? 
self.assertFalse(np.allclose(model1.eta, modelauto.eta)) def testEta(self): kwargs = dict( id2word=dictionary, num_topics=2, eta=None ) num_terms = len(dictionary) expected_shape = (num_terms,) # should not raise anything model = self.class_(**kwargs) self.assertEqual(model.eta.shape, expected_shape) assert_allclose(model.eta, np.array([0.5] * num_terms)) kwargs['eta'] = 'symmetric' model = self.class_(**kwargs) self.assertEqual(model.eta.shape, expected_shape) assert_allclose(model.eta, np.array([0.5] * num_terms)) kwargs['eta'] = 0.3 model = self.class_(**kwargs) self.assertEqual(model.eta.shape, expected_shape) assert_allclose(model.eta, np.array([0.3] * num_terms)) kwargs['eta'] = 3 model = self.class_(**kwargs) self.assertEqual(model.eta.shape, expected_shape) assert_allclose(model.eta, np.array([3] * num_terms)) kwargs['eta'] = [0.3] * num_terms model = self.class_(**kwargs) self.assertEqual(model.eta.shape, expected_shape) assert_allclose(model.eta, np.array([0.3] * num_terms)) kwargs['eta'] = np.array([0.3] * num_terms) model = self.class_(**kwargs) self.assertEqual(model.eta.shape, expected_shape) assert_allclose(model.eta, np.array([0.3] * num_terms)) # should be ok with num_topics x num_terms testeta = np.array([[0.5] * len(dictionary)] * 2) kwargs['eta'] = testeta self.class_(**kwargs) # all should raise an exception for being wrong shape kwargs['eta'] = testeta.reshape(tuple(reversed(testeta.shape))) self.assertRaises(AssertionError, self.class_, **kwargs) kwargs['eta'] = [0.3] self.assertRaises(AssertionError, self.class_, **kwargs) kwargs['eta'] = [0.3] * (num_terms + 1) self.assertRaises(AssertionError, self.class_, **kwargs) kwargs['eta'] = "gensim is cool" self.assertRaises(ValueError, self.class_, **kwargs) kwargs['eta'] = "asymmetric" self.assertRaises(ValueError, self.class_, **kwargs) def testTopTopics(self): top_topics = self.model.top_topics(self.corpus) for topic, score in top_topics: self.assertTrue(isinstance(topic, list)) self.assertTrue(isinstance(score, float)) for v, k in topic: self.assertTrue(isinstance(k, str)) self.assertTrue(np.issubdtype(v, np.floating)) def testGetTopicTerms(self): topic_terms = self.model.get_topic_terms(1) for k, v in topic_terms: self.assertTrue(isinstance(k, numbers.Integral)) self.assertTrue(np.issubdtype(v, np.floating)) @unittest.skipIf(AZURE, 'see <https://github.com/RaRe-Technologies/gensim/pull/2836>') def testGetDocumentTopics(self): model = self.class_( self.corpus, id2word=dictionary, num_topics=2, passes=100, random_state=np.random.seed(0) ) doc_topics = model.get_document_topics(self.corpus) for topic in doc_topics: self.assertTrue(isinstance(topic, list)) for k, v in topic: self.assertTrue(isinstance(k, numbers.Integral)) self.assertTrue(np.issubdtype(v, np.floating)) # Test case to use the get_document_topic function for the corpus all_topics = model.get_document_topics(self.corpus, per_word_topics=True) self.assertEqual(model.state.numdocs, len(corpus)) for topic in all_topics: self.assertTrue(isinstance(topic, tuple)) for k, v in topic[0]: # list of doc_topics self.assertTrue(isinstance(k, numbers.Integral)) self.assertTrue(np.issubdtype(v, np.floating)) for w, topic_list in topic[1]: # list of word_topics self.assertTrue(isinstance(w, numbers.Integral)) self.assertTrue(isinstance(topic_list, list)) for w, phi_values in topic[2]: # list of word_phis self.assertTrue(isinstance(w, numbers.Integral)) self.assertTrue(isinstance(phi_values, list)) # Test case to check the filtering effect of minimum_probability and 
minimum_phi_value doc_topic_count_na = 0 word_phi_count_na = 0 all_topics = model.get_document_topics( self.corpus, minimum_probability=0.8, minimum_phi_value=1.0, per_word_topics=True ) self.assertEqual(model.state.numdocs, len(corpus)) for topic in all_topics: self.assertTrue(isinstance(topic, tuple)) for k, v in topic[0]: # list of doc_topics self.assertTrue(isinstance(k, numbers.Integral)) self.assertTrue(np.issubdtype(v, np.floating)) if len(topic[0]) != 0: doc_topic_count_na += 1 for w, topic_list in topic[1]: # list of word_topics self.assertTrue(isinstance(w, numbers.Integral)) self.assertTrue(isinstance(topic_list, list)) for w, phi_values in topic[2]: # list of word_phis self.assertTrue(isinstance(w, numbers.Integral)) self.assertTrue(isinstance(phi_values, list)) if len(phi_values) != 0: word_phi_count_na += 1 self.assertTrue(model.state.numdocs > doc_topic_count_na) self.assertTrue(sum(len(i) for i in corpus) > word_phi_count_na) doc_topics, word_topics, word_phis = model.get_document_topics(self.corpus[1], per_word_topics=True) for k, v in doc_topics: self.assertTrue(isinstance(k, numbers.Integral)) self.assertTrue(np.issubdtype(v, np.floating)) for w, topic_list in word_topics: self.assertTrue(isinstance(w, numbers.Integral)) self.assertTrue(isinstance(topic_list, list)) for w, phi_values in word_phis: self.assertTrue(isinstance(w, numbers.Integral)) self.assertTrue(isinstance(phi_values, list)) # word_topics looks like this: ({word_id => [topic_id_most_probable, topic_id_second_most_probable, ...]). # we check one case in word_topics, i.e of the first word in the doc, and its likely topics. # FIXME: Fails on osx and win # expected_word = 0 # self.assertEqual(word_topics[0][0], expected_word) # self.assertTrue(0 in word_topics[0][1]) def testTermTopics(self): model = self.class_( self.corpus, id2word=dictionary, num_topics=2, passes=100, random_state=np.random.seed(0) ) # check with word_type result = model.get_term_topics(2) for topic_no, probability in result: self.assertTrue(isinstance(topic_no, int)) self.assertTrue(np.issubdtype(probability, np.floating)) # checks if topic '1' is in the result list # FIXME: Fails on osx and win # self.assertTrue(1 in result[0]) # if user has entered word instead, check with word result = model.get_term_topics(str(model.id2word[2])) for topic_no, probability in result: self.assertTrue(isinstance(topic_no, int)) self.assertTrue(np.issubdtype(probability, np.floating)) # checks if topic '1' is in the result list # FIXME: Fails on osx and win # self.assertTrue(1 in result[0]) def testPasses(self): # long message includes the original error message with a custom one self.longMessage = True # construct what we expect when passes aren't involved test_rhots = list() model = self.class_(id2word=dictionary, chunksize=1, num_topics=2) def final_rhot(model): return pow(model.offset + (1 * model.num_updates) / model.chunksize, -model.decay) # generate 5 updates to test rhot on for x in range(5): model.update(self.corpus) test_rhots.append(final_rhot(model)) for passes in [1, 5, 10, 50, 100]: model = self.class_(id2word=dictionary, chunksize=1, num_topics=2, passes=passes) self.assertEqual(final_rhot(model), 1.0) # make sure the rhot matches the test after each update for test_rhot in test_rhots: model.update(self.corpus) msg = ", ".join(str(x) for x in [passes, model.num_updates, model.state.numdocs]) self.assertAlmostEqual(final_rhot(model), test_rhot, msg=msg) self.assertEqual(model.state.numdocs, len(corpus) * len(test_rhots)) 
self.assertEqual(model.num_updates, len(corpus) * len(test_rhots)) # def testTopicSeeding(self): # for topic in range(2): # passed = False # for i in range(5): # restart at most this many times, to mitigate LDA randomness # # try seeding it both ways round, check you get the same # # topics out but with which way round they are depending # # on the way round they're seeded # eta = np.ones((2, len(dictionary))) * 0.5 # system = dictionary.token2id[u'system'] # trees = dictionary.token2id[u'trees'] # # aggressively seed the word 'system', in one of the # # two topics, 10 times higher than the other words # eta[topic, system] *= 10.0 # model = self.class_(id2word=dictionary, num_topics=2, passes=200, eta=eta) # model.update(self.corpus) # topics = [{word: p for p, word in model.show_topic(j, topn=None)} for j in range(2)] # # check that the word 'system' in the topic we seeded got a high weight, # # and the word 'trees' (the main word in the other topic) a low weight -- # # and vice versa for the other topic (which we didn't seed with 'system') # passed = ( # (topics[topic][u'system'] > topics[topic][u'trees']) # and # (topics[1 - topic][u'system'] < topics[1 - topic][u'trees']) # ) # if passed: # break # logging.warning("LDA failed to converge on attempt %i (got %s)", i, topics) # self.assertTrue(passed) def testPersistence(self): fname = get_tmpfile('gensim_models_lda.tst') model = self.model model.save(fname) model2 = self.class_.load(fname) self.assertEqual(model.num_topics, model2.num_topics) self.assertTrue(np.allclose(model.expElogbeta, model2.expElogbeta)) tstvec = [] self.assertTrue(np.allclose(model[tstvec], model2[tstvec])) # try projecting an empty vector def testModelCompatibilityWithPythonVersions(self): fname_model_2_7 = datapath('ldamodel_python_2_7') model_2_7 = self.class_.load(fname_model_2_7) fname_model_3_5 = datapath('ldamodel_python_3_5') model_3_5 = self.class_.load(fname_model_3_5) self.assertEqual(model_2_7.num_topics, model_3_5.num_topics) self.assertTrue(np.allclose(model_2_7.expElogbeta, model_3_5.expElogbeta)) tstvec = [] self.assertTrue(np.allclose(model_2_7[tstvec], model_3_5[tstvec])) # try projecting an empty vector id2word_2_7 = dict(model_2_7.id2word.iteritems()) id2word_3_5 = dict(model_3_5.id2word.iteritems()) self.assertEqual(set(id2word_2_7.keys()), set(id2word_3_5.keys())) def testPersistenceIgnore(self): fname = get_tmpfile('gensim_models_lda_testPersistenceIgnore.tst') model = ldamodel.LdaModel(self.corpus, num_topics=2) model.save(fname, ignore='id2word') model2 = ldamodel.LdaModel.load(fname) self.assertTrue(model2.id2word is None) model.save(fname, ignore=['id2word']) model2 = ldamodel.LdaModel.load(fname) self.assertTrue(model2.id2word is None) def testPersistenceCompressed(self): fname = get_tmpfile('gensim_models_lda.tst.gz') model = self.model model.save(fname) model2 = self.class_.load(fname, mmap=None) self.assertEqual(model.num_topics, model2.num_topics) self.assertTrue(np.allclose(model.expElogbeta, model2.expElogbeta)) tstvec = [] self.assertTrue(np.allclose(model[tstvec], model2[tstvec])) # try projecting an empty vector def testLargeMmap(self): fname = get_tmpfile('gensim_models_lda.tst') model = self.model # simulate storing large arrays separately model.save(fname, sep_limit=0) # test loading the large model arrays with mmap model2 = self.class_.load(fname, mmap='r') self.assertEqual(model.num_topics, model2.num_topics) self.assertTrue(isinstance(model2.expElogbeta, np.memmap)) self.assertTrue(np.allclose(model.expElogbeta, 
model2.expElogbeta)) tstvec = [] self.assertTrue(np.allclose(model[tstvec], model2[tstvec])) # try projecting an empty vector def testLargeMmapCompressed(self): fname = get_tmpfile('gensim_models_lda.tst.gz') model = self.model # simulate storing large arrays separately model.save(fname, sep_limit=0) # test loading the large model arrays with mmap self.assertRaises(IOError, self.class_.load, fname, mmap='r') def testRandomStateBackwardCompatibility(self): # load a model saved using a pre-0.13.2 version of Gensim pre_0_13_2_fname = datapath('pre_0_13_2_model') model_pre_0_13_2 = self.class_.load(pre_0_13_2_fname) # set `num_topics` less than `model_pre_0_13_2.num_topics` so that `model_pre_0_13_2.random_state` is used model_topics = model_pre_0_13_2.print_topics(num_topics=2, num_words=3) for i in model_topics: self.assertTrue(isinstance(i[0], int)) self.assertTrue(isinstance(i[1], str)) # save back the loaded model using a post-0.13.2 version of Gensim post_0_13_2_fname = get_tmpfile('gensim_models_lda_post_0_13_2_model.tst') model_pre_0_13_2.save(post_0_13_2_fname) # load a model saved using a post-0.13.2 version of Gensim model_post_0_13_2 = self.class_.load(post_0_13_2_fname) model_topics_new = model_post_0_13_2.print_topics(num_topics=2, num_words=3) for i in model_topics_new: self.assertTrue(isinstance(i[0], int)) self.assertTrue(isinstance(i[1], str)) def testDtypeBackwardCompatibility(self): lda_3_0_1_fname = datapath('lda_3_0_1_model') test_doc = [(0, 1), (1, 1), (2, 1)] expected_topics = [(0, 0.87005886977475178), (1, 0.12994113022524822)] # save model to use in test # self.model.save(lda_3_0_1_fname) # load a model saved using a 3.0.1 version of Gensim model = self.class_.load(lda_3_0_1_fname) # and test it on a predefined document topics = model[test_doc] self.assertTrue(np.allclose(expected_topics, topics)) # endclass TestLdaModel class TestLdaMulticore(TestLdaModel): def setUp(self): self.corpus = mmcorpus.MmCorpus(datapath('testcorpus.mm')) self.class_ = ldamulticore.LdaMulticore self.model = self.class_(corpus, id2word=dictionary, num_topics=2, passes=100) # override LdaModel because multicore does not allow alpha=auto def testAlphaAuto(self): self.assertRaises(RuntimeError, self.class_, alpha='auto') # endclass TestLdaMulticore if __name__ == '__main__': logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.DEBUG) unittest.main()
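# --- Minimal LdaModel run mirroring the fixtures used in the tests above ---
# Build a Dictionary/corpus from gensim's bundled common_texts and inspect the
# topic mixture of one document. Exact topic weights vary between runs unless
# random_state is fixed; the values printed here are not asserted anywhere.

from gensim.corpora import Dictionary
from gensim.models import ldamodel
from gensim.test.utils import common_texts

if __name__ == "__main__":
    dictionary = Dictionary(common_texts)
    corpus = [dictionary.doc2bow(text) for text in common_texts]
    model = ldamodel.LdaModel(corpus, id2word=dictionary, num_topics=2, passes=10, random_state=0)
    print(model[corpus[0]])                 # e.g. [(0, 0.1...), (1, 0.8...)]
    print(model.print_topics(num_words=3))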
import os.path as op import numpy as np import matplotlib.pyplot as plt import mne from mne.datasets import somato from mne.baseline import rescale from mne.stats import bootstrap_confidence_interval ############################################################################### # Set parameters data_path = somato.data_path() subject = '01' task = 'somato' raw_fname = op.join(data_path, 'sub-{}'.format(subject), 'meg', 'sub-{}_task-{}_meg.fif'.format(subject, task)) # let's explore some frequency bands iter_freqs = [ ('Theta', 4, 7), ('Alpha', 8, 12), ('Beta', 13, 25), ('Gamma', 30, 45) ] ############################################################################### # We create average power time courses for each frequency band # set epoching parameters event_id, tmin, tmax = 1, -1., 3. baseline = None # get the header to extract events raw = mne.io.read_raw_fif(raw_fname) events = mne.find_events(raw, stim_channel='STI 014') frequency_map = list() for band, fmin, fmax in iter_freqs: # (re)load the data to save memory raw = mne.io.read_raw_fif(raw_fname) raw.pick_types(meg='grad', eog=True) # we just look at gradiometers raw.load_data() # bandpass filter raw.filter(fmin, fmax, n_jobs=1, # use more jobs to speed up. l_trans_bandwidth=1, # make sure filter params are the same h_trans_bandwidth=1) # in each band and skip "auto" option. # epoch epochs = mne.Epochs(raw, events, event_id, tmin, tmax, baseline=baseline, reject=dict(grad=4000e-13, eog=350e-6), preload=True) # remove evoked response epochs.subtract_evoked() # get analytic signal (envelope) epochs.apply_hilbert(envelope=True) frequency_map.append(((band, fmin, fmax), epochs.average())) del epochs del raw ############################################################################### # Now we can compute the Global Field Power # We can track the emergence of spatial patterns compared to baseline # for each frequency band, with a bootstrapped confidence interval. # # We see dominant responses in the Alpha and Beta bands. # Helper function for plotting spread def stat_fun(x): """Return sum of squares.""" return np.sum(x ** 2, axis=0) # Plot fig, axes = plt.subplots(4, 1, figsize=(10, 7), sharex=True, sharey=True) colors = plt.get_cmap('winter_r')(np.linspace(0, 1, 4)) for ((freq_name, fmin, fmax), average), color, ax in zip( frequency_map, colors, axes.ravel()[::-1]): times = average.times * 1e3 gfp = np.sum(average.data ** 2, axis=0) gfp = mne.baseline.rescale(gfp, times, baseline=(None, 0)) ax.plot(times, gfp, label=freq_name, color=color, linewidth=2.5) ax.axhline(0, linestyle='--', color='grey', linewidth=2) ci_low, ci_up = bootstrap_confidence_interval(average.data, random_state=0, stat_fun=stat_fun) ci_low = rescale(ci_low, average.times, baseline=(None, 0)) ci_up = rescale(ci_up, average.times, baseline=(None, 0)) ax.fill_between(times, gfp + ci_up, gfp - ci_low, color=color, alpha=0.3) ax.grid(True) ax.set_ylabel('GFP') ax.annotate('%s (%d-%dHz)' % (freq_name, fmin, fmax), xy=(0.95, 0.8), horizontalalignment='right', xycoords='axes fraction') ax.set_xlim(-1000, 3000) axes.ravel()[-1].set_xlabel('Time [ms]')
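# --- NumPy-only sketch of the global field power computed above ---
# The GFP curve plotted for each band is just a per-timepoint sum of squares
# across channels, baseline-corrected. Random data is used here so only the
# shape bookkeeping matters; channel and time counts are arbitrary.

import numpy as np

if __name__ == "__main__":
    rng = np.random.default_rng(0)
    data = rng.standard_normal((204, 600))   # (n_channels, n_times), e.g. gradiometers
    gfp = np.sum(data ** 2, axis=0)          # -> shape (n_times,)
    baseline = gfp[:100].mean()              # crude mean-baseline over early samples
    print(gfp.shape, (gfp - baseline)[:3])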
from cryptography.fernet import Fernet from cryptography.hazmat.primitives import serialization from jks import PrivateKeyEntry, KeyStore, TrustedCertEntry from lemur.common.defaults import common_name from lemur.common.utils import parse_certificate, parse_cert_chain, parse_private_key from lemur.plugins import lemur_jks as jks from lemur.plugins.bases import ExportPlugin def cert_chain_as_der(cert, chain): """Return a certificate and its chain in a list format, as expected by pyjks.""" certs = [parse_certificate(cert)] certs.extend(parse_cert_chain(chain)) # certs (list) – A list of certificates, as byte strings. The first one should be the one belonging to the private # key, the others the chain (in correct order). return [cert.public_bytes(encoding=serialization.Encoding.DER) for cert in certs] def create_truststore(cert, chain, alias, passphrase): entries = [] for idx, cert_bytes in enumerate(cert_chain_as_der(cert, chain)): # The original cert gets name <ALIAS>_cert, first chain element is <ALIAS>_cert_1, etc. cert_alias = alias + "_cert" + ("_{}".format(idx) if idx else "") entries.append(TrustedCertEntry.new(cert_alias, cert_bytes)) return KeyStore.new("jks", entries).saves(passphrase) def create_keystore(cert, chain, key, alias, passphrase): certs_bytes = cert_chain_as_der(cert, chain) key_bytes = parse_private_key(key).private_bytes( encoding=serialization.Encoding.DER, format=serialization.PrivateFormat.PKCS8, encryption_algorithm=serialization.NoEncryption(), ) entry = PrivateKeyEntry.new(alias, certs_bytes, key_bytes) return KeyStore.new("jks", [entry]).saves(passphrase) class JavaTruststoreExportPlugin(ExportPlugin): title = "Java Truststore (JKS)" slug = "java-truststore-jks" description = "Generates a JKS truststore" requires_key = False version = jks.VERSION author = "Marti Raudsepp" author_url = "https://github.com/intgr" options = [ { "name": "alias", "type": "str", "required": False, "helpMessage": "Enter the alias you wish to use for the truststore.", }, { "name": "passphrase", "type": "str", "required": False, "helpMessage": "If no passphrase is given one will be generated for you, we highly recommend this.", "validation": "", }, ] def export(self, body, chain, key, options, **kwargs): """ Generates a Java Truststore """ if self.get_option("alias", options): alias = self.get_option("alias", options) else: alias = common_name(parse_certificate(body)) if self.get_option("passphrase", options): passphrase = self.get_option("passphrase", options) else: passphrase = Fernet.generate_key().decode("utf-8") raw = create_truststore(body, chain, alias, passphrase) return "jks", passphrase, raw class JavaKeystoreExportPlugin(ExportPlugin): title = "Java Keystore (JKS)" slug = "java-keystore-jks" description = "Generates a JKS keystore" version = jks.VERSION author = "Marti Raudsepp" author_url = "https://github.com/intgr" options = [ { "name": "passphrase", "type": "str", "required": False, "helpMessage": "If no passphrase is given one will be generated for you, we highly recommend this.", "validation": "", }, { "name": "alias", "type": "str", "required": False, "helpMessage": "Enter the alias you wish to use for the keystore.", }, ] def export(self, body, chain, key, options, **kwargs): """ Generates a Java Keystore """ if self.get_option("passphrase", options): passphrase = self.get_option("passphrase", options) else: passphrase = Fernet.generate_key().decode("utf-8") if self.get_option("alias", options): alias = self.get_option("alias", options) else: alias = 
common_name(parse_certificate(body)) raw = create_keystore(body, chain, key, alias, passphrase) return "jks", passphrase, raw
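# --- Standalone sketch of the truststore entry naming used above ---
# create_truststore() names entries "<alias>_cert", "<alias>_cert_1",
# "<alias>_cert_2", ... for the leaf certificate plus each chain element. The
# helper below only reproduces that naming rule; the byte strings stand in for
# real DER-encoded certificates.

def truststore_aliases(alias, der_certs):
    """Return the per-entry aliases the pyjks entries would get, in order."""
    return [
        alias + "_cert" + ("_{}".format(idx) if idx else "")
        for idx, _cert in enumerate(der_certs)
    ]


if __name__ == "__main__":
    fake_chain = [b"leaf-der", b"intermediate-der", b"root-der"]
    print(truststore_aliases("www.example.com", fake_chain))
    # ['www.example.com_cert', 'www.example.com_cert_1', 'www.example.com_cert_2']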
from typing import List import voluptuous as vol from homeassistant.components.automation import AutomationActionType from homeassistant.components.device_automation import toggle_entity from homeassistant.const import CONF_DOMAIN from homeassistant.core import CALLBACK_TYPE, HomeAssistant from homeassistant.helpers.typing import ConfigType from . import DOMAIN TRIGGER_SCHEMA = toggle_entity.TRIGGER_SCHEMA.extend( {vol.Required(CONF_DOMAIN): DOMAIN} ) async def async_attach_trigger( hass: HomeAssistant, config: ConfigType, action: AutomationActionType, automation_info: dict, ) -> CALLBACK_TYPE: """Listen for state changes based on configuration.""" return await toggle_entity.async_attach_trigger( hass, config, action, automation_info ) async def async_get_triggers(hass: HomeAssistant, device_id: str) -> List[dict]: """List device triggers.""" return await toggle_entity.async_get_triggers(hass, device_id, DOMAIN) async def async_get_trigger_capabilities(hass: HomeAssistant, config: dict) -> dict: """List trigger capabilities.""" return await toggle_entity.async_get_trigger_capabilities(hass, config)
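# --- Standalone voluptuous sketch of the schema extension above ---
# The module only adds a fixed domain requirement on top of the shared
# toggle-entity trigger schema. The same Schema.extend pattern outside Home
# Assistant, with made-up keys and "remote" as a placeholder domain:

import voluptuous as vol

BASE_SCHEMA = vol.Schema({vol.Required("entity_id"): str, vol.Required("type"): str})
DOMAIN = "remote"
TRIGGER_SCHEMA = BASE_SCHEMA.extend({vol.Required("domain"): DOMAIN})

if __name__ == "__main__":
    config = {"entity_id": "remote.tv", "type": "turned_on", "domain": "remote"}
    print(TRIGGER_SCHEMA(config))
    # vol.Invalid is raised if "domain" is missing or is not "remote".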
from __future__ import print_function
import argparse, console, fileinput, sys


def msi(chars):
    '''Write chars prefixed by the ANSI CSI ("MSI") control character; a no-op for non-terminal output'''
    if sys.stdout.isatty():
        msi = u'\u009b'
        sys.stdout.write(msi + chars)


def clear_screen():
    '''Clear terminal screen by ANSI code "MSI c"'''
    msi('c')


def more(filenames, pagesize=10, clear=False, fmt='{line}'):
    '''Display content of filenames pagesize lines at a time (cleared if specified) with format fmt for each output line'''
    fileinput.close()  # in case still open
    try:
        pageno = 1
        if clear:
            clear_screen()
        for line in fileinput.input(filenames, openhook=fileinput.hook_encoded("utf-8")):
            lineno, filename, filelineno = fileinput.lineno(), fileinput.filename(), fileinput.filelineno()
            print(fmt.format(**locals()), end='')
            if pagesize and lineno % pagesize == 0:
                console.alert('Abort or continue', filename, 'Next page')  # TODO: use less intrusive mechanism than alert
                pageno += 1
                if clear:
                    clear_screen()
    finally:
        fileinput.close()

# --- main


def main(args):
    parser = argparse.ArgumentParser(
        description=__doc__,
        epilog='This is inefficient for long input, as StaSh pipes do not multitask'
    )
    parser.add_argument('file', help='files to display ("-" means stdin; stdin is the default)', action='store', nargs='*')
    parser.add_argument('-p', '--pageno', help='number screen pages cumulatively', action='store_true')
    parser.add_argument('-l', '--lineno', help='number lines cumulatively', action='store_true')
    parser.add_argument('-f', '--filename', help='label lines by filename', action='store_true')
    parser.add_argument('-n', '--filelineno', '-#', help='number lines per file', action='store_true')
    parser.add_argument(
        '-s',
        '--pagesize',
        help='number of lines per screen page (0 for no pagination)',
        action='store',
        type=int,
        default=40
    )  # TODO: use actual number of lines on screen for dynamic screen page size
    parser.add_argument('-c', '--clear', help='clear terminal screen before each screen page', action='store_true')
    ns = parser.parse_args(args)
    ns.line = True
    fmt = ' '.join('{' + var + '}' for var in 'pageno lineno filename filelineno line'.split() if getattr(ns, var))
    more(ns.file, ns.pagesize, ns.clear, fmt)


if __name__ == "__main__":
    main(sys.argv[1:])
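# --- Isolated sketch of the format-string assembly done in main() above ---
# Only the flags the user sets contribute fields; 'line' is always forced on.
# The helper name and the flag defaults here are placeholders.

def build_fmt(pageno=False, lineno=False, filename=False, filelineno=False):
    flags = {'pageno': pageno, 'lineno': lineno, 'filename': filename,
             'filelineno': filelineno, 'line': True}
    return ' '.join('{' + var + '}' for var in
                    'pageno lineno filename filelineno line'.split() if flags[var])


if __name__ == "__main__":
    print(repr(build_fmt(lineno=True, filename=True)))  # '{lineno} {filename} {line}'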
from datetime import timedelta import logging import requests import voluptuous as vol from homeassistant.const import ( CONF_API_KEY, CONF_SCAN_INTERVAL, CONF_URL, CONF_WHITELIST, HTTP_OK, STATE_UNAVAILABLE, STATE_UNKNOWN, ) from homeassistant.helpers import state as state_helper import homeassistant.helpers.config_validation as cv from homeassistant.helpers.event import track_point_in_time from homeassistant.util import dt as dt_util _LOGGER = logging.getLogger(__name__) DOMAIN = "emoncms_history" CONF_INPUTNODE = "inputnode" CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_API_KEY): cv.string, vol.Required(CONF_URL): cv.string, vol.Required(CONF_INPUTNODE): cv.positive_int, vol.Required(CONF_WHITELIST): cv.entity_ids, vol.Optional(CONF_SCAN_INTERVAL, default=30): cv.positive_int, } ) }, extra=vol.ALLOW_EXTRA, ) def setup(hass, config): """Set up the Emoncms history component.""" conf = config[DOMAIN] whitelist = conf.get(CONF_WHITELIST) def send_data(url, apikey, node, payload): """Send payload data to Emoncms.""" try: fullurl = f"{url}/input/post.json" data = {"apikey": apikey, "data": payload} parameters = {"node": node} req = requests.post( fullurl, params=parameters, data=data, allow_redirects=True, timeout=5 ) except requests.exceptions.RequestException: _LOGGER.error("Error saving data '%s' to '%s'", payload, fullurl) else: if req.status_code != HTTP_OK: _LOGGER.error( "Error saving data %s to %s (http status code = %d)", payload, fullurl, req.status_code, ) def update_emoncms(time): """Send whitelisted entities states regularly to Emoncms.""" payload_dict = {} for entity_id in whitelist: state = hass.states.get(entity_id) if state is None or state.state in (STATE_UNKNOWN, "", STATE_UNAVAILABLE): continue try: payload_dict[entity_id] = state_helper.state_as_number(state) except ValueError: continue if payload_dict: payload = "{%s}" % ",".join( f"{key}:{val}" for key, val in payload_dict.items() ) send_data( conf.get(CONF_URL), conf.get(CONF_API_KEY), str(conf.get(CONF_INPUTNODE)), payload, ) track_point_in_time( hass, update_emoncms, time + timedelta(seconds=conf.get(CONF_SCAN_INTERVAL)) ) update_emoncms(dt_util.utcnow()) return True
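# --- Standalone sketch of the Emoncms payload formatting above ---
# update_emoncms() serialises the whitelisted states into Emoncms' bare
# "{key:value,...}" input string before POSTing it. The same formatting step
# in isolation, with made-up entity ids; the helper name is a placeholder.

def emoncms_payload(states):
    """states: mapping of entity_id -> numeric state value."""
    return "{%s}" % ",".join(f"{key}:{val}" for key, val in states.items())


if __name__ == "__main__":
    print(emoncms_payload({"sensor.power": 230.5, "sensor.temp": 21.0}))
    # {sensor.power:230.5,sensor.temp:21.0}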
import os import os.path import sys import json import types import textwrap import logging import subprocess import attr from PyQt5.QtCore import QStandardPaths import pytest from qutebrowser.utils import standarddir, utils, qtutils # Use a different application name for tests to make sure we don't change real # qutebrowser data if we accidentally access the real path in a test. APPNAME = 'qute_test' pytestmark = pytest.mark.usefixtures('qapp') @pytest.fixture(autouse=True) def clear_standarddir_cache_and_patch(qapp, monkeypatch): """Make sure the standarddir cache is cleared before/after each test. Also, patch APPNAME to qute_test. """ assert qapp.applicationName() == APPNAME monkeypatch.setattr(standarddir, '_locations', {}) monkeypatch.setattr(standarddir, 'APPNAME', APPNAME) yield monkeypatch.setattr(standarddir, '_locations', {}) @pytest.mark.parametrize('orgname, expected', [(None, ''), ('test', 'test')]) def test_unset_organization(qapp, orgname, expected): """Test unset_organization. Args: orgname: The organizationName to set initially. expected: The organizationName which is expected when reading back. """ qapp.setOrganizationName(orgname) assert qapp.organizationName() == expected # sanity check with standarddir._unset_organization(): assert qapp.organizationName() == '' assert qapp.organizationName() == expected def test_unset_organization_no_qapp(monkeypatch): """Without a QApplication, _unset_organization should do nothing.""" monkeypatch.setattr(standarddir.QApplication, 'instance', lambda: None) with standarddir._unset_organization(): pass @pytest.mark.fake_os('mac') @pytest.mark.posix def test_fake_mac_config(tmpdir, monkeypatch): """Test standardir.config on a fake Mac.""" monkeypatch.setenv('HOME', str(tmpdir)) expected = str(tmpdir) + '/.qute_test' # always with / standarddir._init_config(args=None) assert standarddir.config() == expected @pytest.mark.parametrize('what', ['data', 'config', 'cache']) @pytest.mark.not_mac @pytest.mark.fake_os('windows') def test_fake_windows(tmpdir, monkeypatch, what): """Make sure the config/data/cache dirs are correct on a fake Windows.""" monkeypatch.setattr(standarddir.QStandardPaths, 'writableLocation', lambda typ: str(tmpdir / APPNAME)) standarddir._init_config(args=None) standarddir._init_data(args=None) standarddir._init_cache(args=None) func = getattr(standarddir, what) assert func() == str(tmpdir / APPNAME / what) @pytest.mark.posix def test_fake_haiku(tmpdir, monkeypatch): """Test getting data dir on HaikuOS.""" locations = { QStandardPaths.DataLocation: '', QStandardPaths.ConfigLocation: str(tmpdir / 'config' / APPNAME), } monkeypatch.setattr(standarddir.QStandardPaths, 'writableLocation', locations.get) monkeypatch.setattr(standarddir.sys, 'platform', 'haiku1') standarddir._init_data(args=None) assert standarddir.data() == str(tmpdir / 'config' / APPNAME / 'data') class TestWritableLocation: """Tests for _writable_location.""" def test_empty(self, monkeypatch): """Test QStandardPaths returning an empty value.""" monkeypatch.setattr( 'qutebrowser.utils.standarddir.QStandardPaths.writableLocation', lambda typ: '') with pytest.raises(standarddir.EmptyValueError): standarddir._writable_location(QStandardPaths.DataLocation) def test_sep(self, monkeypatch): """Make sure the right kind of separator is used.""" monkeypatch.setattr(standarddir.os, 'sep', '\\') monkeypatch.setattr(standarddir.os.path, 'join', lambda *parts: '\\'.join(parts)) loc = standarddir._writable_location(QStandardPaths.DataLocation) assert '/' not in loc assert 
'\\' in loc class TestStandardDir: @pytest.mark.parametrize('func, init_func, varname', [ (standarddir.data, standarddir._init_data, 'XDG_DATA_HOME'), (standarddir.config, standarddir._init_config, 'XDG_CONFIG_HOME'), (lambda: standarddir.config(auto=True), standarddir._init_config, 'XDG_CONFIG_HOME'), (standarddir.cache, standarddir._init_cache, 'XDG_CACHE_HOME'), (standarddir.runtime, standarddir._init_runtime, 'XDG_RUNTIME_DIR'), ]) @pytest.mark.linux def test_linux_explicit(self, monkeypatch, tmpdir, func, init_func, varname): """Test dirs with XDG environment variables explicitly set. Args: func: The function to test. init_func: The initialization function to call. varname: The environment variable which should be set. """ monkeypatch.setenv(varname, str(tmpdir)) if varname == 'XDG_RUNTIME_DIR': tmpdir.chmod(0o0700) init_func(args=None) assert func() == str(tmpdir / APPNAME) @pytest.mark.parametrize('func, subdirs', [ (standarddir.data, ['.local', 'share', APPNAME]), (standarddir.config, ['.config', APPNAME]), (lambda: standarddir.config(auto=True), ['.config', APPNAME]), (standarddir.cache, ['.cache', APPNAME]), (standarddir.download, ['Downloads']), ]) @pytest.mark.linux def test_linux_normal(self, monkeypatch, tmpdir, func, subdirs): """Test dirs with XDG_*_HOME not set.""" monkeypatch.setenv('HOME', str(tmpdir)) for var in ['DATA', 'CONFIG', 'CACHE']: monkeypatch.delenv('XDG_{}_HOME'.format(var), raising=False) standarddir._init_dirs() assert func() == str(tmpdir.join(*subdirs)) @pytest.mark.linux @pytest.mark.qt_log_ignore(r'^QStandardPaths: ') @pytest.mark.skipif( qtutils.version_check('5.14', compiled=False), reason="Qt 5.14 automatically creates missing runtime dirs") def test_linux_invalid_runtimedir(self, monkeypatch, tmpdir): """With invalid XDG_RUNTIME_DIR, fall back to TempLocation.""" tmpdir_env = tmpdir / 'temp' tmpdir_env.ensure(dir=True) monkeypatch.setenv('XDG_RUNTIME_DIR', str(tmpdir / 'does-not-exist')) monkeypatch.setenv('TMPDIR', str(tmpdir_env)) standarddir._init_runtime(args=None) assert standarddir.runtime() == str(tmpdir_env / APPNAME) @pytest.mark.fake_os('windows') def test_runtimedir_empty_tempdir(self, monkeypatch, tmpdir): """With an empty tempdir on non-Linux, we should raise.""" monkeypatch.setattr(standarddir.QStandardPaths, 'writableLocation', lambda typ: '') with pytest.raises(standarddir.EmptyValueError): standarddir._init_runtime(args=None) @pytest.mark.parametrize('func, elems, expected', [ (standarddir.data, 2, [APPNAME, 'data']), (standarddir.config, 2, [APPNAME, 'config']), (lambda: standarddir.config(auto=True), 2, [APPNAME, 'config']), (standarddir.cache, 2, [APPNAME, 'cache']), (standarddir.download, 1, ['Downloads']), ]) @pytest.mark.windows def test_windows(self, func, elems, expected): standarddir._init_dirs() assert func().split(os.sep)[-elems:] == expected @pytest.mark.parametrize('func, elems, expected', [ (standarddir.data, 2, ['Application Support', APPNAME]), (lambda: standarddir.config(auto=True), 1, [APPNAME]), (standarddir.config, 0, os.path.expanduser('~').split(os.sep) + ['.qute_test']), (standarddir.cache, 2, ['Caches', APPNAME]), (standarddir.download, 1, ['Downloads']), ]) @pytest.mark.mac def test_mac(self, func, elems, expected): standarddir._init_dirs() assert func().split(os.sep)[-elems:] == expected class TestArguments: """Tests the --basedir argument.""" @pytest.mark.parametrize('typ, args', [ ('config', []), ('config', [True]), # user config ('data', []), ('cache', []), ('download', []), pytest.param('runtime', [], 
marks=pytest.mark.linux)]) def test_basedir(self, tmpdir, typ, args): """Test --basedir.""" expected = str(tmpdir / typ) init_args = types.SimpleNamespace(basedir=str(tmpdir)) standarddir._init_dirs(init_args) func = getattr(standarddir, typ) assert func(*args) == expected def test_basedir_relative(self, tmpdir): """Test --basedir with a relative path.""" basedir = (tmpdir / 'basedir') basedir.ensure(dir=True) with tmpdir.as_cwd(): args = types.SimpleNamespace(basedir='basedir') standarddir._init_dirs(args) assert standarddir.config() == str(basedir / 'config') def test_config_py_arg(self, tmpdir): basedir = tmpdir / 'basedir' basedir.ensure(dir=True) with tmpdir.as_cwd(): args = types.SimpleNamespace( basedir='foo', config_py='basedir/config.py') standarddir._init_dirs(args) assert standarddir.config_py() == str(basedir / 'config.py') def test_config_py_no_arg(self, tmpdir): basedir = tmpdir / 'basedir' basedir.ensure(dir=True) with tmpdir.as_cwd(): args = types.SimpleNamespace(basedir='basedir') standarddir._init_dirs(args) assert standarddir.config_py() == str( basedir / 'config' / 'config.py') class TestInitCacheDirTag: """Tests for _init_cachedir_tag.""" def test_existent_cache_dir_tag(self, tmpdir, mocker, monkeypatch): """Test with an existent CACHEDIR.TAG.""" monkeypatch.setattr(standarddir, 'cache', lambda: str(tmpdir)) mocker.patch('builtins.open', side_effect=AssertionError) m = mocker.patch('qutebrowser.utils.standarddir.os') m.path.join.side_effect = os.path.join m.path.exists.return_value = True standarddir._init_cachedir_tag() assert not tmpdir.listdir() m.path.exists.assert_called_with(str(tmpdir / 'CACHEDIR.TAG')) def test_new_cache_dir_tag(self, tmpdir, mocker, monkeypatch): """Test creating a new CACHEDIR.TAG.""" monkeypatch.setattr(standarddir, 'cache', lambda: str(tmpdir)) standarddir._init_cachedir_tag() assert tmpdir.listdir() == [(tmpdir / 'CACHEDIR.TAG')] data = (tmpdir / 'CACHEDIR.TAG').read_text('utf-8') assert data == textwrap.dedent(""" Signature: 8a477f597d28d172789f06886806bc55 # This file is a cache directory tag created by qutebrowser. # For information about cache directory tags, see: # http://www.brynosaurus.com/cachedir/ """).lstrip() def test_open_oserror(self, caplog, unwritable_tmp_path, monkeypatch): """Test creating a new CACHEDIR.TAG.""" monkeypatch.setattr(standarddir, 'cache', lambda: str(unwritable_tmp_path)) with caplog.at_level(logging.ERROR, 'init'): standarddir._init_cachedir_tag() assert caplog.messages == ['Failed to create CACHEDIR.TAG'] class TestCreatingDir: """Make sure inexistent directories are created properly.""" DIR_TYPES = ['config', 'data', 'cache', 'download', 'runtime'] @pytest.mark.parametrize('typ', DIR_TYPES) def test_basedir(self, tmpdir, typ): """Test --basedir.""" basedir = tmpdir / 'basedir' assert not basedir.exists() args = types.SimpleNamespace(basedir=str(basedir)) standarddir._init_dirs(args) func = getattr(standarddir, typ) func() assert basedir.exists() if typ == 'download' or (typ == 'runtime' and not utils.is_linux): assert not (basedir / typ).exists() else: assert (basedir / typ).exists() if utils.is_posix: assert (basedir / typ).stat().mode & 0o777 == 0o700 @pytest.mark.parametrize('typ', DIR_TYPES) def test_exists_race_condition(self, mocker, tmpdir, typ): """Make sure there can't be a TOCTOU issue when creating the file. See https://github.com/qutebrowser/qutebrowser/issues/942. 
""" (tmpdir / typ).ensure(dir=True) m = mocker.patch('qutebrowser.utils.standarddir.os') m.makedirs = os.makedirs m.sep = os.sep m.path.join = os.path.join m.expanduser = os.path.expanduser m.path.exists.return_value = False m.path.abspath = lambda x: x args = types.SimpleNamespace(basedir=str(tmpdir)) standarddir._init_dirs(args) func = getattr(standarddir, typ) func() class TestSystemData: """Test system data path.""" @pytest.mark.linux def test_system_datadir_exist_linux(self, monkeypatch, tmpdir): """Test that /usr/share/qute_test is used if path exists.""" monkeypatch.setenv('XDG_DATA_HOME', str(tmpdir)) monkeypatch.setattr(os.path, 'exists', lambda path: True) standarddir._init_data(args=None) assert standarddir.data(system=True) == "/usr/share/qute_test" @pytest.mark.linux def test_system_datadir_not_exist_linux(self, monkeypatch, tmpdir, fake_args): """Test that system-wide path isn't used on linux if path not exist.""" fake_args.basedir = str(tmpdir) monkeypatch.setattr(os.path, 'exists', lambda path: False) standarddir._init_data(args=fake_args) assert standarddir.data(system=True) == standarddir.data() def test_system_datadir_unsupportedos(self, monkeypatch, tmpdir, fake_args): """Test that system-wide path is not used on non-Linux OS.""" fake_args.basedir = str(tmpdir) monkeypatch.setattr(sys, 'platform', 'potato') standarddir._init_data(args=fake_args) assert standarddir.data(system=True) == standarddir.data() class TestMoveWindowsAndMacOS: """Test other invocations of _move_data.""" @pytest.fixture(autouse=True) def patch_standardpaths(self, files, monkeypatch): locations = { QStandardPaths.DataLocation: str(files.local_data_dir), QStandardPaths.AppDataLocation: str(files.roaming_data_dir), } monkeypatch.setattr(standarddir.QStandardPaths, 'writableLocation', locations.get) monkeypatch.setattr( standarddir, 'config', lambda auto=False: str(files.auto_config_dir if auto else files.config_dir)) @pytest.fixture def files(self, tmpdir): @attr.s class Files: auto_config_dir = attr.ib() config_dir = attr.ib() local_data_dir = attr.ib() roaming_data_dir = attr.ib() return Files( auto_config_dir=tmpdir / 'auto_config' / APPNAME, config_dir=tmpdir / 'config' / APPNAME, local_data_dir=tmpdir / 'data' / APPNAME, roaming_data_dir=tmpdir / 'roaming-data' / APPNAME, ) def test_move_macos(self, files): """Test moving configs on macOS.""" (files.auto_config_dir / 'autoconfig.yml').ensure() (files.auto_config_dir / 'quickmarks').ensure() files.config_dir.ensure(dir=True) standarddir._move_macos() assert (files.auto_config_dir / 'autoconfig.yml').exists() assert not (files.config_dir / 'autoconfig.yml').exists() assert not (files.auto_config_dir / 'quickmarks').exists() assert (files.config_dir / 'quickmarks').exists() def test_move_windows(self, files): """Test moving configs on Windows.""" (files.local_data_dir / 'data' / 'blocked-hosts').ensure() (files.local_data_dir / 'qutebrowser.conf').ensure() (files.local_data_dir / 'cache' / 'cachefile').ensure() standarddir._move_windows() assert (files.roaming_data_dir / 'data' / 'blocked-hosts').exists() assert (files.roaming_data_dir / 'config' / 'qutebrowser.conf').exists() assert not (files.roaming_data_dir / 'cache').exists() assert (files.local_data_dir / 'cache' / 'cachefile').exists() class TestMove: @pytest.fixture def dirs(self, tmpdir): @attr.s class Dirs: old = attr.ib() new = attr.ib() old_file = attr.ib() new_file = attr.ib() old_dir = tmpdir / 'old' new_dir = tmpdir / 'new' return Dirs(old=old_dir, new=new_dir, old_file=old_dir / 
'file', new_file=new_dir / 'file') def test_no_old_dir(self, dirs, caplog): """Nothing should happen without any old directory.""" standarddir._move_data(str(dirs.old), str(dirs.new)) assert not any(message.startswith('Migrating data from') for message in caplog.messages) @pytest.mark.parametrize('empty_dest', [True, False]) def test_moving_data(self, dirs, empty_dest): dirs.old_file.ensure() if empty_dest: dirs.new.ensure(dir=True) standarddir._move_data(str(dirs.old), str(dirs.new)) assert not dirs.old_file.exists() assert dirs.new_file.exists() def test_already_existing(self, dirs, caplog): dirs.old_file.ensure() dirs.new_file.ensure() with caplog.at_level(logging.ERROR): standarddir._move_data(str(dirs.old), str(dirs.new)) expected = "Failed to move data from {} as {} is non-empty!".format( dirs.old, dirs.new) assert caplog.messages[-1] == expected def test_deleting_error(self, dirs, monkeypatch, mocker, caplog): """When there was an error it should be logged.""" mock = mocker.Mock(side_effect=OSError('error')) monkeypatch.setattr(standarddir.shutil, 'move', mock) dirs.old_file.ensure() with caplog.at_level(logging.ERROR): standarddir._move_data(str(dirs.old), str(dirs.new)) expected = "Failed to move data from {} to {}: error".format( dirs.old, dirs.new) assert caplog.messages[-1] == expected @pytest.mark.parametrize('args_kind', ['basedir', 'normal', 'none']) def test_init(mocker, tmpdir, monkeypatch, args_kind): """Do some sanity checks for standarddir.init(). Things like _init_cachedir_tag() are tested in more detail in other tests. """ assert standarddir._locations == {} monkeypatch.setenv('HOME', str(tmpdir)) m_windows = mocker.patch('qutebrowser.utils.standarddir._move_windows') m_mac = mocker.patch('qutebrowser.utils.standarddir._move_macos') if args_kind == 'normal': args = types.SimpleNamespace(basedir=None) elif args_kind == 'basedir': args = types.SimpleNamespace(basedir=str(tmpdir)) else: assert args_kind == 'none' args = None standarddir.init(args) assert standarddir._locations != {} if args_kind == 'normal': if utils.is_mac: m_windows.assert_not_called() assert m_mac.called elif utils.is_windows: assert m_windows.called m_mac.assert_not_called() else: m_windows.assert_not_called() m_mac.assert_not_called() else: m_windows.assert_not_called() m_mac.assert_not_called() @pytest.mark.linux def test_downloads_dir_not_created(monkeypatch, tmpdir): """Make sure ~/Downloads is not created.""" download_dir = tmpdir / 'Downloads' monkeypatch.setenv('HOME', str(tmpdir)) # Make sure xdg-user-dirs.dirs is not picked up monkeypatch.delenv('XDG_CONFIG_HOME', raising=False) standarddir._init_dirs() assert standarddir.download() == str(download_dir) assert not download_dir.exists() def test_no_qapplication(qapp, tmpdir, monkeypatch): """Make sure directories with/without QApplication are equal.""" sub_code = """ import sys import json sys.path = sys.argv[1:] # make sure we have the same python path from PyQt5.QtWidgets import QApplication from qutebrowser.utils import standarddir assert QApplication.instance() is None standarddir.APPNAME = 'qute_test' standarddir._init_dirs() locations = {k.name: v for k, v in standarddir._locations.items()} print(json.dumps(locations)) """ pyfile = tmpdir / 'sub.py' pyfile.write_text(textwrap.dedent(sub_code), encoding='ascii') for name in ['CONFIG', 'DATA', 'CACHE']: monkeypatch.delenv('XDG_{}_HOME'.format(name), raising=False) runtime_dir = tmpdir / 'runtime' runtime_dir.ensure(dir=True) runtime_dir.chmod(0o0700) monkeypatch.setenv('XDG_RUNTIME_DIR', 
str(runtime_dir)) home_dir = tmpdir / 'home' home_dir.ensure(dir=True) monkeypatch.setenv('HOME', str(home_dir)) proc = subprocess.run([sys.executable, str(pyfile)] + sys.path, universal_newlines=True, check=True, stdout=subprocess.PIPE) sub_locations = json.loads(proc.stdout) standarddir._init_dirs() locations = {k.name: v for k, v in standarddir._locations.items()} assert sub_locations == locations
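

# A minimal usage sketch (not part of qutebrowser) of the standarddir API the
# tests above exercise. The call pattern -- init() with an argparse-like
# namespace, then the per-category accessors -- is taken from the tests; how
# qutebrowser itself wires this up during startup may differ.
import types

from qutebrowser.utils import standarddir


def bootstrap_dirs(basedir=None):
    """Initialize standard directories and return the resolved paths."""
    args = types.SimpleNamespace(basedir=basedir)
    standarddir.init(args)  # also writes CACHEDIR.TAG and migrates old data
    return {
        'config': standarddir.config(),
        'auto_config': standarddir.config(auto=True),
        'data': standarddir.data(),
        'cache': standarddir.cache(),
        'download': standarddir.download(),
    }


if __name__ == '__main__':
    for name, path in bootstrap_dirs().items():
        print('{}: {}'.format(name, path))
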
from django.conf import settings from django.db import models, transaction from django.db.models import Count, Q from django.utils import timezone from django.utils.translation import gettext as _ from django.utils.translation import gettext_lazy, ngettext_lazy from jellyfish import damerau_levenshtein_distance from weblate.lang.models import Language from weblate.trans.mixins import UserDisplayMixin from weblate.trans.models.alert import ALERTS from weblate.trans.models.project import Project from weblate.utils.fields import JSONField class ChangeQuerySet(models.QuerySet): # pylint: disable=no-init def content(self, prefetch=False): """Return queryset with content changes.""" base = self if prefetch: base = base.prefetch() return base.filter(action__in=Change.ACTIONS_CONTENT) @staticmethod def count_stats(days, step, dtstart, base): """Count number of changes in given dataset and period grouped by step days.""" # Count number of changes result = [] for _unused in range(0, days, step): # Calculate interval int_start = dtstart int_end = int_start + timezone.timedelta(days=step) # Count changes int_base = base.filter(timestamp__range=(int_start, int_end)) count = int_base.aggregate(Count("id")) # Append to result result.append((int_start, count["id__count"])) # Advance to next interval dtstart = int_end return result def base_stats( self, days, step, project=None, component=None, translation=None, language=None, user=None, ): """Core of daily/weekly/monthly stats calculation.""" # Get range (actually start) dtstart = timezone.now() - timezone.timedelta(days=days + 1) # Base for filtering base = self.all() # Filter by translation/project if translation is not None: base = base.filter(translation=translation) elif component is not None: base = base.filter(component=component) elif project is not None: base = base.filter(project=project) # Filter by language if language is not None: base = base.filter(language=language) # Filter by language if user is not None: base = base.filter(user=user) return self.count_stats(days, step, dtstart, base) def prefetch(self): """Fetch related fields in a big chungs to avoid loading them individually.""" return self.prefetch_related( "user", "translation", "component", "project", "unit", "glossary_term", "translation__language", "translation__component", "translation__component__project", "unit__translation", "unit__translation__language", "unit__translation__plural", "unit__translation__component", "unit__translation__component__project", "component__project", ) def last_changes(self, user): """Return last changes for an user. Prefilter Changes by ACL for users and fetches related fields for last changes display. 
""" if user.is_superuser: return self.prefetch().order() return ( self.prefetch() .filter( Q(project_id__in=user.allowed_project_ids) & ( Q(component__isnull=True) | Q(component__restricted=False) | Q(component_id__in=user.component_permissions) ) ) .order() ) def authors_list(self, date_range=None): """Return list of authors.""" authors = self.content() if date_range is not None: authors = authors.filter(timestamp__range=date_range) return ( authors.exclude(author__isnull=True) .values("author") .annotate(change_count=Count("id")) .values_list("author__email", "author__full_name", "change_count") ) def order(self): return self.order_by("-timestamp") class ChangeManager(models.Manager): def create(self, user=None, **kwargs): """Wrapper to avoid using anonymous user as change owner.""" if user is not None and not user.is_authenticated: user = None return super().create(user=user, **kwargs) class Change(models.Model, UserDisplayMixin): ACTION_UPDATE = 0 ACTION_COMPLETE = 1 ACTION_CHANGE = 2 ACTION_COMMENT = 3 ACTION_SUGGESTION = 4 ACTION_NEW = 5 ACTION_AUTO = 6 ACTION_ACCEPT = 7 ACTION_REVERT = 8 ACTION_UPLOAD = 9 ACTION_DICTIONARY_NEW = 10 ACTION_DICTIONARY_EDIT = 11 ACTION_DICTIONARY_UPLOAD = 12 ACTION_NEW_SOURCE = 13 ACTION_LOCK = 14 ACTION_UNLOCK = 15 ACTION_DUPLICATE_STRING = 16 ACTION_COMMIT = 17 ACTION_PUSH = 18 ACTION_RESET = 19 ACTION_MERGE = 20 ACTION_REBASE = 21 ACTION_FAILED_MERGE = 22 ACTION_FAILED_REBASE = 23 ACTION_PARSE_ERROR = 24 ACTION_REMOVE_TRANSLATION = 25 ACTION_SUGGESTION_DELETE = 26 ACTION_REPLACE = 27 ACTION_FAILED_PUSH = 28 ACTION_SUGGESTION_CLEANUP = 29 ACTION_SOURCE_CHANGE = 30 ACTION_NEW_UNIT = 31 ACTION_BULK_EDIT = 32 ACTION_ACCESS_EDIT = 33 ACTION_ADD_USER = 34 ACTION_REMOVE_USER = 35 ACTION_APPROVE = 36 ACTION_MARKED_EDIT = 37 ACTION_REMOVE_COMPONENT = 38 ACTION_REMOVE_PROJECT = 39 ACTION_DUPLICATE_LANGUAGE = 40 ACTION_RENAME_PROJECT = 41 ACTION_RENAME_COMPONENT = 42 ACTION_MOVE_COMPONENT = 43 ACTION_NEW_STRING = 44 ACTION_NEW_CONTRIBUTOR = 45 ACTION_ANNOUNCEMENT = 46 ACTION_ALERT = 47 ACTION_ADDED_LANGUAGE = 48 ACTION_REQUESTED_LANGUAGE = 49 ACTION_CREATE_PROJECT = 50 ACTION_CREATE_COMPONENT = 51 ACTION_INVITE_USER = 52 ACTION_HOOK = 53 ACTION_REPLACE_UPLOAD = 54 ACTION_LICENSE_CHANGE = 55 ACTION_AGREEMENT_CHANGE = 56 ACTION_CHOICES = ( # Translators: Name of event in the history (ACTION_UPDATE, gettext_lazy("Resource update")), # Translators: Name of event in the history (ACTION_COMPLETE, gettext_lazy("Translation completed")), # Translators: Name of event in the history (ACTION_CHANGE, gettext_lazy("Translation changed")), # Translators: Name of event in the history (ACTION_NEW, gettext_lazy("New translation")), # Translators: Name of event in the history (ACTION_COMMENT, gettext_lazy("Comment added")), # Translators: Name of event in the history (ACTION_SUGGESTION, gettext_lazy("Suggestion added")), # Translators: Name of event in the history (ACTION_AUTO, gettext_lazy("Automatic translation")), # Translators: Name of event in the history (ACTION_ACCEPT, gettext_lazy("Suggestion accepted")), # Translators: Name of event in the history (ACTION_REVERT, gettext_lazy("Translation reverted")), # Translators: Name of event in the history (ACTION_UPLOAD, gettext_lazy("Translation uploaded")), # Translators: Name of event in the history (ACTION_DICTIONARY_NEW, gettext_lazy("Added to glossary")), # Translators: Name of event in the history (ACTION_DICTIONARY_EDIT, gettext_lazy("Glossary updated")), # Translators: Name of event in the history 
(ACTION_DICTIONARY_UPLOAD, gettext_lazy("Glossary uploaded")), # Translators: Name of event in the history (ACTION_NEW_SOURCE, gettext_lazy("New source string")), # Translators: Name of event in the history (ACTION_LOCK, gettext_lazy("Component locked")), # Translators: Name of event in the history (ACTION_UNLOCK, gettext_lazy("Component unlocked")), # Translators: Name of event in the history (ACTION_DUPLICATE_STRING, gettext_lazy("Found duplicated string")), # Translators: Name of event in the history (ACTION_COMMIT, gettext_lazy("Committed changes")), # Translators: Name of event in the history (ACTION_PUSH, gettext_lazy("Pushed changes")), # Translators: Name of event in the history (ACTION_RESET, gettext_lazy("Reset repository")), # Translators: Name of event in the history (ACTION_MERGE, gettext_lazy("Merged repository")), # Translators: Name of event in the history (ACTION_REBASE, gettext_lazy("Rebased repository")), # Translators: Name of event in the history (ACTION_FAILED_MERGE, gettext_lazy("Failed merge on repository")), # Translators: Name of event in the history (ACTION_FAILED_REBASE, gettext_lazy("Failed rebase on repository")), # Translators: Name of event in the history (ACTION_FAILED_PUSH, gettext_lazy("Failed push on repository")), # Translators: Name of event in the history (ACTION_PARSE_ERROR, gettext_lazy("Parse error")), # Translators: Name of event in the history (ACTION_REMOVE_TRANSLATION, gettext_lazy("Removed translation")), # Translators: Name of event in the history (ACTION_SUGGESTION_DELETE, gettext_lazy("Suggestion removed")), # Translators: Name of event in the history (ACTION_REPLACE, gettext_lazy("Search and replace")), # Translators: Name of event in the history (ACTION_SUGGESTION_CLEANUP, gettext_lazy("Suggestion removed during cleanup")), # Translators: Name of event in the history (ACTION_SOURCE_CHANGE, gettext_lazy("Source string changed")), # Translators: Name of event in the history (ACTION_NEW_UNIT, gettext_lazy("New string added")), # Translators: Name of event in the history (ACTION_BULK_EDIT, gettext_lazy("Bulk status change")), # Translators: Name of event in the history (ACTION_ACCESS_EDIT, gettext_lazy("Changed visibility")), # Translators: Name of event in the history (ACTION_ADD_USER, gettext_lazy("Added user")), # Translators: Name of event in the history (ACTION_REMOVE_USER, gettext_lazy("Removed user")), # Translators: Name of event in the history (ACTION_APPROVE, gettext_lazy("Translation approved")), # Translators: Name of event in the history (ACTION_MARKED_EDIT, gettext_lazy("Marked for edit")), # Translators: Name of event in the history (ACTION_REMOVE_COMPONENT, gettext_lazy("Removed component")), # Translators: Name of event in the history (ACTION_REMOVE_PROJECT, gettext_lazy("Removed project")), # Translators: Name of event in the history (ACTION_DUPLICATE_LANGUAGE, gettext_lazy("Found duplicated language")), # Translators: Name of event in the history (ACTION_RENAME_PROJECT, gettext_lazy("Renamed project")), # Translators: Name of event in the history (ACTION_RENAME_COMPONENT, gettext_lazy("Renamed component")), # Translators: Name of event in the history (ACTION_MOVE_COMPONENT, gettext_lazy("Moved component")), # Not translated, used plural instead (ACTION_NEW_STRING, "New string to translate"), # Translators: Name of event in the history (ACTION_NEW_CONTRIBUTOR, gettext_lazy("New contributor")), # Translators: Name of event in the history (ACTION_ANNOUNCEMENT, gettext_lazy("New announcement")), # Translators: Name of event in 
the history (ACTION_ALERT, gettext_lazy("New alert")), # Translators: Name of event in the history (ACTION_ADDED_LANGUAGE, gettext_lazy("Added new language")), # Translators: Name of event in the history (ACTION_REQUESTED_LANGUAGE, gettext_lazy("Requested new language")), # Translators: Name of event in the history (ACTION_CREATE_PROJECT, gettext_lazy("Created project")), # Translators: Name of event in the history (ACTION_CREATE_COMPONENT, gettext_lazy("Created component")), # Translators: Name of event in the history (ACTION_INVITE_USER, gettext_lazy("Invited user")), # Translators: Name of event in the history (ACTION_HOOK, gettext_lazy("Received repository notification")), # Translators: Name of event in the history (ACTION_REPLACE_UPLOAD, gettext_lazy("Replaced file by upload")), # Translators: Name of event in the history (ACTION_LICENSE_CHANGE, gettext_lazy("License changed")), # Translators: Name of event in the history (ACTION_AGREEMENT_CHANGE, gettext_lazy("Contributor agreement changed")), ) ACTIONS_DICT = dict(ACTION_CHOICES) ACTION_STRINGS = { name.lower().replace(" ", "-"): value for value, name in ACTION_CHOICES } ACTION_NAMES = {str(name): value for value, name in ACTION_CHOICES} # Actions which can be reverted ACTIONS_REVERTABLE = { ACTION_ACCEPT, ACTION_REVERT, ACTION_CHANGE, ACTION_UPLOAD, ACTION_NEW, ACTION_REPLACE, ACTION_AUTO, ACTION_APPROVE, ACTION_MARKED_EDIT, } # Content changes considered when looking for last author ACTIONS_CONTENT = { ACTION_CHANGE, ACTION_NEW, ACTION_AUTO, ACTION_ACCEPT, ACTION_REVERT, ACTION_UPLOAD, ACTION_REPLACE, ACTION_BULK_EDIT, ACTION_APPROVE, ACTION_MARKED_EDIT, } # Actions considered as being translated in consistency check ACTIONS_TRANSLATED = { ACTION_CHANGE, ACTION_NEW, ACTION_AUTO, ACTION_ACCEPT, ACTION_REVERT, ACTION_UPLOAD, ACTION_REPLACE, ACTION_APPROVE, } # Actions shown on the repository management page ACTIONS_REPOSITORY = { ACTION_COMMIT, ACTION_PUSH, ACTION_RESET, ACTION_MERGE, ACTION_REBASE, ACTION_FAILED_MERGE, ACTION_FAILED_REBASE, ACTION_FAILED_PUSH, ACTION_LOCK, ACTION_UNLOCK, ACTION_DUPLICATE_LANGUAGE, } # Actions where target is rendered as translation string ACTIONS_SHOW_CONTENT = { ACTION_SUGGESTION, ACTION_SUGGESTION_DELETE, ACTION_SUGGESTION_CLEANUP, ACTION_BULK_EDIT, ACTION_NEW_UNIT, ACTION_DICTIONARY_NEW, ACTION_DICTIONARY_EDIT, } # Actions indicating a repository merge failure ACTIONS_MERGE_FAILURE = { ACTION_FAILED_MERGE, ACTION_FAILED_REBASE, ACTION_FAILED_PUSH, } PLURAL_ACTIONS = { ACTION_NEW_STRING: ngettext_lazy( "New string to translate", "New strings to translate" ), } AUTO_ACTIONS = { # Translators: Name of event in the history ACTION_LOCK: gettext_lazy("Component automatically locked"), # Translators: Name of event in the history ACTION_UNLOCK: gettext_lazy("Component automatically unlocked"), } unit = models.ForeignKey("Unit", null=True, on_delete=models.deletion.CASCADE) language = models.ForeignKey( "lang.Language", null=True, on_delete=models.deletion.CASCADE ) project = models.ForeignKey("Project", null=True, on_delete=models.deletion.CASCADE) component = models.ForeignKey( "Component", null=True, on_delete=models.deletion.CASCADE ) translation = models.ForeignKey( "Translation", null=True, on_delete=models.deletion.CASCADE ) glossary_term = models.ForeignKey( "glossary.Term", null=True, on_delete=models.deletion.CASCADE ) comment = models.ForeignKey( "Comment", null=True, on_delete=models.deletion.SET_NULL ) suggestion = models.ForeignKey( "Suggestion", null=True, 
on_delete=models.deletion.SET_NULL ) announcement = models.ForeignKey( "Announcement", null=True, on_delete=models.deletion.SET_NULL ) alert = models.ForeignKey("Alert", null=True, on_delete=models.deletion.SET_NULL) user = models.ForeignKey( settings.AUTH_USER_MODEL, null=True, on_delete=models.deletion.CASCADE ) author = models.ForeignKey( settings.AUTH_USER_MODEL, null=True, related_name="author_set", on_delete=models.deletion.CASCADE, ) timestamp = models.DateTimeField(auto_now_add=True, db_index=True) action = models.IntegerField( choices=ACTION_CHOICES, default=ACTION_CHANGE, db_index=True ) target = models.TextField(default="", blank=True) old = models.TextField(default="", blank=True) details = JSONField() objects = ChangeManager.from_queryset(ChangeQuerySet)() class Meta: app_label = "trans" index_together = [ ("translation", "action", "timestamp"), ] verbose_name = "history event" verbose_name_plural = "history events" def __str__(self): return _("%(action)s at %(time)s on %(translation)s by %(user)s") % { "action": self.get_action_display(), "time": self.timestamp, "translation": self.translation, "user": self.get_user_display(False), } def save(self, *args, **kwargs): from weblate.accounts.tasks import notify_change if self.unit: self.translation = self.unit.translation if self.translation: self.component = self.translation.component self.language = self.translation.language if self.component: self.project = self.component.project if self.glossary_term: self.project = self.glossary_term.glossary.project self.language = self.glossary_term.language super().save(*args, **kwargs) transaction.on_commit(lambda: notify_change.delay(self.pk)) def get_absolute_url(self): """Return link either to unit or translation.""" if self.unit is not None: return self.unit.get_absolute_url() if self.translation is not None: if self.action == self.ACTION_NEW_STRING: return self.translation.get_translate_url() + "?q=is:untranslated" return self.translation.get_absolute_url() if self.component is not None: return self.component.get_absolute_url() if self.glossary_term is not None: return self.glossary_term.get_absolute_url() if self.project is not None: return self.project.get_absolute_url() return None def __init__(self, *args, **kwargs): self.notify_state = {} super().__init__(*args, **kwargs) @property def plural_count(self): return self.details.get("count", 1) @property def auto_status(self): return self.details.get("auto", False) def get_action_display(self): if self.action in self.PLURAL_ACTIONS: return self.PLURAL_ACTIONS[self.action] % self.plural_count if self.action in self.AUTO_ACTIONS and self.auto_status: return str(self.AUTO_ACTIONS[self.action]) return str(self.ACTIONS_DICT.get(self.action, self.action)) def is_merge_failure(self): return self.action in self.ACTIONS_MERGE_FAILURE def can_revert(self): return ( self.unit is not None and self.old and self.action in self.ACTIONS_REVERTABLE ) def show_source(self): """Whether to show content as source change.""" return self.action == self.ACTION_SOURCE_CHANGE def show_content(self): """Whether to show content as translation.""" return ( self.action in self.ACTIONS_SHOW_CONTENT or self.action in self.ACTIONS_REVERTABLE ) def get_details_display(self): # noqa: C901 from weblate.utils.markdown import render_markdown if self.action in (self.ACTION_ANNOUNCEMENT, self.ACTION_AGREEMENT_CHANGE): return render_markdown(self.target) # Following rendering relies on details present if not self.details: return "" user_actions = { self.ACTION_ADD_USER, 
            self.ACTION_INVITE_USER,
            self.ACTION_REMOVE_USER,
        }
        if self.action == self.ACTION_ACCESS_EDIT:
            for number, name in Project.ACCESS_CHOICES:
                if number == self.details["access_control"]:
                    return name
            return "Unknown {}".format(self.details["access_control"])
        if self.action in user_actions:
            if "group" in self.details:
                return "{username} ({group})".format(**self.details)
            return self.details["username"]
        if self.action in (
            self.ACTION_ADDED_LANGUAGE,
            self.ACTION_REQUESTED_LANGUAGE,
        ):  # noqa: E501
            try:
                return Language.objects.get(code=self.details["language"])
            except Language.DoesNotExist:
                return self.details["language"]
        if self.action == self.ACTION_ALERT:
            try:
                return ALERTS[self.details["alert"]].verbose
            except KeyError:
                return self.details["alert"]
        if self.action == self.ACTION_PARSE_ERROR:
            return "{filename}: {error_message}".format(**self.details)
        if self.action == self.ACTION_HOOK:
            return "{service_long_name}: {repo_url}, {branch}".format(**self.details)
        if self.action == self.ACTION_COMMENT and "comment" in self.details:
            return render_markdown(self.details["comment"])
        return ""

    def get_distance(self):
        try:
            return damerau_levenshtein_distance(self.old, self.target)
        except MemoryError:
            # Too long strings
            return abs(len(self.old) - len(self.target))
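

# A hypothetical usage sketch (not from Weblate) of the queryset helpers defined
# above: a 30-day activity histogram in 7-day buckets plus the author list for
# one project. Assumes a configured Django environment where Change and Project
# are importable from weblate.trans.models.
from weblate.trans.models import Change, Project


def project_activity(slug):
    project = Project.objects.get(slug=slug)

    # base_stats() returns (interval_start, change_count) tuples, one per step.
    histogram = Change.objects.content().base_stats(days=30, step=7, project=project)

    # authors_list() yields (email, full_name, change_count) for content changes.
    authors = list(Change.objects.filter(project=project).authors_list())

    return histogram, authors
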
import asyncio from itertools import chain, repeat import serial from homeassistant import config_entries, setup from homeassistant.components.dsmr import DOMAIN from tests.async_mock import DEFAULT, AsyncMock, patch from tests.common import MockConfigEntry SERIAL_DATA = {"serial_id": "12345678", "serial_id_gas": "123456789"} async def test_import_usb(hass, dsmr_connection_send_validate_fixture): """Test we can import.""" await setup.async_setup_component(hass, "persistent_notification", {}) entry_data = { "port": "/dev/ttyUSB0", "dsmr_version": "2.2", "precision": 4, "reconnect_interval": 30, } with patch("homeassistant.components.dsmr.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=entry_data, ) assert result["type"] == "create_entry" assert result["title"] == "/dev/ttyUSB0" assert result["data"] == {**entry_data, **SERIAL_DATA} async def test_import_usb_failed_connection( hass, dsmr_connection_send_validate_fixture ): """Test we can import.""" (connection_factory, transport, protocol) = dsmr_connection_send_validate_fixture await setup.async_setup_component(hass, "persistent_notification", {}) entry_data = { "port": "/dev/ttyUSB0", "dsmr_version": "2.2", "precision": 4, "reconnect_interval": 30, } # override the mock to have it fail the first time and succeed after first_fail_connection_factory = AsyncMock( return_value=(transport, protocol), side_effect=chain([serial.serialutil.SerialException], repeat(DEFAULT)), ) with patch( "homeassistant.components.dsmr.async_setup_entry", return_value=True ), patch( "homeassistant.components.dsmr.config_flow.create_dsmr_reader", first_fail_connection_factory, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=entry_data, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_import_usb_no_data(hass, dsmr_connection_send_validate_fixture): """Test we can import.""" (connection_factory, transport, protocol) = dsmr_connection_send_validate_fixture await setup.async_setup_component(hass, "persistent_notification", {}) entry_data = { "port": "/dev/ttyUSB0", "dsmr_version": "2.2", "precision": 4, "reconnect_interval": 30, } # override the mock to have it fail the first time and succeed after wait_closed = AsyncMock( return_value=(transport, protocol), side_effect=chain([asyncio.TimeoutError], repeat(DEFAULT)), ) protocol.wait_closed = wait_closed with patch("homeassistant.components.dsmr.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=entry_data, ) assert result["type"] == "abort" assert result["reason"] == "cannot_communicate" async def test_import_usb_wrong_telegram(hass, dsmr_connection_send_validate_fixture): """Test we can import.""" (connection_factory, transport, protocol) = dsmr_connection_send_validate_fixture await setup.async_setup_component(hass, "persistent_notification", {}) entry_data = { "port": "/dev/ttyUSB0", "dsmr_version": "2.2", "precision": 4, "reconnect_interval": 30, } protocol.telegram = {} with patch("homeassistant.components.dsmr.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=entry_data, ) assert result["type"] == "abort" assert result["reason"] == "cannot_communicate" async def test_import_network(hass, 
dsmr_connection_send_validate_fixture): """Test we can import from network.""" await setup.async_setup_component(hass, "persistent_notification", {}) entry_data = { "host": "localhost", "port": "1234", "dsmr_version": "2.2", "precision": 4, "reconnect_interval": 30, } with patch("homeassistant.components.dsmr.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=entry_data, ) assert result["type"] == "create_entry" assert result["title"] == "localhost:1234" assert result["data"] == {**entry_data, **SERIAL_DATA} async def test_import_update(hass, dsmr_connection_send_validate_fixture): """Test we can import.""" await setup.async_setup_component(hass, "persistent_notification", {}) entry_data = { "port": "/dev/ttyUSB0", "dsmr_version": "2.2", "precision": 4, "reconnect_interval": 30, } entry = MockConfigEntry( domain=DOMAIN, data=entry_data, unique_id="/dev/ttyUSB0", ) entry.add_to_hass(hass) with patch( "homeassistant.components.dsmr.async_setup_entry", return_value=True ), patch("homeassistant.components.dsmr.async_unload_entry", return_value=True): await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() new_entry_data = { "port": "/dev/ttyUSB0", "dsmr_version": "2.2", "precision": 3, "reconnect_interval": 30, } with patch( "homeassistant.components.dsmr.async_setup_entry", return_value=True ), patch("homeassistant.components.dsmr.async_unload_entry", return_value=True): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=new_entry_data, ) await hass.async_block_till_done() assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["precision"] == 3
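

# Hedged sketch of the *shape* of the dsmr_connection_send_validate_fixture the
# tests above rely on -- this is not Home Assistant's real conftest. Inferred
# from usage: it yields (connection_factory, transport, protocol), protocol has
# a .telegram dict and an awaitable .wait_closed, and the serial reader factory
# used by the config flow is patched. The real fixture also fills .telegram with
# parsed DSMR objects that produce SERIAL_DATA; that part is omitted here.
from unittest.mock import MagicMock

import pytest

from tests.async_mock import AsyncMock, patch


@pytest.fixture
async def dsmr_connection_send_validate_fixture():
    """Yield a fake (connection_factory, transport, protocol) triple."""
    transport = MagicMock()
    protocol = MagicMock()
    protocol.telegram = {}  # placeholder; real fixture provides telegram objects
    protocol.wait_closed = AsyncMock()

    connection_factory = AsyncMock(return_value=(transport, protocol))

    with patch(
        "homeassistant.components.dsmr.config_flow.create_dsmr_reader",
        connection_factory,
    ):
        yield (connection_factory, transport, protocol)
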
from collections import defaultdict import diamond.collector try: import psycopg2 import psycopg2.extras psycopg2 # workaround for pyflakes issue #13 except ImportError: psycopg2 = None STATS_QUERIES = ['SHOW POOLS', 'SHOW STATS'] IGNORE_COLUMNS = ['user'] class PgbouncerCollector(diamond.collector.Collector): def get_default_config_help(self): config_help = super(PgbouncerCollector, self).get_default_config_help() config_help.update({ 'user': 'Username', 'password': 'Password', 'instances': 'A subcategory of pgbouncer instances with a host ' 'and port, and optionally user and password can be ' 'overridden per instance (see example).', }) return config_help def get_default_config(self): config = super(PgbouncerCollector, self).get_default_config() config.update({ 'path': 'pgbouncer', 'method': 'Threaded', 'user': 'postgres', 'password': '', 'instances': {}, }) return config def collect(self): if psycopg2 is None: self.log.error('Unable to import module psycopg2.') return {} instances = self.config['instances'] # HACK: setting default with subcategory messes up merging of configs, # so we only set the default if one wasn't provided. if not instances: instances = { 'default': { 'host': 'localhost', 'port': '6432', } } for name, instance in instances.iteritems(): host = instance['host'] port = instance['port'] user = instance.get('user') or self.config['user'] password = instance.get('password') or self.config['password'] for database, stats in self._get_stats_by_database( host, port, user, password).iteritems(): for stat_name, stat_value in stats.iteritems(): self.publish( self._get_metric_name(name, database, stat_name), stat_value) def _get_metric_name(self, name, database, stat_name): name = name.replace('.', '_').replace(':', '_').strip() return '.'.join([name, database, stat_name]) def _get_stats_by_database(self, host, port, user, password): # Mapping of database name -> stats. databases = defaultdict(dict) conn = psycopg2.connect(database='pgbouncer', user=user, password=password, host=host, port=port) # Avoid using transactions, set isolation level to autocommit conn.set_isolation_level(0) cursor = conn.cursor(cursor_factory=psycopg2.extras.DictCursor) for query in STATS_QUERIES: cursor.execute(query) for row in cursor.fetchall(): stats = row.copy() database = stats.pop('database') for ignore in IGNORE_COLUMNS: if ignore in stats: stats.pop(ignore) databases[database].update(stats) return databases
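

# Quick standalone illustration (hypothetical values) of the metric names the
# collector above publishes: _get_metric_name() sanitizes the instance name and
# joins it with the database and the SHOW POOLS / SHOW STATS column; Diamond
# then prefixes the collector path ("pgbouncer") when publishing.
def _metric_name(instance, database, stat):
    # Same logic as PgbouncerCollector._get_metric_name
    instance = instance.replace('.', '_').replace(':', '_').strip()
    return '.'.join([instance, database, stat])


assert _metric_name('main:6432', 'mydb', 'cl_active') == 'main_6432.mydb.cl_active'
# Published on the wire as: pgbouncer.main_6432.mydb.cl_active
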
import json from app.database import model from app.tasks.tasks import do_webhook_shell from . import WEBHOOKDATA def test_do_webhook_shell(create_server, create_webhook, sql): server = create_server() webhook = create_webhook(server_id=server['id']) data = WEBHOOKDATA['github'] history = model.History( status='1', webhook_id=webhook['id'], data=json.dumps(data) ) sql.add(history) sql.commit() history_id = history.id text = 'select * from history where id=:id' result = sql.execute(text, {'id': history_id}).fetchone() assert result.status == '1' do_webhook_shell.apply(args=(webhook['id'], history_id, data)).get() result = sql.execute(text, {'id': history_id}).fetchone() assert result.status == '5'
import logging
import socket
from urllib.parse import urlencode

import voluptuous as vol

from homeassistant.components.notify import (
    ATTR_DATA,
    PLATFORM_SCHEMA,
    BaseNotificationService,
)
from homeassistant.const import CONF_HOST, CONF_PORT
import homeassistant.helpers.config_validation as cv

ATTR_METHOD = "method"
ATTR_METHOD_DEFAULT = "speak"
ATTR_METHOD_ALLOWED = ["speak", "alarm"]

DEFAULT_PORT = 1035

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_HOST): cv.string,
        vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
    }
)

_LOGGER = logging.getLogger(__name__)


def get_service(hass, config, discovery_info=None):
    """Get the Lannouncer notification service."""
    host = config.get(CONF_HOST)
    port = config.get(CONF_PORT)

    return LannouncerNotificationService(hass, host, port)


class LannouncerNotificationService(BaseNotificationService):
    """Implementation of a notification service for Lannouncer."""

    def __init__(self, hass, host, port):
        """Initialize the service."""
        self._hass = hass
        self._host = host
        self._port = port

    def send_message(self, message="", **kwargs):
        """Send a message to Lannouncer."""
        data = kwargs.get(ATTR_DATA)
        if data is not None and ATTR_METHOD in data:
            method = data.get(ATTR_METHOD)
        else:
            method = ATTR_METHOD_DEFAULT

        if method not in ATTR_METHOD_ALLOWED:
            _LOGGER.error("Unknown method %s", method)
            return

        cmd = urlencode({method: message})

        try:
            # Open socket
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.settimeout(10)
            sock.connect((self._host, self._port))

            # Send message
            _LOGGER.debug("Sending message: %s", cmd)
            sock.sendall(cmd.encode())
            sock.sendall(b"&@DONE@\n")

            # Check response
            buffer = sock.recv(1024)
            if buffer != b"LANnouncer: OK":
                _LOGGER.error("Error sending data to Lannouncer: %s", buffer.decode())

            # Close socket
            sock.close()
        except socket.gaierror:
            _LOGGER.error("Unable to connect to host %s", self._host)
        except OSError:
            _LOGGER.exception("Failed to send data to Lannouncer")
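

# Standalone sketch of the wire format send_message() above writes to the
# LANnouncer TCP port: a URL-encoded "<method>=<message>" command followed by
# the literal terminator b"&@DONE@\n". Host and message below are placeholders.
import socket
from urllib.parse import urlencode


def announce(host, message, method="speak", port=1035):
    """Send one announcement and return the raw response bytes."""
    cmd = urlencode({method: message})  # e.g. "speak=Dinner+is+ready"
    with socket.create_connection((host, port), timeout=10) as sock:
        sock.sendall(cmd.encode())
        sock.sendall(b"&@DONE@\n")
        return sock.recv(1024)  # expected: b"LANnouncer: OK"


# announce("192.168.1.50", "Dinner is ready")
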
import sys

import argparse
import argcomplete
import traceback

import pygit2
from subprocess import CalledProcessError

from gitless import core

from . import (
    gl_track, gl_untrack, gl_status, gl_diff, gl_commit, gl_branch, gl_tag,
    gl_checkout, gl_merge, gl_resolve, gl_fuse, gl_remote, gl_publish,
    gl_switch, gl_init, gl_history)
from . import pprint
from . import helpers

SUCCESS = 0
ERRORS_FOUND = 1
# 2 is used by argparse to indicate cmd syntax errors.
INTERNAL_ERROR = 3
NOT_IN_GL_REPO = 4

__version__ = '0.8.8'
URL = 'http://gitless.com'


repo = None
try:
  repo = core.Repository()
  try:
    pprint.DISABLE_COLOR = not repo.config.get_bool('color.ui')
  except pygit2.GitError:
    pprint.DISABLE_COLOR = (
        repo.config['color.ui'] in ['no', 'never'])
except (core.NotInRepoError, KeyError):
  pass


def print_help(parser):
  """print help for humans"""
  print(parser.description)
  print('\ncommands:\n')

  # https://stackoverflow.com/questions/20094215/argparse-subparser-monolithic-help-output
  # retrieve subparsers from parser
  subparsers_actions = [
      action for action in parser._actions
      if isinstance(action, argparse._SubParsersAction)]
  # there will probably only be one subparser_action,
  # but better safe than sorry
  for subparsers_action in subparsers_actions:
    # get all subparsers and print help
    for choice in subparsers_action._choices_actions:
      print('  {:<19} {}'.format(choice.dest, choice.help))


def build_parser(subcommands, repo):
  parser = argparse.ArgumentParser(
      description=(
          'Gitless: a version control system built on top of Git.\nMore info, '
          'downloads and documentation at {0}'.format(URL)),
      formatter_class=argparse.RawDescriptionHelpFormatter)
  if sys.version_info[0] < 3:
    parser.register('action', 'parsers', helpers.AliasedSubParsersAction)
  parser.add_argument(
      '--version', action='version', version=(
          'GL Version: {0}\nYou can check if there\'s a new version of Gitless '
          'available at {1}'.format(__version__, URL)))
  subparsers = parser.add_subparsers(title='subcommands', dest='subcmd_name')
  subparsers.required = True

  for sub_cmd in subcommands:
    sub_cmd.parser(subparsers, repo)

  return parser


def setup_windows_console():
  if sys.platform == 'win32':
    import ctypes
    kernel32 = ctypes.windll.kernel32
    kernel32.SetConsoleMode(kernel32.GetStdHandle(-11), 7)


def main():
  sub_cmds = [
      gl_track, gl_untrack, gl_status, gl_diff, gl_commit, gl_branch, gl_tag,
      gl_checkout, gl_merge, gl_resolve, gl_fuse, gl_remote, gl_publish,
      gl_switch, gl_init, gl_history]
  parser = build_parser(sub_cmds, repo)
  argcomplete.autocomplete(parser)
  if len(sys.argv) == 1:
    print_help(parser)
    return SUCCESS

  args = parser.parse_args()
  try:
    if args.subcmd_name != 'init' and not repo:
      raise core.NotInRepoError('You are not in a Gitless repository')

    setup_windows_console()
    return SUCCESS if args.func(args, repo) else ERRORS_FOUND
  except KeyboardInterrupt:
    pprint.puts('\n')
    pprint.msg('Keyboard interrupt detected, operation aborted')
    return SUCCESS
  except core.NotInRepoError as e:
    pprint.err(e)
    pprint.err_exp('do gl init to turn this directory into an empty repository')
    pprint.err_exp('do gl init remote_repo to clone an existing repository')
    return NOT_IN_GL_REPO
  except (ValueError, pygit2.GitError, core.GlError) as e:
    pprint.err(e)
    return ERRORS_FOUND
  except CalledProcessError as e:
    pprint.err(e.stderr)
    return ERRORS_FOUND
  except:
    pprint.err('Some internal error occurred')
    pprint.err_exp(
        'If you want to help, see {0} for info on how to report bugs and '
        'include the following information:\n\n{1}\n\n{2}'.format(
            URL, __version__, traceback.format_exc()))
return INTERNAL_ERROR
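

# Hedged sketch of a console-script wrapper around main() above; Gitless's real
# entry point may be wired differently. It assumes this module is importable as
# gitless.cli.gl and simply forwards the returned status code (SUCCESS,
# ERRORS_FOUND, INTERNAL_ERROR or NOT_IN_GL_REPO) to the shell.
import sys

from gitless.cli import gl

if __name__ == '__main__':
    sys.exit(gl.main())
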
import hangups from common import run_example async def lookup_entities(client, args): """Search for entities by phone number, email, or gaia_id.""" lookup_spec = _get_lookup_spec(args.entity_identifier) request = hangups.hangouts_pb2.GetEntityByIdRequest( request_header=client.get_request_header(), batch_lookup_spec=[lookup_spec], ) res = await client.get_entity_by_id(request) # Print the list of entities in the response. for entity_result in res.entity_result: for entity in entity_result.entity: print(entity) def _get_lookup_spec(identifier): """Return EntityLookupSpec from phone number, email address, or gaia ID.""" if identifier.startswith('+'): return hangups.hangouts_pb2.EntityLookupSpec( phone=identifier, create_offnetwork_gaia=True ) elif '@' in identifier: return hangups.hangouts_pb2.EntityLookupSpec( email=identifier, create_offnetwork_gaia=True ) else: return hangups.hangouts_pb2.EntityLookupSpec(gaia_id=identifier) if __name__ == '__main__': run_example(lookup_entities, '--entity-identifier')
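

# Quick illustration (made-up identifiers) of how _get_lookup_spec() above
# dispatches on the identifier's shape; meant to be run inside this example
# module, where the function is in scope.
def _demo_lookup_spec():
    spec = _get_lookup_spec('+15551234567')
    assert spec.phone == '+15551234567' and spec.create_offnetwork_gaia

    spec = _get_lookup_spec('someone@example.com')
    assert spec.email == 'someone@example.com' and spec.create_offnetwork_gaia

    spec = _get_lookup_spec('104242424242424242420')
    assert spec.gaia_id == '104242424242424242420'
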
from __future__ import division import re from decimal import Decimal from math import ceil, cos, floor, log10, pi, sin from pygal._compat import _ellipsis, to_unicode, u def float_format(number): """Format a float to a precision of 3, without zeroes or dots""" return ("%.3f" % number).rstrip('0').rstrip('.') def majorize(values): """Filter sequence to return only major considered numbers""" sorted_values = sorted(values) if len(values) <= 3 or ( abs(2 * sorted_values[1] - sorted_values[0] - sorted_values[2]) > abs(1.5 * (sorted_values[1] - sorted_values[0]))): return [] values_step = sorted_values[1] - sorted_values[0] full_range = sorted_values[-1] - sorted_values[0] step = 10**int(log10(full_range)) if step == values_step: step *= 10 step_factor = 10**(int(log10(step)) + 1) if round(step * step_factor) % (round(values_step * step_factor) or 1): # TODO: Find lower common multiple instead step *= values_step if full_range <= 2 * step: step *= .5 elif full_range >= 5 * step: step *= 5 major_values = [ value for value in values if value / step == round(value / step) ] return [value for value in sorted_values if value in major_values] def round_to_int(number, precision): """Round a number to a precision""" precision = int(precision) rounded = (int(number) + precision / 2) // precision * precision return rounded def round_to_float(number, precision): """Round a float to a precision""" rounded = Decimal(str(floor((number + precision / 2) // precision)) ) * Decimal(str(precision)) return float(rounded) def round_to_scale(number, precision): """Round a number or a float to a precision""" if precision < 1: return round_to_float(number, precision) return round_to_int(number, precision) def cut(list_, index=0): """Cut a list by index or arg""" if isinstance(index, int): cut_ = lambda x: x[index] else: cut_ = lambda x: getattr(x, index) return list(map(cut_, list_)) def rad(degrees): """Convert degrees in radiants""" return pi * degrees / 180 def deg(radiants): """Convert radiants in degrees""" return 180 * radiants / pi def _swap_curly(string): """Swap single and double curly brackets""" return ( string.replace('{{ ', '{{').replace('{{', '\x00').replace('{', '{{') .replace('\x00', '{').replace(' }}', '}}').replace('}}', '\x00') .replace('}', '}}').replace('\x00', '}') ) def template(string, **kwargs): """Format a string using double braces""" return _swap_curly(string).format(**kwargs) swap = lambda tuple_: tuple(reversed(tuple_)) ident = lambda x: x def compute_logarithmic_scale(min_, max_, min_scale, max_scale): """Compute an optimal scale for logarithmic""" if max_ <= 0 or min_ <= 0: return [] min_order = int(floor(log10(min_))) max_order = int(ceil(log10(max_))) positions = [] amplitude = max_order - min_order if amplitude <= 1: return [] detail = 10. 
while amplitude * detail < min_scale * 5: detail *= 2 while amplitude * detail > max_scale * 3: detail /= 2 for order in range(min_order, max_order + 1): for i in range(int(detail)): tick = (10 * i / detail or 1) * 10**order tick = round_to_scale(tick, tick) if min_ <= tick <= max_ and tick not in positions: positions.append(tick) return positions def compute_scale(min_, max_, logarithmic, order_min, min_scale, max_scale): """Compute an optimal scale between min and max""" if min_ == 0 and max_ == 0: return [0] if max_ - min_ == 0: return [min_] if logarithmic: log_scale = compute_logarithmic_scale(min_, max_, min_scale, max_scale) if log_scale: return log_scale # else we fallback to normal scalling order = round(log10(max(abs(min_), abs(max_)))) - 1 if order_min is not None and order < order_min: order = order_min else: while ((max_ - min_) / (10**order) < min_scale and (order_min is None or order > order_min)): order -= 1 step = float(10**order) while (max_ - min_) / step > max_scale: step *= 2. positions = [] position = round_to_scale(min_, step) while position < (max_ + step): rounded = round_to_scale(position, step) if min_ <= rounded <= max_: if rounded not in positions: positions.append(rounded) position += step if len(positions) < 2: return [min_, max_] return positions def text_len(length, fs): """Approximation of text width""" return length * 0.6 * fs def reverse_text_len(width, fs): """Approximation of text length""" return int(width / (0.6 * fs)) def get_text_box(text, fs): """Approximation of text bounds""" return (fs, text_len(len(text), fs)) def get_texts_box(texts, fs): """Approximation of multiple texts bounds""" max_len = max(map(len, texts)) return (fs, text_len(max_len, fs)) def decorate(svg, node, metadata): """Add metedata next to a node""" if not metadata: return node xlink = metadata.get('xlink') if xlink: if not isinstance(xlink, dict): xlink = {'href': xlink, 'target': '_blank'} node = svg.node(node, 'a', **xlink) svg.node( node, 'desc', class_='xlink' ).text = to_unicode(xlink.get('href')) if 'tooltip' in metadata: svg.node(node, 'title').text = to_unicode(metadata['tooltip']) if 'color' in metadata: color = metadata.pop('color') node.attrib['style'] = 'fill: %s; stroke: %s' % (color, color) if 'style' in metadata: node.attrib['style'] = metadata.pop('style') if 'label' in metadata and metadata['label']: svg.node( node, 'desc', class_='label' ).text = to_unicode(metadata['label']) return node def alter(node, metadata): """Override nodes attributes from metadata node mapping""" if node is not None and metadata and 'node' in metadata: node.attrib.update( dict((k, str(v)) for k, v in metadata['node'].items()) ) def truncate(string, index): """Truncate a string at index and add ...""" if len(string) > index and index > 0: string = string[:index - 1] + u('…') return string # # Stolen partly from brownie http://packages.python.org/Brownie/ class cached_property(object): """Memoize a property""" def __init__(self, getter, doc=None): """Initialize the decorator""" self.getter = getter self.__module__ = getter.__module__ self.__name__ = getter.__name__ self.__doc__ = doc or getter.__doc__ def __get__(self, obj, type_=None): """ Get descriptor calling the property function and replacing it with its value or on state if we are in the transient state. 
""" if obj is None: return self value = self.getter(obj) if hasattr(obj, 'state'): setattr(obj.state, self.__name__, value) else: obj.__dict__[self.__name__] = self.getter(obj) return value css_comments = re.compile(r'/\*.*?\*/', re.MULTILINE | re.DOTALL) def minify_css(css): """Little css minifier""" # Inspired by slimmer by Peter Bengtsson remove_next_comment = 1 for css_comment in css_comments.findall(css): if css_comment[-3:] == r'\*/': remove_next_comment = 0 continue if remove_next_comment: css = css.replace(css_comment, '') else: remove_next_comment = 1 # >= 2 whitespace becomes one whitespace css = re.sub(r'\s\s+', ' ', css) # no whitespace before end of line css = re.sub(r'\s+\n', '', css) # Remove space before and after certain chars for char in ('{', '}', ':', ';', ','): css = re.sub(char + r'\s', char, css) css = re.sub(r'\s' + char, char, css) css = re.sub(r'}\s(#|\w)', r'}\1', css) # no need for the ; before end of attributes css = re.sub(r';}', r'}', css) css = re.sub(r'}//-->', r'}\n//-->', css) return css.strip() def compose(f, g): """Chain functions""" fun = lambda *args, **kwargs: f(g(*args, **kwargs)) fun.__name__ = "%s o %s" % (f.__name__, g.__name__) return fun def safe_enumerate(iterable): """Enumerate which does not yield None values""" for i, v in enumerate(iterable): if v is not None: yield i, v def split_title(title, width, title_fs): """Split a string for a specified width and font size""" titles = [] if not title: return titles size = reverse_text_len(width, title_fs * 1.1) title_lines = title.split("\n") for title_line in title_lines: while len(title_line) > size: title_part = title_line[:size] i = title_part.rfind(' ') if i == -1: i = len(title_part) titles.append(title_part[:i]) title_line = title_line[i:].strip() titles.append(title_line) return titles def filter_kwargs(fun, kwargs): if not hasattr(fun, '__code__'): return {} args = fun.__code__.co_varnames[1:] return dict((k, v) for k, v in kwargs.items() if k in args) def coord_project(rho, alpha): return rho * sin(-alpha), rho * cos(-alpha) def coord_diff(x, y): return (x[0] - y[0], x[1] - y[1]) def coord_format(x): return '%f %f' % x def coord_dual(r): return coord_format((r, r)) def coord_abs_project(center, rho, theta): return coord_format(coord_diff(center, coord_project(rho, theta))) def mergextend(list1, list2): if list1 is None or _ellipsis not in list1: return list1 index = list1.index(_ellipsis) return list(list1[:index]) + list(list2) + list(list1[index + 1:]) def merge(dict1, dict2): from pygal.config import CONFIG_ITEMS, Key _list_items = [item.name for item in CONFIG_ITEMS if item.type == list] for key, val in dict2.items(): if isinstance(val, Key): val = val.value if key in _list_items: dict1[key] = mergextend(val, dict1.get(key, ())) else: dict1[key] = val
from homeassistant.components.flo.const import DOMAIN as FLO_DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.setup import async_setup_component from .common import TEST_PASSWORD, TEST_USER_ID async def test_setup_entry(hass, config_entry, aioclient_mock_fixture): """Test migration of config entry from v1.""" config_entry.add_to_hass(hass) assert await async_setup_component( hass, FLO_DOMAIN, {CONF_USERNAME: TEST_USER_ID, CONF_PASSWORD: TEST_PASSWORD} ) await hass.async_block_till_done() assert len(hass.data[FLO_DOMAIN][config_entry.entry_id]["devices"]) == 1 assert await hass.config_entries.async_unload(config_entry.entry_id)
from copy import copy import voluptuous as vol from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity from homeassistant.components.media_player.const import ( ATTR_APP_ID, ATTR_APP_NAME, ATTR_INPUT_SOURCE, ATTR_INPUT_SOURCE_LIST, ATTR_MEDIA_ALBUM_ARTIST, ATTR_MEDIA_ALBUM_NAME, ATTR_MEDIA_ARTIST, ATTR_MEDIA_CHANNEL, ATTR_MEDIA_CONTENT_ID, ATTR_MEDIA_CONTENT_TYPE, ATTR_MEDIA_DURATION, ATTR_MEDIA_EPISODE, ATTR_MEDIA_PLAYLIST, ATTR_MEDIA_POSITION, ATTR_MEDIA_POSITION_UPDATED_AT, ATTR_MEDIA_SEASON, ATTR_MEDIA_SEEK_POSITION, ATTR_MEDIA_SERIES_TITLE, ATTR_MEDIA_SHUFFLE, ATTR_MEDIA_TITLE, ATTR_MEDIA_TRACK, ATTR_MEDIA_VOLUME_LEVEL, ATTR_MEDIA_VOLUME_MUTED, DOMAIN, SERVICE_CLEAR_PLAYLIST, SERVICE_PLAY_MEDIA, SERVICE_SELECT_SOURCE, SUPPORT_CLEAR_PLAYLIST, SUPPORT_SELECT_SOURCE, SUPPORT_SHUFFLE_SET, SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET, SUPPORT_VOLUME_STEP, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_ENTITY_PICTURE, ATTR_SUPPORTED_FEATURES, CONF_NAME, CONF_STATE, CONF_STATE_TEMPLATE, SERVICE_MEDIA_NEXT_TRACK, SERVICE_MEDIA_PAUSE, SERVICE_MEDIA_PLAY, SERVICE_MEDIA_PLAY_PAUSE, SERVICE_MEDIA_PREVIOUS_TRACK, SERVICE_MEDIA_SEEK, SERVICE_MEDIA_STOP, SERVICE_SHUFFLE_SET, SERVICE_TURN_OFF, SERVICE_TURN_ON, SERVICE_VOLUME_DOWN, SERVICE_VOLUME_MUTE, SERVICE_VOLUME_SET, SERVICE_VOLUME_UP, STATE_IDLE, STATE_OFF, STATE_ON, STATE_UNAVAILABLE, ) from homeassistant.core import EVENT_HOMEASSISTANT_START, callback from homeassistant.exceptions import TemplateError from homeassistant.helpers import config_validation as cv from homeassistant.helpers.event import TrackTemplate, async_track_template_result from homeassistant.helpers.reload import async_setup_reload_service from homeassistant.helpers.service import async_call_from_config ATTR_ACTIVE_CHILD = "active_child" ATTR_DATA = "data" CONF_ATTRS = "attributes" CONF_CHILDREN = "children" CONF_COMMANDS = "commands" CONF_SERVICE = "service" CONF_SERVICE_DATA = "service_data" OFF_STATES = [STATE_IDLE, STATE_OFF, STATE_UNAVAILABLE] ATTRS_SCHEMA = cv.schema_with_slug_keys(cv.string) CMD_SCHEMA = cv.schema_with_slug_keys(cv.SERVICE_SCHEMA) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_NAME): cv.string, vol.Optional(CONF_CHILDREN, default=[]): cv.entity_ids, vol.Optional(CONF_COMMANDS, default={}): CMD_SCHEMA, vol.Optional(CONF_ATTRS, default={}): vol.Or( cv.ensure_list(ATTRS_SCHEMA), ATTRS_SCHEMA ), vol.Optional(CONF_STATE_TEMPLATE): cv.template, }, extra=vol.REMOVE_EXTRA, ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the universal media players.""" await async_setup_reload_service(hass, "universal", ["media_player"]) player = UniversalMediaPlayer( hass, config.get(CONF_NAME), config.get(CONF_CHILDREN), config.get(CONF_COMMANDS), config.get(CONF_ATTRS), config.get(CONF_STATE_TEMPLATE), ) async_add_entities([player]) class UniversalMediaPlayer(MediaPlayerEntity): """Representation of an universal media player.""" def __init__(self, hass, name, children, commands, attributes, state_template=None): """Initialize the Universal media device.""" self.hass = hass self._name = name self._children = children self._cmds = commands self._attrs = {} for key, val in attributes.items(): attr = val.split("|", 1) if len(attr) == 1: attr.append(None) self._attrs[key] = attr self._child_state = None self._state_template_result = None self._state_template = state_template async def async_added_to_hass(self): """Subscribe to children and template 
state changes.""" @callback def _async_on_dependency_update(event): """Update ha state when dependencies update.""" self.async_set_context(event.context) self.async_schedule_update_ha_state(True) @callback def _async_on_template_update(event, updates): """Update ha state when dependencies update.""" result = updates.pop().result if isinstance(result, TemplateError): self._state_template_result = None else: self._state_template_result = result if event: self.async_set_context(event.context) self.async_schedule_update_ha_state(True) if self._state_template is not None: result = async_track_template_result( self.hass, [TrackTemplate(self._state_template, None)], _async_on_template_update, ) self.hass.bus.async_listen_once( EVENT_HOMEASSISTANT_START, callback(lambda _: result.async_refresh()) ) self.async_on_remove(result.async_remove) depend = copy(self._children) for entity in self._attrs.values(): depend.append(entity[0]) self.async_on_remove( self.hass.helpers.event.async_track_state_change_event( list(set(depend)), _async_on_dependency_update ) ) def _entity_lkp(self, entity_id, state_attr=None): """Look up an entity state.""" state_obj = self.hass.states.get(entity_id) if state_obj is None: return if state_attr: return state_obj.attributes.get(state_attr) return state_obj.state def _override_or_child_attr(self, attr_name): """Return either the override or the active child for attr_name.""" if attr_name in self._attrs: return self._entity_lkp( self._attrs[attr_name][0], self._attrs[attr_name][1] ) return self._child_attr(attr_name) def _child_attr(self, attr_name): """Return the active child's attributes.""" active_child = self._child_state return active_child.attributes.get(attr_name) if active_child else None async def _async_call_service( self, service_name, service_data=None, allow_override=False ): """Call either a specified or active child's service.""" if service_data is None: service_data = {} if allow_override and service_name in self._cmds: await async_call_from_config( self.hass, self._cmds[service_name], variables=service_data, blocking=True, validate_config=False, ) return active_child = self._child_state if active_child is None: # No child to call service on return service_data[ATTR_ENTITY_ID] = active_child.entity_id await self.hass.services.async_call( DOMAIN, service_name, service_data, blocking=True, context=self._context ) @property def should_poll(self): """No polling needed.""" return False @property def master_state(self): """Return the master state for entity or None.""" if self._state_template is not None: return self._state_template_result if CONF_STATE in self._attrs: master_state = self._entity_lkp( self._attrs[CONF_STATE][0], self._attrs[CONF_STATE][1] ) return master_state if master_state else STATE_OFF return None @property def name(self): """Return the name of universal player.""" return self._name @property def state(self): """Return the current state of media player. 
Off if master state is off else Status of first active child else master state or off """ master_state = self.master_state # avoid multiple lookups if (master_state == STATE_OFF) or (self._state_template is not None): return master_state active_child = self._child_state if active_child: return active_child.state return master_state if master_state else STATE_OFF @property def volume_level(self): """Volume level of entity specified in attributes or active child.""" try: return float(self._override_or_child_attr(ATTR_MEDIA_VOLUME_LEVEL)) except (TypeError, ValueError): return None @property def is_volume_muted(self): """Boolean if volume is muted.""" return self._override_or_child_attr(ATTR_MEDIA_VOLUME_MUTED) in [True, STATE_ON] @property def media_content_id(self): """Return the content ID of current playing media.""" return self._child_attr(ATTR_MEDIA_CONTENT_ID) @property def media_content_type(self): """Return the content type of current playing media.""" return self._child_attr(ATTR_MEDIA_CONTENT_TYPE) @property def media_duration(self): """Return the duration of current playing media in seconds.""" return self._child_attr(ATTR_MEDIA_DURATION) @property def media_image_url(self): """Image url of current playing media.""" return self._child_attr(ATTR_ENTITY_PICTURE) @property def entity_picture(self): """ Return image of the media playing. The universal media player doesn't use the parent class logic, since the url is coming from child entity pictures which have already been sent through the API proxy. """ return self.media_image_url @property def media_title(self): """Title of current playing media.""" return self._child_attr(ATTR_MEDIA_TITLE) @property def media_artist(self): """Artist of current playing media (Music track only).""" return self._child_attr(ATTR_MEDIA_ARTIST) @property def media_album_name(self): """Album name of current playing media (Music track only).""" return self._child_attr(ATTR_MEDIA_ALBUM_NAME) @property def media_album_artist(self): """Album artist of current playing media (Music track only).""" return self._child_attr(ATTR_MEDIA_ALBUM_ARTIST) @property def media_track(self): """Track number of current playing media (Music track only).""" return self._child_attr(ATTR_MEDIA_TRACK) @property def media_series_title(self): """Return the title of the series of current playing media (TV).""" return self._child_attr(ATTR_MEDIA_SERIES_TITLE) @property def media_season(self): """Season of current playing media (TV Show only).""" return self._child_attr(ATTR_MEDIA_SEASON) @property def media_episode(self): """Episode of current playing media (TV Show only).""" return self._child_attr(ATTR_MEDIA_EPISODE) @property def media_channel(self): """Channel currently playing.""" return self._child_attr(ATTR_MEDIA_CHANNEL) @property def media_playlist(self): """Title of Playlist currently playing.""" return self._child_attr(ATTR_MEDIA_PLAYLIST) @property def app_id(self): """ID of the current running app.""" return self._child_attr(ATTR_APP_ID) @property def app_name(self): """Name of the current running app.""" return self._child_attr(ATTR_APP_NAME) @property def source(self): """Return the current input source of the device.""" return self._override_or_child_attr(ATTR_INPUT_SOURCE) @property def source_list(self): """List of available input sources.""" return self._override_or_child_attr(ATTR_INPUT_SOURCE_LIST) @property def shuffle(self): """Boolean if shuffling is enabled.""" return self._override_or_child_attr(ATTR_MEDIA_SHUFFLE) @property def supported_features(self): 
"""Flag media player features that are supported.""" flags = self._child_attr(ATTR_SUPPORTED_FEATURES) or 0 if SERVICE_TURN_ON in self._cmds: flags |= SUPPORT_TURN_ON if SERVICE_TURN_OFF in self._cmds: flags |= SUPPORT_TURN_OFF if any([cmd in self._cmds for cmd in [SERVICE_VOLUME_UP, SERVICE_VOLUME_DOWN]]): flags |= SUPPORT_VOLUME_STEP if SERVICE_VOLUME_SET in self._cmds: flags |= SUPPORT_VOLUME_SET if SERVICE_VOLUME_MUTE in self._cmds and ATTR_MEDIA_VOLUME_MUTED in self._attrs: flags |= SUPPORT_VOLUME_MUTE if SERVICE_SELECT_SOURCE in self._cmds: flags |= SUPPORT_SELECT_SOURCE if SERVICE_CLEAR_PLAYLIST in self._cmds: flags |= SUPPORT_CLEAR_PLAYLIST if SERVICE_SHUFFLE_SET in self._cmds and ATTR_MEDIA_SHUFFLE in self._attrs: flags |= SUPPORT_SHUFFLE_SET return flags @property def device_state_attributes(self): """Return device specific state attributes.""" active_child = self._child_state return {ATTR_ACTIVE_CHILD: active_child.entity_id} if active_child else {} @property def media_position(self): """Position of current playing media in seconds.""" return self._child_attr(ATTR_MEDIA_POSITION) @property def media_position_updated_at(self): """When was the position of the current playing media valid.""" return self._child_attr(ATTR_MEDIA_POSITION_UPDATED_AT) async def async_turn_on(self): """Turn the media player on.""" await self._async_call_service(SERVICE_TURN_ON, allow_override=True) async def async_turn_off(self): """Turn the media player off.""" await self._async_call_service(SERVICE_TURN_OFF, allow_override=True) async def async_mute_volume(self, mute): """Mute the volume.""" data = {ATTR_MEDIA_VOLUME_MUTED: mute} await self._async_call_service(SERVICE_VOLUME_MUTE, data, allow_override=True) async def async_set_volume_level(self, volume): """Set volume level, range 0..1.""" data = {ATTR_MEDIA_VOLUME_LEVEL: volume} await self._async_call_service(SERVICE_VOLUME_SET, data, allow_override=True) async def async_media_play(self): """Send play command.""" await self._async_call_service(SERVICE_MEDIA_PLAY) async def async_media_pause(self): """Send pause command.""" await self._async_call_service(SERVICE_MEDIA_PAUSE) async def async_media_stop(self): """Send stop command.""" await self._async_call_service(SERVICE_MEDIA_STOP) async def async_media_previous_track(self): """Send previous track command.""" await self._async_call_service(SERVICE_MEDIA_PREVIOUS_TRACK) async def async_media_next_track(self): """Send next track command.""" await self._async_call_service(SERVICE_MEDIA_NEXT_TRACK) async def async_media_seek(self, position): """Send seek command.""" data = {ATTR_MEDIA_SEEK_POSITION: position} await self._async_call_service(SERVICE_MEDIA_SEEK, data) async def async_play_media(self, media_type, media_id, **kwargs): """Play a piece of media.""" data = {ATTR_MEDIA_CONTENT_TYPE: media_type, ATTR_MEDIA_CONTENT_ID: media_id} await self._async_call_service(SERVICE_PLAY_MEDIA, data) async def async_volume_up(self): """Turn volume up for media player.""" await self._async_call_service(SERVICE_VOLUME_UP, allow_override=True) async def async_volume_down(self): """Turn volume down for media player.""" await self._async_call_service(SERVICE_VOLUME_DOWN, allow_override=True) async def async_media_play_pause(self): """Play or pause the media player.""" await self._async_call_service(SERVICE_MEDIA_PLAY_PAUSE) async def async_select_source(self, source): """Set the input source.""" data = {ATTR_INPUT_SOURCE: source} await self._async_call_service(SERVICE_SELECT_SOURCE, data, allow_override=True) async def 
async_clear_playlist(self):
        """Clear the player's playlist."""
        await self._async_call_service(SERVICE_CLEAR_PLAYLIST)

    async def async_set_shuffle(self, shuffle):
        """Enable/disable shuffling."""
        data = {ATTR_MEDIA_SHUFFLE: shuffle}
        await self._async_call_service(SERVICE_SHUFFLE_SET, data, allow_override=True)

    async def async_update(self):
        """Update state in HA."""
        for child_name in self._children:
            child_state = self.hass.states.get(child_name)
            if child_state and child_state.state not in OFF_STATES:
                self._child_state = child_state
                return
        self._child_state = None
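
# Hedged illustration, not part of the component above: supported_features is
# composed as a bitmask, OR-ing in one flag per configured command and testing
# membership with bitwise AND.  The flag values and command names below are
# made up for this sketch; the real constants come from the media_player
# integration.


def _demo_compose_flags(configured_commands):
    """Return a feature bitmask for the given command names (illustrative only)."""
    demo_turn_on, demo_turn_off, demo_volume_set = 1 << 0, 1 << 1, 1 << 2
    flags = 0
    if "turn_on" in configured_commands:
        flags |= demo_turn_on
    if "turn_off" in configured_commands:
        flags |= demo_turn_off
    if "volume_set" in configured_commands:
        flags |= demo_volume_set
    return flags


assert _demo_compose_flags({"turn_on", "volume_set"}) & (1 << 0)  # turn_on flag set
assert not _demo_compose_flags({"turn_on", "volume_set"}) & (1 << 1)  # turn_off flag unset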
import abodepy.helpers.constants as CONST

from homeassistant.components.lock import LockEntity

from . import AbodeDevice
from .const import DOMAIN


async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up Abode lock devices."""
    data = hass.data[DOMAIN]

    entities = []

    for device in data.abode.get_devices(generic_type=CONST.TYPE_LOCK):
        entities.append(AbodeLock(data, device))

    async_add_entities(entities)


class AbodeLock(AbodeDevice, LockEntity):
    """Representation of an Abode lock."""

    def lock(self, **kwargs):
        """Lock the device."""
        self._device.lock()

    def unlock(self, **kwargs):
        """Unlock the device."""
        self._device.unlock()

    @property
    def is_locked(self):
        """Return true if the device is locked."""
        return self._device.is_locked
from __future__ import absolute_import from __future__ import division from __future__ import print_function import os import sys import tarfile from six.moves import urllib import tensorflow as tf LABELS_FILENAME = 'labels.txt' def int64_feature(values): """Returns a TF-Feature of int64s. Args: values: A scalar or list of values. Returns: a TF-Feature. """ if not isinstance(values, (tuple, list)): values = [values] return tf.train.Feature(int64_list=tf.train.Int64List(value=values)) def float_feature(value): """Wrapper for inserting float features into Example proto. """ if not isinstance(value, list): value = [value] return tf.train.Feature(float_list=tf.train.FloatList(value=value)) def bytes_feature(value): """Wrapper for inserting bytes features into Example proto. """ if not isinstance(value, list): value = [value] return tf.train.Feature(bytes_list=tf.train.BytesList(value=value)) def image_to_tfexample(image_data, image_format, height, width, class_id): return tf.train.Example(features=tf.train.Features(feature={ 'image/encoded': bytes_feature(image_data), 'image/format': bytes_feature(image_format), 'image/class/label': int64_feature(class_id), 'image/height': int64_feature(height), 'image/width': int64_feature(width), })) def download_and_uncompress_tarball(tarball_url, dataset_dir): """Downloads the `tarball_url` and uncompresses it locally. Args: tarball_url: The URL of a tarball file. dataset_dir: The directory where the temporary files are stored. """ filename = tarball_url.split('/')[-1] filepath = os.path.join(dataset_dir, filename) def _progress(count, block_size, total_size): sys.stdout.write('\r>> Downloading %s %.1f%%' % ( filename, float(count * block_size) / float(total_size) * 100.0)) sys.stdout.flush() filepath, _ = urllib.request.urlretrieve(tarball_url, filepath, _progress) print() statinfo = os.stat(filepath) print('Successfully downloaded', filename, statinfo.st_size, 'bytes.') tarfile.open(filepath, 'r:gz').extractall(dataset_dir) def write_label_file(labels_to_class_names, dataset_dir, filename=LABELS_FILENAME): """Writes a file with the list of class names. Args: labels_to_class_names: A map of (integer) labels to class names. dataset_dir: The directory in which the labels file should be written. filename: The filename where the class names are written. """ labels_filename = os.path.join(dataset_dir, filename) with tf.gfile.Open(labels_filename, 'w') as f: for label in labels_to_class_names: class_name = labels_to_class_names[label] f.write('%d:%s\n' % (label, class_name)) def has_labels(dataset_dir, filename=LABELS_FILENAME): """Specifies whether or not the dataset directory contains a label map file. Args: dataset_dir: The directory in which the labels file is found. filename: The filename where the class names are written. Returns: `True` if the labels file exists and `False` otherwise. """ return tf.gfile.Exists(os.path.join(dataset_dir, filename)) def read_label_file(dataset_dir, filename=LABELS_FILENAME): """Reads the labels file and returns a mapping from ID to class name. Args: dataset_dir: The directory in which the labels file is found. filename: The filename where the class names are written. Returns: A map from a label (integer) to class name. 
""" labels_filename = os.path.join(dataset_dir, filename) with tf.gfile.Open(labels_filename, 'r') as f: lines = f.read().decode() lines = lines.split('\n') lines = filter(None, lines) labels_to_class_names = {} for line in lines: index = line.index(':') labels_to_class_names[int(line[:index])] = line[index+1:] return labels_to_class_names
import argparse import multiprocessing import numpy as np import chainer from chainer.optimizer_hooks import WeightDecay from chainer import serializers from chainer import training from chainer.training import extensions from chainer.training import triggers import chainermn from chainercv.chainer_experimental.datasets.sliceable \ import ConcatenatedDataset from chainercv.chainer_experimental.datasets.sliceable import TransformDataset from chainercv.datasets import voc_bbox_label_names from chainercv.datasets import VOCBboxDataset from chainercv.extensions import DetectionVOCEvaluator from chainercv.links.model.ssd import GradientScaling from chainercv.links.model.ssd import multibox_loss from chainercv.links import SSD300 from chainercv.links import SSD512 from train import Transform # https://docs.chainer.org/en/stable/tips.html#my-training-process-gets-stuck-when-using-multiprocessiterator import cv2 cv2.setNumThreads(0) class MultiboxTrainChain(chainer.Chain): def __init__(self, model, alpha=1, k=3, comm=None): super(MultiboxTrainChain, self).__init__() with self.init_scope(): self.model = model self.alpha = alpha self.k = k self.comm = comm def forward(self, imgs, gt_mb_locs, gt_mb_labels): mb_locs, mb_confs = self.model(imgs) loc_loss, conf_loss = multibox_loss( mb_locs, mb_confs, gt_mb_locs, gt_mb_labels, self.k, self.comm) loss = loc_loss * self.alpha + conf_loss chainer.reporter.report( {'loss': loss, 'loss/loc': loc_loss, 'loss/conf': conf_loss}, self) return loss def main(): parser = argparse.ArgumentParser() parser.add_argument( '--model', choices=('ssd300', 'ssd512'), default='ssd300') parser.add_argument('--batchsize', type=int, default=32) parser.add_argument('--test-batchsize', type=int, default=16) parser.add_argument('--iteration', type=int, default=120000) parser.add_argument('--step', type=int, nargs='*', default=[80000, 100000]) parser.add_argument('--out', default='result') parser.add_argument('--resume') args = parser.parse_args() # https://docs.chainer.org/en/stable/chainermn/tutorial/tips_faqs.html#using-multiprocessiterator if hasattr(multiprocessing, 'set_start_method'): multiprocessing.set_start_method('forkserver') p = multiprocessing.Process() p.start() p.join() comm = chainermn.create_communicator('pure_nccl') device = comm.intra_rank if args.model == 'ssd300': model = SSD300( n_fg_class=len(voc_bbox_label_names), pretrained_model='imagenet') elif args.model == 'ssd512': model = SSD512( n_fg_class=len(voc_bbox_label_names), pretrained_model='imagenet') model.use_preset('evaluate') train_chain = MultiboxTrainChain(model, comm=comm) chainer.cuda.get_device_from_id(device).use() model.to_gpu() train = TransformDataset( ConcatenatedDataset( VOCBboxDataset(year='2007', split='trainval'), VOCBboxDataset(year='2012', split='trainval') ), ('img', 'mb_loc', 'mb_label'), Transform(model.coder, model.insize, model.mean)) if comm.rank == 0: indices = np.arange(len(train)) else: indices = None indices = chainermn.scatter_dataset(indices, comm, shuffle=True) train = train.slice[indices] train_iter = chainer.iterators.MultiprocessIterator( train, args.batchsize // comm.size, n_processes=2) if comm.rank == 0: test = VOCBboxDataset( year='2007', split='test', use_difficult=True, return_difficult=True) test_iter = chainer.iterators.SerialIterator( test, args.test_batchsize, repeat=False, shuffle=False) # initial lr is set to 1e-3 by ExponentialShift optimizer = chainermn.create_multi_node_optimizer( chainer.optimizers.MomentumSGD(), comm) optimizer.setup(train_chain) for 
param in train_chain.params(): if param.name == 'b': param.update_rule.add_hook(GradientScaling(2)) else: param.update_rule.add_hook(WeightDecay(0.0005)) updater = training.updaters.StandardUpdater( train_iter, optimizer, device=device) trainer = training.Trainer( updater, (args.iteration, 'iteration'), args.out) trainer.extend( extensions.ExponentialShift('lr', 0.1, init=1e-3), trigger=triggers.ManualScheduleTrigger(args.step, 'iteration')) if comm.rank == 0: trainer.extend( DetectionVOCEvaluator( test_iter, model, use_07_metric=True, label_names=voc_bbox_label_names), trigger=triggers.ManualScheduleTrigger( args.step + [args.iteration], 'iteration')) log_interval = 10, 'iteration' trainer.extend(extensions.LogReport(trigger=log_interval)) trainer.extend(extensions.observe_lr(), trigger=log_interval) trainer.extend(extensions.PrintReport( ['epoch', 'iteration', 'lr', 'main/loss', 'main/loss/loc', 'main/loss/conf', 'validation/main/map']), trigger=log_interval) trainer.extend(extensions.ProgressBar(update_interval=10)) trainer.extend( extensions.snapshot(), trigger=triggers.ManualScheduleTrigger( args.step + [args.iteration], 'iteration')) trainer.extend( extensions.snapshot_object( model, 'model_iter_{.updater.iteration}'), trigger=(args.iteration, 'iteration')) if args.resume: serializers.load_npz(args.resume, trainer) trainer.run() if __name__ == '__main__': main()
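
# Hedged note on running the training script above: the pure_nccl ChainerMN
# communicator expects one MPI process per GPU, so the script is typically
# launched through MPI (e.g. "mpiexec -n 4 python train_multi.py"), and each
# worker then consumes args.batchsize // comm.size samples per iteration.
# The helper below just spells out that division; the numbers are arbitrary.


def per_worker_batchsize(global_batchsize, n_workers):
    """Split the global batch size evenly across workers (integer division)."""
    return global_batchsize // n_workers


assert per_worker_batchsize(32, 4) == 8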
class ObjCClass(object): def __init__(self, *args, **kwargs): pass def __call__(self, *args, **kwargs): return ObjCClass() def __getattr__(self, item): return ObjCClass() class ObjCInstance(ObjCClass): pass class UIColor(ObjCClass): @classmethod def blackColor(cls): pass @classmethod def redColor(cls): pass @classmethod def greenColor(cls): pass @classmethod def brownColor(cls): pass @classmethod def blueColor(cls): pass @classmethod def magentaColor(cls): pass @classmethod def cyanColor(cls): pass @classmethod def whiteColor(cls): pass @classmethod def grayColor(cls): pass @classmethod def yellowColor(cls): pass @classmethod def colorWithRed_green_blue_alpha_(cls, *args, **kwargs): pass class NSRange(ObjCClass): pass def create_objc_class(*args, **kwargs): return ObjCClass() def ns(*args, **kwargs): return ObjCInstance() def on_main_thread(func): return func class ctypes(object): class pythonapi(object): @staticmethod def PyThreadState_SetAsyncExc( tid, exectype, ): return 1 @staticmethod def c_long(val): return val @staticmethod def py_object(val): return val
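
# Hedged illustration of the stubs above: any attribute access or call on
# ObjCClass just yields another ObjCClass, so chained Objective-C style
# lookups never raise when the real bridge is unavailable, and the fake
# ctypes.pythonapi call simply reports success.  The selector names are
# made up.

_stub = ObjCClass()
assert isinstance(_stub.someSelector.anotherSelector(), ObjCClass)
assert ctypes.pythonapi.PyThreadState_SetAsyncExc(0, None) == 1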
import posixpath from .utils import fix_filename, make_temp_dir, NoEscape, escape_latex from .base_classes import Float, UnsafeCommand from .package import Package import uuid class Figure(Float): """A class that represents a Figure environment.""" def add_image(self, filename, *, width=NoEscape(r'0.8\textwidth'), placement=NoEscape(r'\centering')): """Add an image to the figure. Args ---- filename: str Filename of the image. width: str The width of the image placement: str Placement of the figure, `None` is also accepted. """ if width is not None: if self.escape: width = escape_latex(width) width = 'width=' + str(width) if placement is not None: self.append(placement) self.append(StandAloneGraphic(image_options=width, filename=fix_filename(filename))) def _save_plot(self, *args, extension='pdf', **kwargs): """Save the plot. Returns ------- str The basename with which the plot has been saved. """ import matplotlib.pyplot as plt tmp_path = make_temp_dir() filename = '{}.{}'.format(str(uuid.uuid4()), extension.strip('.')) filepath = posixpath.join(tmp_path, filename) plt.savefig(filepath, *args, **kwargs) return filepath def add_plot(self, *args, extension='pdf', **kwargs): """Add the current Matplotlib plot to the figure. The plot that gets added is the one that would normally be shown when using ``plt.show()``. Args ---- args: Arguments passed to plt.savefig for displaying the plot. extension : str extension of image file indicating figure file type kwargs: Keyword arguments passed to plt.savefig for displaying the plot. In case these contain ``width`` or ``placement``, they will be used for the same purpose as in the add_image command. Namely the width and placement of the generated plot in the LaTeX document. """ add_image_kwargs = {} for key in ('width', 'placement'): if key in kwargs: add_image_kwargs[key] = kwargs.pop(key) filename = self._save_plot(*args, extension=extension, **kwargs) self.add_image(filename, **add_image_kwargs) class SubFigure(Figure): """A class that represents a subfigure from the subcaption package.""" packages = [Package('subcaption')] #: By default a subfigure is not on its own paragraph since that looks #: weird inside another figure. separate_paragraph = False _repr_attributes_mapping = { 'width': 'arguments', } def __init__(self, width=NoEscape(r'0.45\linewidth'), **kwargs): """ Args ---- width: str Width of the subfigure itself. It needs a width because it is inside another figure. """ super().__init__(arguments=width, **kwargs) def add_image(self, filename, *, width=NoEscape(r'\linewidth'), placement=None): """Add an image to the subfigure. Args ---- filename: str Filename of the image. width: str Width of the image in LaTeX terms. placement: str Placement of the figure, `None` is also accepted. """ super().add_image(filename, width=width, placement=placement) class StandAloneGraphic(UnsafeCommand): r"""A class representing a stand alone image.""" _latex_name = "includegraphics" packages = [Package('graphicx')] _repr_attributes_mapping = { "filename": "arguments", "image_options": "options" } def __init__(self, filename, image_options=NoEscape(r'width=0.8\textwidth'), extra_arguments=None): r""" Args ---- filename: str The path to the image file image_options: str or `list` Specifies the options for the image (ie. height, width) """ arguments = [NoEscape(filename)] super().__init__(command=self._latex_name, arguments=arguments, options=image_options, extra_arguments=extra_arguments)
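
# Hedged usage sketch for the Figure class above, assuming it is used through
# the pylatex package it normally ships in.  'demo.png' is a hypothetical
# image path, and generate_tex only writes the .tex source, so no LaTeX
# toolchain is needed to run it.

if __name__ == '__main__':
    from pylatex import Document, Figure, NoEscape

    doc = Document('figure_demo')
    with doc.create(Figure(position='htbp')) as fig:
        fig.add_image('demo.png', width=NoEscape(r'0.5\textwidth'))
        fig.add_caption('A hypothetical demo image.')
    doc.generate_tex()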
import logging from requests import RequestException import voluptuous as vol from homeassistant.components.climate import ( PLATFORM_SCHEMA, SCAN_INTERVAL, TEMP_CELSIUS, ClimateEntity, ) from homeassistant.components.climate.const import ( CURRENT_HVAC_HEAT, CURRENT_HVAC_IDLE, HVAC_MODE_HEAT, SUPPORT_TARGET_TEMPERATURE, ) from homeassistant.const import ATTR_TEMPERATURE, CONF_SCAN_INTERVAL from homeassistant.helpers.update_coordinator import ( CoordinatorEntity, DataUpdateCoordinator, UpdateFailed, ) from . import DATA_SCHLUTER_API, DATA_SCHLUTER_SESSION, DOMAIN _LOGGER = logging.getLogger(__name__) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Optional(CONF_SCAN_INTERVAL): vol.All(vol.Coerce(int), vol.Range(min=1))} ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Schluter thermostats.""" if discovery_info is None: return session_id = hass.data[DOMAIN][DATA_SCHLUTER_SESSION] api = hass.data[DOMAIN][DATA_SCHLUTER_API] async def async_update_data(): try: thermostats = await hass.async_add_executor_job( api.get_thermostats, session_id ) except RequestException as err: raise UpdateFailed(f"Error communicating with Schluter API: {err}") from err if thermostats is None: return {} return {thermo.serial_number: thermo for thermo in thermostats} coordinator = DataUpdateCoordinator( hass, _LOGGER, name="schluter", update_method=async_update_data, update_interval=SCAN_INTERVAL, ) await coordinator.async_refresh() async_add_entities( SchluterThermostat(coordinator, serial_number, api, session_id) for serial_number, thermostat in coordinator.data.items() ) class SchluterThermostat(CoordinatorEntity, ClimateEntity): """Representation of a Schluter thermostat.""" def __init__(self, coordinator, serial_number, api, session_id): """Initialize the thermostat.""" super().__init__(coordinator) self._serial_number = serial_number self._api = api self._session_id = session_id self._support_flags = SUPPORT_TARGET_TEMPERATURE @property def supported_features(self): """Return the list of supported features.""" return self._support_flags @property def unique_id(self): """Return unique ID for this device.""" return self._serial_number @property def name(self): """Return the name of the thermostat.""" return self.coordinator.data[self._serial_number].name @property def temperature_unit(self): """Schluter API always uses celsius.""" return TEMP_CELSIUS @property def current_temperature(self): """Return the current temperature.""" return self.coordinator.data[self._serial_number].temperature @property def hvac_mode(self): """Return current mode. Only heat available for floor thermostat.""" return HVAC_MODE_HEAT @property def hvac_action(self): """Return current operation. 
Can only be heating or idle."""
        return (
            CURRENT_HVAC_HEAT
            if self.coordinator.data[self._serial_number].is_heating
            else CURRENT_HVAC_IDLE
        )

    @property
    def target_temperature(self):
        """Return the temperature we try to reach."""
        return self.coordinator.data[self._serial_number].set_point_temp

    @property
    def hvac_modes(self):
        """Return the list of available operation modes."""
        return [HVAC_MODE_HEAT]

    @property
    def min_temp(self):
        """Return the minimum temperature reported by the Schluter API."""
        return self.coordinator.data[self._serial_number].min_temp

    @property
    def max_temp(self):
        """Return the maximum temperature reported by the Schluter API."""
        return self.coordinator.data[self._serial_number].max_temp

    async def async_set_hvac_mode(self, hvac_mode):
        """Mode is always heating, so do nothing."""

    def set_temperature(self, **kwargs):
        """Set new target temperature."""
        target_temp = kwargs.get(ATTR_TEMPERATURE)
        serial_number = self.coordinator.data[self._serial_number].serial_number
        _LOGGER.debug("Setting thermostat temperature: %s", target_temp)

        try:
            if target_temp is not None:
                self._api.set_temperature(self._session_id, serial_number, target_temp)
        except RequestException as ex:
            _LOGGER.error("An error occurred while setting temperature: %s", ex)
from datetime import timedelta import time import pytest import zigpy.zcl.clusters.general as general from homeassistant.components.device_tracker import DOMAIN, SOURCE_TYPE_ROUTER from homeassistant.components.zha.core.registries import ( SMARTTHINGS_ARRIVAL_SENSOR_DEVICE_TYPE, ) from homeassistant.const import STATE_HOME, STATE_NOT_HOME, STATE_UNAVAILABLE import homeassistant.util.dt as dt_util from .common import ( async_enable_traffic, async_test_rejoin, find_entity_id, send_attributes_report, ) from tests.common import async_fire_time_changed @pytest.fixture def zigpy_device_dt(zigpy_device_mock): """Device tracker zigpy device.""" endpoints = { 1: { "in_clusters": [ general.Basic.cluster_id, general.PowerConfiguration.cluster_id, general.Identify.cluster_id, general.PollControl.cluster_id, general.BinaryInput.cluster_id, ], "out_clusters": [general.Identify.cluster_id, general.Ota.cluster_id], "device_type": SMARTTHINGS_ARRIVAL_SENSOR_DEVICE_TYPE, } } return zigpy_device_mock(endpoints) async def test_device_tracker(hass, zha_device_joined_restored, zigpy_device_dt): """Test zha device tracker platform.""" zha_device = await zha_device_joined_restored(zigpy_device_dt) cluster = zigpy_device_dt.endpoints.get(1).power entity_id = await find_entity_id(DOMAIN, zha_device, hass) assert entity_id is not None assert hass.states.get(entity_id).state == STATE_HOME await async_enable_traffic(hass, [zha_device], enabled=False) # test that the device tracker was created and that it is unavailable assert hass.states.get(entity_id).state == STATE_UNAVAILABLE zigpy_device_dt.last_seen = time.time() - 120 next_update = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, next_update) await hass.async_block_till_done() # allow traffic to flow through the gateway and device await async_enable_traffic(hass, [zha_device]) # test that the state has changed from unavailable to not home assert hass.states.get(entity_id).state == STATE_NOT_HOME # turn state flip await send_attributes_report( hass, cluster, {0x0000: 0, 0x0020: 23, 0x0021: 200, 0x0001: 2} ) zigpy_device_dt.last_seen = time.time() + 10 next_update = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, next_update) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_HOME entity = hass.data[DOMAIN].get_entity(entity_id) assert entity.is_connected is True assert entity.source_type == SOURCE_TYPE_ROUTER assert entity.battery_level == 100 # test adding device tracker to the network and HA await async_test_rejoin(hass, zigpy_device_dt, [cluster], (2,)) assert hass.states.get(entity_id).state == STATE_HOME
from pyspark import SparkContext, SparkConf from pyspark.sql import SQLContext import pytest import logging def quiet_py4j(): """ turn down spark logging for the test context """ logger = logging.getLogger('py4j') logger.setLevel(logging.WARN) @pytest.fixture(scope="session") def spark_context(request): """ fixture for creating a SparkContext Args: request: pytest.FixtureRequest object """ conf = (SparkConf().setMaster("local[2]").setAppName( "pytest-pyspark-local-testing")) sc = SparkContext(conf=conf) request.addfinalizer(lambda: sc.stop()) quiet_py4j() return sc @pytest.fixture(scope="session") def sql_context(request): """ fixture for creating a Spark SQLContext Args: request: pytest.FixtureRequest object """ conf = (SparkConf().setMaster("local[2]").setAppName( "pytest-pyspark-local-testing")) sc = SparkContext(conf=conf) sql_context = SQLContext(sc) request.addfinalizer(lambda: sc.stop()) quiet_py4j() return sql_context
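
# Hedged example of a test that would consume the spark_context fixture above;
# pytest injects it by argument name.  The corpus lines and expected counts
# are made up.


def test_word_count_example(spark_context):
    """ count words with the session-scoped SparkContext fixture """
    rdd = spark_context.parallelize(["a b", "b c", "c"])
    counts = dict(rdd.flatMap(lambda line: line.split()).countByValue())
    assert counts == {"a": 1, "b": 2, "c": 2}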
from django.contrib.admin import ModelAdmin from django.contrib.admin import site from django.contrib.admin.views.main import ChangeList from django.contrib.auth.models import User from django.contrib.sites.models import Site from django.test import RequestFactory from django.test import TestCase from django.test.utils import override_settings from django.utils.translation import activate from django.utils.translation import deactivate from zinnia.admin.filters import AuthorListFilter from zinnia.admin.filters import CategoryListFilter from zinnia.managers import PUBLISHED from zinnia.models.author import Author from zinnia.models.category import Category from zinnia.models.entry import Entry from zinnia.signals import disconnect_entry_signals from zinnia.tests.utils import skip_if_custom_user class MiniEntryAuthorAdmin(ModelAdmin): list_filter = [AuthorListFilter] class MiniEntryCategoryAdmin(ModelAdmin): list_filter = [CategoryListFilter] @override_settings( ROOT_URLCONF='zinnia.tests.implementations.urls.default' ) class BaseListFilterTestCase(TestCase): """Base TestCase for testing Filters""" def setUp(self): disconnect_entry_signals() activate('en') self.root = User.objects.create_superuser( 'root', '[email protected]', 'toor' ) self.request_factory = RequestFactory() self.site = Site.objects.get_current() params = {'title': 'My entry 1', 'content': 'My content 1', 'status': PUBLISHED, 'slug': 'my-entry-1'} self.entry_1 = Entry.objects.create(**params) self.entry_1.sites.add(self.site) params = {'title': 'My entry 2', 'content': 'My content 2', 'status': PUBLISHED, 'slug': 'my-entry-2'} self.entry_2 = Entry.objects.create(**params) self.entry_2.sites.add(self.site) params = {'title': 'My entry draft', 'content': 'My content draft', 'slug': 'my-entry-draft'} self.entry_draft = Entry.objects.create(**params) self.entry_draft.sites.add(self.site) def tearDown(self): deactivate() def get_changelist(self, request, model, modeladmin): return ChangeList( request, model, modeladmin.list_display, modeladmin.list_display_links, modeladmin.list_filter, modeladmin.date_hierarchy, modeladmin.search_fields, modeladmin.list_select_related, modeladmin.list_per_page, modeladmin.list_max_show_all, modeladmin.list_editable, modeladmin, modeladmin.sortable_by) @skip_if_custom_user class AuthorListFilterTestCase(BaseListFilterTestCase): """Test case for AuthorListFilter""" def setUp(self): super(AuthorListFilterTestCase, self).setUp() self.authors = [ Author.objects.create_user(username='webmaster', email='[email protected]'), Author.objects.create_user(username='contributor', email='[email protected]'), Author.objects.create_user(username='reader', email='[email protected]')] self.entry_1.authors.add(self.authors[0]) self.entry_2.authors.add(*self.authors[:-1]) self.entry_draft.authors.add(*self.authors) def test_filter(self): modeladmin = MiniEntryAuthorAdmin(Entry, site) request = self.request_factory.get('/') request.user = self.root changelist = self.get_changelist(request, Entry, modeladmin) queryset = changelist.get_queryset(request) self.assertEqual(queryset.count(), 3) request = self.request_factory.get('/', {'author': self.authors[1].pk}) request.user = self.root changelist = self.get_changelist(request, Entry, modeladmin) queryset = changelist.get_queryset(request) self.assertEqual(queryset.count(), 2) with self.assertNumQueries(1): filterspec = changelist.get_filters(request)[0][0] self.assertEqual(filterspec.title, 'published authors') self.assertEqual(filterspec.used_parameters, {'author': 
str(self.authors[1].pk)}) self.assertEqual(filterspec.lookup_choices, [(str(self.authors[0].pk), 'webmaster (2 entries)'), (str(self.authors[1].pk), 'contributor (1 entry)')]) @skip_if_custom_user class CategoryListFilterTestCase(BaseListFilterTestCase): """Test case for CategoryListFilter""" def setUp(self): super(CategoryListFilterTestCase, self).setUp() self.categories = [ Category.objects.create(title='Category 1', slug='cat-1'), Category.objects.create(title='Category 2', slug='cat-2'), Category.objects.create(title='Category 3', slug='cat-3')] self.entry_1.categories.add(self.categories[0]) self.entry_2.categories.add(*self.categories[:-1]) self.entry_draft.categories.add(*self.categories) def test_filter(self): modeladmin = MiniEntryCategoryAdmin(Entry, site) request = self.request_factory.get('/') request.user = self.root changelist = self.get_changelist(request, Entry, modeladmin) queryset = changelist.get_queryset(request) self.assertEqual(queryset.count(), 3) request = self.request_factory.get( '/', {'category': str(self.categories[1].pk)} ) request.user = self.root changelist = self.get_changelist(request, Entry, modeladmin) queryset = changelist.get_queryset(request) self.assertEqual(queryset.count(), 2) with self.assertNumQueries(1): filterspec = changelist.get_filters(request)[0][0] self.assertEqual(filterspec.title, 'published categories') self.assertEqual(filterspec.used_parameters, {'category': str(self.categories[1].pk)}) self.assertEqual(filterspec.lookup_choices, [(str(self.categories[0].pk), 'Category 1 (2 entries)'), (str(self.categories[1].pk), 'Category 2 (1 entry)')])
import vcr import zlib import json import http.client as httplib from assertions import assert_is_json def _headers_are_case_insensitive(host, port): conn = httplib.HTTPConnection(host, port) conn.request("GET", "/cookies/set?k1=v1") r1 = conn.getresponse() cookie_data1 = r1.getheader("set-cookie") conn = httplib.HTTPConnection(host, port) conn.request("GET", "/cookies/set?k1=v1") r2 = conn.getresponse() cookie_data2 = r2.getheader("Set-Cookie") return cookie_data1 == cookie_data2 def test_case_insensitivity(tmpdir, httpbin): testfile = str(tmpdir.join("case_insensitivity.yml")) # check if headers are case insensitive outside of vcrpy host, port = httpbin.host, httpbin.port outside = _headers_are_case_insensitive(host, port) with vcr.use_cassette(testfile): # check if headers are case insensitive inside of vcrpy inside = _headers_are_case_insensitive(host, port) # check if headers are case insensitive after vcrpy deserializes headers inside2 = _headers_are_case_insensitive(host, port) # behavior should be the same both inside and outside assert outside == inside == inside2 def _multiple_header_value(httpbin): conn = httplib.HTTPConnection(httpbin.host, httpbin.port) conn.request("GET", "/response-headers?foo=bar&foo=baz") r = conn.getresponse() return r.getheader("foo") def test_multiple_headers(tmpdir, httpbin): testfile = str(tmpdir.join("multiple_headers.yaml")) outside = _multiple_header_value(httpbin) with vcr.use_cassette(testfile): inside = _multiple_header_value(httpbin) assert outside == inside def test_original_decoded_response_is_not_modified(tmpdir, httpbin): testfile = str(tmpdir.join("decoded_response.yml")) host, port = httpbin.host, httpbin.port conn = httplib.HTTPConnection(host, port) conn.request("GET", "/gzip") outside = conn.getresponse() with vcr.use_cassette(testfile, decode_compressed_response=True): conn = httplib.HTTPConnection(host, port) conn.request("GET", "/gzip") inside = conn.getresponse() # Assert that we do not modify the original response while appending # to the casssette. assert "gzip" == inside.headers["content-encoding"] # They should effectively be the same response. inside_headers = (h for h in inside.headers.items() if h[0].lower() != "date") outside_headers = (h for h in outside.getheaders() if h[0].lower() != "date") assert set(inside_headers) == set(outside_headers) inside = zlib.decompress(inside.read(), 16 + zlib.MAX_WBITS) outside = zlib.decompress(outside.read(), 16 + zlib.MAX_WBITS) assert inside == outside # Even though the above are raw bytes, the JSON data should have been # decoded and saved to the cassette. 
with vcr.use_cassette(testfile): conn = httplib.HTTPConnection(host, port) conn.request("GET", "/gzip") inside = conn.getresponse() assert "content-encoding" not in inside.headers assert_is_json(inside.read()) def _make_before_record_response(fields, replacement="[REDACTED]"): def before_record_response(response): string_body = response["body"]["string"].decode("utf8") body = json.loads(string_body) for field in fields: if field in body: body[field] = replacement response["body"]["string"] = json.dumps(body).encode() return response return before_record_response def test_original_response_is_not_modified_by_before_filter(tmpdir, httpbin): testfile = str(tmpdir.join("sensitive_data_scrubbed_response.yml")) host, port = httpbin.host, httpbin.port field_to_scrub = "url" replacement = "[YOU_CANT_HAVE_THE_MANGO]" conn = httplib.HTTPConnection(host, port) conn.request("GET", "/get") outside = conn.getresponse() callback = _make_before_record_response([field_to_scrub], replacement) with vcr.use_cassette(testfile, before_record_response=callback): conn = httplib.HTTPConnection(host, port) conn.request("GET", "/get") inside = conn.getresponse() # The scrubbed field should be the same, because no cassette existed. # Furthermore, the responses should be identical. inside_body = json.loads(inside.read().decode("utf-8")) outside_body = json.loads(outside.read().decode("utf-8")) assert not inside_body[field_to_scrub] == replacement assert inside_body[field_to_scrub] == outside_body[field_to_scrub] # Ensure that when a cassette exists, the scrubbed response is returned. with vcr.use_cassette(testfile, before_record_response=callback): conn = httplib.HTTPConnection(host, port) conn.request("GET", "/get") inside = conn.getresponse() inside_body = json.loads(inside.read().decode("utf-8")) assert inside_body[field_to_scrub] == replacement
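
# Hedged standalone check of the scrubbing callback built above: it operates on
# the cassette's response dict, whose body lives under
# response["body"]["string"] as bytes.  The payload here is made up.


def test_before_record_response_scrubs_selected_field():
    scrub = _make_before_record_response(["url"], "[REDACTED]")
    fake = {"body": {"string": json.dumps({"url": "http://secret", "ok": 1}).encode("utf8")}}
    scrubbed = scrub(fake)
    assert json.loads(scrubbed["body"]["string"])["url"] == "[REDACTED]"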
import getpass import logging import os import shutil import re import tempfile from git import Repo from packaging import version from kalliope.core.ConfigurationManager import SettingLoader from kalliope.core.ConfigurationManager.DnaLoader import DnaLoader from kalliope.core.Models import Neuron from kalliope.core.NeuronLauncher import NeuronLauncher from kalliope.core.Utils import Utils logging.basicConfig() logger = logging.getLogger("kalliope") # Global values for processing: # LOCAL_TMP_FOLDER = "/tmp/kalliope/resources/" TMP_GIT_FOLDER = "kalliope_new_module_temp_name" DNA_FILE_NAME = "dna.yml" INSTALL_FILE_NAME = "install.yml" # Global values for required parameters in DNA: DNA_NAME = "name" DNA_TYPE = "type" # Global_Names for 'types' to match: TYPE_NEURON = "neuron" TYPE_TTS = "tts" TYPE_STT = "stt" TYPE_TRIGGER = "trigger" TYPE_SIGNAL = "signal" class ResourcesManagerException(Exception): pass class ResourcesManager(object): def __init__(self, **kwargs): """ This class is used to manage community resources. :param kwargs: git-url: the url of the module to clone and install """ super(ResourcesManager, self).__init__() # get settings sl = SettingLoader() self.settings = sl.settings # in case of update or install, url where self.git_url = kwargs.get('git_url', None) # temp path where we install the new module self.tmp_path = tempfile.gettempdir() + "/kalliope/resources/" + TMP_GIT_FOLDER self.dna_file_path = self.tmp_path + os.sep + DNA_FILE_NAME self.install_file_path = self.tmp_path + os.sep + INSTALL_FILE_NAME self.dna = None self.sudo_password = kwargs.get('sudo_password', None) def install(self, force=False): """ Module installation method. :arg force: True to skip the version compatibility :return Dna object if resource installed """ # first, we clone the repo self._clone_repo(path=self.tmp_path, git_url=self.git_url) # check the content of the cloned repo if not self.is_repo_ok(dna_file_path=self.dna_file_path, install_file_path=self.install_file_path): logger.debug("[ResourcesManager] Invalid resource repository") self.cleanup_after_failed_installation() raise ResourcesManagerException("Invalid resource repository") # Load the dna.yml file self.dna = DnaLoader(self.dna_file_path).get_dna() if self.dna is None: logger.debug("[ResourcesManager] No DNA file found in the resource to install") self.cleanup_after_failed_installation() raise ResourcesManagerException("No DNA file found in the resource to install") logger.debug("[ResourcesManager] DNA file content: " + str(self.dna)) if not self.is_settings_ok(resources=self.settings.resources, dna=self.dna): logger.debug("[ResourcesManager] Invalid settings") self.cleanup_after_failed_installation() raise ResourcesManagerException("Invalid settings") # the dna file is ok, check the supported version if not force: if not self._check_supported_version(current_version=self.settings.kalliope_version, supported_versions=self.dna.kalliope_supported_version): logger.debug("[ResourcesManager] Non supported version") self.cleanup_after_failed_installation() raise ResourcesManagerException("Non supported version") # Let's find the target folder depending the type module_type = self.dna.module_type.lower() target_folder = self._get_target_folder(resources=self.settings.resources, module_type=module_type) if target_folder is None: self.cleanup_after_failed_installation() raise ResourcesManagerException("No resource folder set in settings") # let's move the tmp folder in the right folder and get a new path for the module module_name = 
self.dna.name.lower() target_path = self._rename_temp_folder(name=self.dna.name.lower(), target_folder=target_folder, tmp_path=self.tmp_path) # if the target_path exists, then run the install file within the new repository if target_path is None: raise ResourcesManagerException("Resource already present") else: self.install_file_path = target_path + os.sep + INSTALL_FILE_NAME if self.run_ansible_playbook_module(install_file_path=self.install_file_path): Utils.print_success("Module: %s installed" % module_name) return self.dna else: Utils.print_danger("Module: %s not installed" % module_name) return None def uninstall(self, neuron_name=None, tts_name=None, stt_name=None, trigger_name=None, signal_name=None): """ Uninstall a community resource """ target_path_to_delete = None module_name = "" if neuron_name is not None: target_path_to_delete = self._get_target_folder(resources=self.settings.resources, module_type=TYPE_NEURON) module_name = neuron_name if tts_name is not None: target_path_to_delete = self._get_target_folder(resources=self.settings.resources, module_type=TYPE_TTS) module_name = tts_name if stt_name is not None: target_path_to_delete = self._get_target_folder(resources=self.settings.resources, module_type=TYPE_STT) module_name = stt_name if trigger_name is not None: target_path_to_delete = self._get_target_folder(resources=self.settings.resources, module_type=TYPE_TRIGGER) module_name = trigger_name if signal_name is not None: target_path_to_delete = self._get_target_folder(resources=self.settings.resources, module_type=TYPE_SIGNAL) module_name = signal_name if target_path_to_delete is not None: try: shutil.rmtree(target_path_to_delete + os.sep + module_name.lower()) Utils.print_success("Module %s deleted" % module_name.lower()) except shutil.Error: Utils.print_warning("The module %s doest not exist in the path %s" % (module_name.lower(), target_path_to_delete)) except OSError: Utils.print_warning( "The module %s doest not exist in the path %s" % (module_name.lower(), target_path_to_delete)) @staticmethod def is_settings_ok(resources, dna): """ Test if required settings files in config of Kalliope are ok. The resource object must not be empty Check id the use have set the an installation path in his settings for the target module type :param resources: the Resources model :param dna: DNA info about the module to install :return: """ settings_ok = True if resources is None: message = "Resources folder not set in settings, cannot install." logger.debug(message) Utils.print_danger(message) settings_ok = False else: if dna.module_type == "neuron" and resources.neuron_folder is None: message = "Resources folder for neuron installation not set in settings, cannot install." logger.debug(message) Utils.print_danger(message) settings_ok = False if dna.module_type == "stt" and resources.stt_folder is None: message = "Resources folder for stt installation not set in settings, cannot install." logger.debug(message) Utils.print_danger(message) settings_ok = False if dna.module_type == "tts" and resources.tts_folder is None: message = "Resources folder for tts installation not set in settings, cannot install." logger.debug(message) Utils.print_danger(message) settings_ok = False if dna.module_type == "trigger" and resources.trigger_folder is None: message = "Resources folder for trigger installation not set in settings, cannot install." 
logger.debug(message) Utils.print_danger(message) settings_ok = False if dna.module_type == "signal" and resources.signal_folder is None: message = "Resources folder for signal installation not set in settings, cannot install." logger.debug(message) Utils.print_danger(message) settings_ok = False return settings_ok @staticmethod def is_repo_ok(dna_file_path, install_file_path): """ Check if the git cloned repo is fine to be installed :return: True if repo is ok to be installed, False otherwise """ Utils.print_info("Checking repository...") repo_ok = True # check that a install.yml file is present if not os.path.exists(install_file_path): Utils.print_danger("Missing %s file" % INSTALL_FILE_NAME) repo_ok = False if not os.path.exists(dna_file_path): Utils.print_danger("Missing %s file" % DNA_FILE_NAME) repo_ok = False return repo_ok @staticmethod def _get_target_folder(resources, module_type): """ Return the folder from the resources and given a module type :param resources: Resource object :type resources: Resources :param module_type: type of the module (TYPE_NEURON, TYPE_STT, TYPE_TTS, TYPE_TRIGGER, TYPE_SIGNAL) :return: path of the folder """ module_type_converter = dict() # dict to get the path behind a type of resource try: module_type_converter = { TYPE_NEURON: resources.neuron_folder, TYPE_STT: resources.stt_folder, TYPE_TTS: resources.tts_folder, TYPE_TRIGGER: resources.trigger_folder, TYPE_SIGNAL: resources.signal_folder } except AttributeError: # will be raised if the resource folder is not set in settings pass # Let's find the right path depending of the type try: folder_path = module_type_converter[module_type] except KeyError: folder_path = None # No folder_path has been found message = "No %s folder set in settings." % module_type if folder_path is None: logger.debug(message) Utils.print_danger(message) return folder_path @staticmethod def _clone_repo(path, git_url): """ Use git to clone locally the neuron in a temp folder :return: """ # clone the repo logger.debug("[ResourcesManager] GIT clone into folder: %s" % path) Utils.print_info("Cloning repository...") # if the folder already exist we remove it if os.path.exists(path): shutil.rmtree(path) else: os.makedirs(path) Repo.clone_from(git_url, path) @staticmethod def _rename_temp_folder(name, target_folder, tmp_path): """ Rename the temp folder of the cloned repo Return the name of the path to install :return: path to install, None if already exists """ logger.debug("[ResourcesManager] Rename temp folder") new_absolute_neuron_path = target_folder + os.sep + name try: shutil.move(tmp_path, new_absolute_neuron_path) return new_absolute_neuron_path except shutil.Error: # the folder already exist Utils.print_warning("The module %s already exist in the path %s" % (name, target_folder)) # remove the cloned repo logger.debug("[ResourcesManager] Deleting temp folder %s" % str(tmp_path)) shutil.rmtree(tmp_path) def run_ansible_playbook_module(self, install_file_path): """ Run the install.yml file through an Ansible playbook using the dedicated neuron ! :param sudo_password: local machine sudo password required to install libraries :param install_file_path: the path of the Ansible playbook to run. 
:return: """ logger.debug("[ResourcesManager] Run ansible playbook") Utils.print_info("Starting neuron installation") # ask the sudo password if self.sudo_password is not None: pswd = self.sudo_password else: pswd = getpass.getpass('Sudo password:') if not pswd or pswd == "": Utils.print_warning("You must enter a sudo password") return False else: ansible_neuron_parameters = { "task_file": install_file_path, "sudo": True, "sudo_user": "root", "sudo_password": pswd } neuron = Neuron(name="ansible_playbook", parameters=ansible_neuron_parameters) NeuronLauncher.start_neuron(neuron) return True @staticmethod def _check_supported_version(current_version, supported_versions): """ The dna file contains supported Kalliope version for the module to install. Check if supported versions are match the current installed version. If not, ask the user to confirm the installation anyway :param current_version: current version installed of Kalliope. E.g 0.4.0 :param supported_versions: list of supported version :return: True if the version is supported or user has confirmed the installation """ logger.debug("[ResourcesManager] Current installed version of Kalliope: %s" % str(current_version)) logger.debug("[ResourcesManager] Module supported version: %s" % str(supported_versions)) supported_version_found = False # Extract major version match_current_version = re.search('^[\d]*[.][\d]*', current_version) if match_current_version: current_version = match_current_version.group(0) for supported_version in supported_versions: if version.parse(str(current_version)) == version.parse(str(supported_version)): # we found the exact version supported_version_found = True break if not supported_version_found: # we ask the user if we want to install the module even if the version doesn't match Utils.print_info("Current installed version of Kalliope: %s" % current_version) Utils.print_info("Module supported versions: %s" % str(supported_versions)) Utils.print_warning("The neuron seems to be not supported by your current version of Kalliope") supported_version_found = Utils.query_yes_no("install it anyway?") logger.debug("[ResourcesManager] install it anyway user answer: %s" % supported_version_found) logger.debug("[ResourcesManager] check_supported_version: %s" % str(supported_version_found)) return supported_version_found def cleanup_after_failed_installation(self): logger.debug("[ResourcesManager] installation cancelled, deleting temp repo %s" % str(self.tmp_path)) shutil.rmtree(self.tmp_path)
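
# Hedged usage sketch: installing a community module from a Git URL with the
# manager above.  The URL is a placeholder, a resource folder for the module
# type must be configured in the Kalliope settings, and install() will prompt
# for a sudo password unless one is passed in.


def _demo_install(git_url="https://example.com/kalliope/neuron-example.git"):
    manager = ResourcesManager(git_url=git_url)
    try:
        dna = manager.install()
    except ResourcesManagerException as exc:
        Utils.print_danger("Installation failed: %s" % exc)
        return None
    if dna is not None:
        Utils.print_success("Installed %s module '%s'" % (dna.module_type, dna.name))
    return dna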
import json from test.common import test_dict import ruamel.yaml as yaml from box import Box, SBox class TestSBox: def test_property_box(self): td = test_dict.copy() td["inner"] = {"CamelCase": "Item"} pbox = SBox(td, camel_killer_box=True) assert isinstance(pbox.inner, SBox) assert pbox.inner.camel_case == "Item" assert json.loads(pbox.json)["inner"]["camel_case"] == "Item" test_item = yaml.load(pbox.yaml, Loader=yaml.SafeLoader) assert test_item["inner"]["camel_case"] == "Item" assert repr(pbox["inner"]).startswith("<ShorthandBox") assert not isinstance(pbox.dict, Box) assert pbox.dict["inner"]["camel_case"] == "Item" assert pbox.toml.startswith('key1 = "value1"')
import logging from typing import Optional from google_nest_sdm.camera_traits import CameraImageTrait, CameraLiveStreamTrait from google_nest_sdm.device import Device from homeassistant.components.camera import SUPPORT_STREAM, Camera from homeassistant.config_entries import ConfigEntry from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.typing import HomeAssistantType from .const import DOMAIN, SIGNAL_NEST_UPDATE from .device_info import DeviceInfo _LOGGER = logging.getLogger(__name__) async def async_setup_sdm_entry( hass: HomeAssistantType, entry: ConfigEntry, async_add_entities ) -> None: """Set up the cameras.""" subscriber = hass.data[DOMAIN][entry.entry_id] device_manager = await subscriber.async_get_device_manager() # Fetch initial data so we have data when entities subscribe. entities = [] for device in device_manager.devices.values(): if ( CameraImageTrait.NAME in device.traits or CameraLiveStreamTrait.NAME in device.traits ): entities.append(NestCamera(device)) async_add_entities(entities) class NestCamera(Camera): """Devices that support cameras.""" def __init__(self, device: Device): """Initialize the camera.""" super().__init__() self._device = device self._device_info = DeviceInfo(device) @property def should_poll(self) -> bool: """Disable polling since entities have state pushed via pubsub.""" return False @property def unique_id(self) -> Optional[str]: """Return a unique ID.""" # The API "name" field is a unique device identifier. return f"{self._device.name}-camera" @property def name(self): """Return the name of the camera.""" return self._device_info.device_name @property def device_info(self): """Return device specific attributes.""" return self._device_info.device_info @property def brand(self): """Return the camera brand.""" return self._device_info.device_brand @property def model(self): """Return the camera model.""" return self._device_info.device_model @property def supported_features(self): """Flag supported features.""" features = 0 if CameraLiveStreamTrait.NAME in self._device.traits: features = features | SUPPORT_STREAM return features async def stream_source(self): """Return the source of the stream.""" if CameraLiveStreamTrait.NAME not in self._device.traits: return None trait = self._device.traits[CameraLiveStreamTrait.NAME] rtsp_stream = await trait.generate_rtsp_stream() # Note: This is only valid for a few minutes, and probably needs # to be improved with an occasional call to .extend_rtsp_stream() which # returns a new rtsp_stream object. return rtsp_stream.rtsp_stream_url async def async_added_to_hass(self): """Run when entity is added to register update signal handler.""" # Event messages trigger the SIGNAL_NEST_UPDATE, which is intercepted # here to re-fresh the signals from _device. Unregister this callback # when the entity is removed. self.async_on_remove( async_dispatcher_connect( self.hass, SIGNAL_NEST_UPDATE, self.async_write_ha_state ) ) async def async_camera_image(self): """Return bytes of camera image.""" # No support for still images yet. Still images are only available # in response to an event on the feed. For now, suppress a # NotImplementedError in the parent class. return None
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from . import init_integration async def test_download_switch(hass, nzbget_api) -> None: """Test the creation and values of the download switch.""" instance = nzbget_api.return_value entry = await init_integration(hass) assert entry registry = await hass.helpers.entity_registry.async_get_registry() entity_id = "switch.nzbgettest_download" entity_entry = registry.async_get(entity_id) assert entity_entry assert entity_entry.unique_id == f"{entry.entry_id}_download" state = hass.states.get(entity_id) assert state assert state.state == STATE_ON # test download paused instance.status.return_value["DownloadPaused"] = True await hass.helpers.entity_component.async_update_entity(entity_id) await hass.async_block_till_done() state = hass.states.get(entity_id) assert state assert state.state == STATE_OFF async def test_download_switch_services(hass, nzbget_api) -> None: """Test download switch services.""" instance = nzbget_api.return_value entry = await init_integration(hass) entity_id = "switch.nzbgettest_download" assert entry await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) instance.pausedownload.assert_called_once() await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) instance.resumedownload.assert_called_once()
from unittest import TestCase import numpy as np from scattertext.termranking import AbsoluteFrequencyRanker from scattertext.termranking import DocLengthDividedFrequencyRanker from scattertext.termranking import DocLengthNormalizedFrequencyRanker from scattertext.termranking.OncePerDocFrequencyRanker import OncePerDocFrequencyRanker from scattertext.test.test_TermDocMat import make_a_test_term_doc_matrix class TestTermRanker(TestCase): def test_absolute_frequency_ranker(self): tdm = make_a_test_term_doc_matrix() ranker = AbsoluteFrequencyRanker(tdm) rank_df = ranker.get_ranks() self.assertEqual(len(rank_df), 58) self.assertEqual(rank_df.loc['hello'].tolist(), [1, 0]) self.assertEqual(rank_df.loc['blah'].tolist(), [0, 3]) self.assertEqual(rank_df.loc['name'].tolist(), [1, 1]) def test_doc_length_normalized_frequency_ranker(self): tdm = make_a_test_term_doc_matrix() len_ranker = DocLengthNormalizedFrequencyRanker(tdm) abs_ranker = AbsoluteFrequencyRanker(tdm) abs_rank_df = abs_ranker.get_ranks() len_ranker_df = len_ranker.get_ranks() self.assertEqual(len(abs_rank_df), len(len_ranker_df)) doc_lengths = [12, 35, 29] avg_length = sum(doc_lengths) * 1. / len(doc_lengths) np.testing.assert_almost_equal(np.array(len_ranker_df.loc['blah']), [0, avg_length * 3. / 12]) np.testing.assert_almost_equal(np.array(len_ranker_df.loc['name']), [avg_length * 1. / 35, avg_length * 1. / 29]) def test_doc_length_divided_frequency_ranker(self): tdm = make_a_test_term_doc_matrix() len_ranker = DocLengthDividedFrequencyRanker(tdm) abs_ranker = AbsoluteFrequencyRanker(tdm) abs_rank_df = abs_ranker.get_ranks() len_ranker_df = len_ranker.get_ranks() self.assertEqual(len(abs_rank_df), len(len_ranker_df)) doc_lengths = [12, 35, 29] np.testing.assert_almost_equal(np.array(len_ranker_df.loc['blah']), [0, 3. / 12]) np.testing.assert_almost_equal(np.array(len_ranker_df.loc['name']), [1. / 35, 1. / 29]) def test_once_per_doc_frequency_ranker(self): tdm = make_a_test_term_doc_matrix() abs_ranker = DocLengthDividedFrequencyRanker(tdm) one_ranker = OncePerDocFrequencyRanker(tdm) abs_rank_df = abs_ranker.get_ranks() len_ranker_df = one_ranker.get_ranks() self.assertEqual(len(abs_rank_df), len(len_ranker_df)) np.testing.assert_almost_equal(np.array(len_ranker_df.loc['blah']), [0, 1]) np.testing.assert_almost_equal(np.array(len_ranker_df.loc['name']), [1, 1])
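
# Hedged arithmetic check mirroring what test_doc_length_normalized_frequency_ranker
# asserts above: each raw count is scaled by (average document length / that
# document's length).  Plain-Python version using the same document lengths.


def _length_normalize(count, doc_length, doc_lengths=(12, 35, 29)):
    avg_length = sum(doc_lengths) * 1. / len(doc_lengths)
    return count * avg_length / doc_length


assert round(_length_normalize(3, 12), 3) == 6.333  # 3 occurrences of 'blah' in a 12-token doc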
from __future__ import print_function import os import sys import inspect import warnings import argparse import codecs from collections import Counter # hack for python2/3 compatibility from io import open argparse.open = open def create_parser(subparsers=None): if subparsers: parser = subparsers.add_parser('get-vocab', formatter_class=argparse.RawDescriptionHelpFormatter, description="Generates vocabulary") else: parser = argparse.ArgumentParser( formatter_class=argparse.RawDescriptionHelpFormatter, description="Generates vocabulary") parser.add_argument( '--input', '-i', type=argparse.FileType('r'), default=sys.stdin, metavar='PATH', help="Input file (default: standard input).") parser.add_argument( '--output', '-o', type=argparse.FileType('w'), default=sys.stdout, metavar='PATH', help="Output file (default: standard output)") return parser def get_vocab(train_file, vocab_file): c = Counter() for line in train_file: for word in line.strip('\r\n ').split(' '): if word: c[word] += 1 for key,f in sorted(c.items(), key=lambda x: x[1], reverse=True): vocab_file.write(key+" "+ str(f) + "\n") if __name__ == "__main__": currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) newdir = os.path.join(currentdir, 'subword_nmt') if os.path.isdir(newdir): warnings.simplefilter('default') warnings.warn( "this script's location has moved to {0}. This symbolic link will be removed in a future version. Please point to the new location, or install the package and use the command 'subword-nmt'".format(newdir), DeprecationWarning ) # python 2/3 compatibility if sys.version_info < (3, 0): sys.stderr = codecs.getwriter('UTF-8')(sys.stderr) sys.stdout = codecs.getwriter('UTF-8')(sys.stdout) sys.stdin = codecs.getreader('UTF-8')(sys.stdin) else: sys.stderr = codecs.getwriter('UTF-8')(sys.stderr.buffer) sys.stdout = codecs.getwriter('UTF-8')(sys.stdout.buffer) sys.stdin = codecs.getreader('UTF-8')(sys.stdin.buffer) parser = create_parser() args = parser.parse_args() # read/write files as UTF-8 if args.input.name != '<stdin>': args.input = codecs.open(args.input.name, encoding='utf-8') if args.output.name != '<stdout>': args.output = codecs.open(args.output.name, 'w', encoding='utf-8') get_vocab(args.input, args.output)
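
# Hedged usage sketch: get_vocab can also be called directly on file-like
# objects, bypassing the CLI wrapper above.  The two-line corpus is made up;
# output lines are "<word> <count>", most frequent first (ties in arbitrary
# order).


def _demo_get_vocab():
    import io

    corpus = io.StringIO("the cat sat\nthe cat ran\n")
    vocab = io.StringIO()
    get_vocab(corpus, vocab)
    return vocab.getvalue()  # e.g. "the 2\ncat 2\nsat 1\nran 1\n"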
import os import json import pytest import vcr from urllib.request import urlopen def test_set_serializer_default_config(tmpdir, httpbin): my_vcr = vcr.VCR(serializer="json") with my_vcr.use_cassette(str(tmpdir.join("test.json"))): assert my_vcr.serializer == "json" urlopen(httpbin.url + "/get") with open(str(tmpdir.join("test.json"))) as f: file_content = f.read() assert file_content.endswith("\n") assert json.loads(file_content) def test_default_set_cassette_library_dir(tmpdir, httpbin): my_vcr = vcr.VCR(cassette_library_dir=str(tmpdir.join("subdir"))) with my_vcr.use_cassette("test.json"): urlopen(httpbin.url + "/get") assert os.path.exists(str(tmpdir.join("subdir").join("test.json"))) def test_override_set_cassette_library_dir(tmpdir, httpbin): my_vcr = vcr.VCR(cassette_library_dir=str(tmpdir.join("subdir"))) cld = str(tmpdir.join("subdir2")) with my_vcr.use_cassette("test.json", cassette_library_dir=cld): urlopen(httpbin.url + "/get") assert os.path.exists(str(tmpdir.join("subdir2").join("test.json"))) assert not os.path.exists(str(tmpdir.join("subdir").join("test.json"))) def test_override_match_on(tmpdir, httpbin): my_vcr = vcr.VCR(match_on=["method"]) with my_vcr.use_cassette(str(tmpdir.join("test.json"))): urlopen(httpbin.url) with my_vcr.use_cassette(str(tmpdir.join("test.json"))) as cass: urlopen(httpbin.url + "/get") assert len(cass) == 1 assert cass.play_count == 1 def test_missing_matcher(): my_vcr = vcr.VCR() my_vcr.register_matcher("awesome", object) with pytest.raises(KeyError): with my_vcr.use_cassette("test.yaml", match_on=["notawesome"]): pass
from homeassistant.components.switch import SwitchEntity from .const import DOMAIN SWITCH_PREFIX = "Switch" ICON = "mdi:toggle-switch" async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Smappee Comfort Plugs.""" smappee_base = hass.data[DOMAIN][config_entry.entry_id] entities = [] for service_location in smappee_base.smappee.service_locations.values(): for actuator_id, actuator in service_location.actuators.items(): if actuator.type in ["SWITCH", "COMFORT_PLUG"]: entities.append( SmappeeActuator( smappee_base, service_location, actuator.name, actuator_id, actuator.type, actuator.serialnumber, ) ) elif actuator.type == "INFINITY_OUTPUT_MODULE": for option in actuator.state_options: entities.append( SmappeeActuator( smappee_base, service_location, actuator.name, actuator_id, actuator.type, actuator.serialnumber, actuator_state_option=option, ) ) async_add_entities(entities, True) class SmappeeActuator(SwitchEntity): """Representation of a Smappee Comport Plug.""" def __init__( self, smappee_base, service_location, name, actuator_id, actuator_type, actuator_serialnumber, actuator_state_option=None, ): """Initialize a new Smappee Comfort Plug.""" self._smappee_base = smappee_base self._service_location = service_location self._actuator_name = name self._actuator_id = actuator_id self._actuator_type = actuator_type self._actuator_serialnumber = actuator_serialnumber self._actuator_state_option = actuator_state_option self._state = self._service_location.actuators.get(actuator_id).state self._connection_state = self._service_location.actuators.get( actuator_id ).connection_state @property def name(self): """Return the name of the switch.""" if self._actuator_type == "INFINITY_OUTPUT_MODULE": return ( f"{self._service_location.service_location_name} - " f"Output module - {self._actuator_name} - {self._actuator_state_option}" ) # Switch or comfort plug return ( f"{self._service_location.service_location_name} - " f"{self._actuator_type.title()} - {self._actuator_name}" ) @property def is_on(self): """Return true if switch is on.""" if self._actuator_type == "INFINITY_OUTPUT_MODULE": return ( self._service_location.actuators.get(self._actuator_id).state == self._actuator_state_option ) # Switch or comfort plug return self._state == "ON_ON" @property def icon(self): """Icon to use in the frontend.""" return ICON def turn_on(self, **kwargs): """Turn on Comport Plug.""" if self._actuator_type in ["SWITCH", "COMFORT_PLUG"]: self._service_location.set_actuator_state(self._actuator_id, state="ON_ON") elif self._actuator_type == "INFINITY_OUTPUT_MODULE": self._service_location.set_actuator_state( self._actuator_id, state=self._actuator_state_option ) def turn_off(self, **kwargs): """Turn off Comport Plug.""" if self._actuator_type in ["SWITCH", "COMFORT_PLUG"]: self._service_location.set_actuator_state( self._actuator_id, state="OFF_OFF" ) elif self._actuator_type == "INFINITY_OUTPUT_MODULE": self._service_location.set_actuator_state( self._actuator_id, state="PLACEHOLDER", api=False ) @property def available(self): """Return True if entity is available. 
Unavailable for COMFORT_PLUGS.""" return ( self._connection_state == "CONNECTED" or self._actuator_type == "COMFORT_PLUG" ) @property def today_energy_kwh(self): """Return the today total energy usage in kWh.""" if self._actuator_type == "SWITCH": cons = self._service_location.actuators.get( self._actuator_id ).consumption_today if cons is not None: return round(cons / 1000.0, 2) return None @property def unique_id( self, ): """Return the unique ID for this switch.""" if self._actuator_type == "INFINITY_OUTPUT_MODULE": return ( f"{self._service_location.device_serial_number}-" f"{self._service_location.service_location_id}-actuator-" f"{self._actuator_id}-{self._actuator_state_option}" ) # Switch or comfort plug return ( f"{self._service_location.device_serial_number}-" f"{self._service_location.service_location_id}-actuator-" f"{self._actuator_id}" ) @property def device_info(self): """Return the device info for this switch.""" return { "identifiers": {(DOMAIN, self._service_location.device_serial_number)}, "name": self._service_location.service_location_name, "manufacturer": "Smappee", "model": self._service_location.device_model, "sw_version": self._service_location.firmware_version, } async def async_update(self): """Get the latest data from Smappee and update the state.""" await self._smappee_base.async_update() new_state = self._service_location.actuators.get(self._actuator_id).state if new_state != self._state: self._state = new_state self.async_write_ha_state() self._connection_state = self._service_location.actuators.get( self._actuator_id ).connection_state
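# Standalone sketch of the INFINITY_OUTPUT_MODULE pattern used above: a single
# actuator with several state options is exposed as several switch entities,
# and each entity is "on" exactly when the actuator's current state equals the
# option that entity represents. The option names here are made up.
options = ["OPTION_A", "OPTION_B", "OPTION_C"]
actuator_state = "OPTION_B"

switch_states = {option: actuator_state == option for option in options}
print(switch_states)   # only "OPTION_B" maps to True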
from copy import deepcopy from homeassistant.components.deconz.const import DOMAIN as DECONZ_DOMAIN from homeassistant.components.deconz.gateway import get_gateway_from_config_entry from homeassistant.components.lock import ( DOMAIN as LOCK_DOMAIN, SERVICE_LOCK, SERVICE_UNLOCK, ) from homeassistant.const import ATTR_ENTITY_ID, STATE_LOCKED, STATE_UNLOCKED from homeassistant.setup import async_setup_component from .test_gateway import DECONZ_WEB_REQUEST, setup_deconz_integration from tests.async_mock import patch LOCKS = { "1": { "etag": "5c2ec06cde4bd654aef3a555fcd8ad12", "hascolor": False, "lastannounced": None, "lastseen": "2020-08-22T15:29:03Z", "manufacturername": "Danalock", "modelid": "V3-BTZB", "name": "Door lock", "state": {"alert": "none", "on": False, "reachable": True}, "swversion": "19042019", "type": "Door Lock", "uniqueid": "00:00:00:00:00:00:00:00-00", } } async def test_platform_manually_configured(hass): """Test that we do not discover anything or try to set up a gateway.""" assert ( await async_setup_component( hass, LOCK_DOMAIN, {"lock": {"platform": DECONZ_DOMAIN}} ) is True ) assert DECONZ_DOMAIN not in hass.data async def test_no_locks(hass): """Test that no lock entities are created.""" await setup_deconz_integration(hass) assert len(hass.states.async_all()) == 0 async def test_locks(hass): """Test that all supported lock entities are created.""" data = deepcopy(DECONZ_WEB_REQUEST) data["lights"] = deepcopy(LOCKS) config_entry = await setup_deconz_integration(hass, get_state_response=data) gateway = get_gateway_from_config_entry(hass, config_entry) assert len(hass.states.async_all()) == 1 assert hass.states.get("lock.door_lock").state == STATE_UNLOCKED door_lock = hass.states.get("lock.door_lock") assert door_lock.state == STATE_UNLOCKED state_changed_event = { "t": "event", "e": "changed", "r": "lights", "id": "1", "state": {"on": True}, } gateway.api.event_handler(state_changed_event) await hass.async_block_till_done() assert hass.states.get("lock.door_lock").state == STATE_LOCKED # Verify service calls door_lock_device = gateway.api.lights["1"] # Service lock door with patch.object(door_lock_device, "_request", return_value=True) as set_callback: await hass.services.async_call( LOCK_DOMAIN, SERVICE_LOCK, {ATTR_ENTITY_ID: "lock.door_lock"}, blocking=True, ) await hass.async_block_till_done() set_callback.assert_called_with("put", "/lights/1/state", json={"on": True}) # Service unlock door with patch.object(door_lock_device, "_request", return_value=True) as set_callback: await hass.services.async_call( LOCK_DOMAIN, SERVICE_UNLOCK, {ATTR_ENTITY_ID: "lock.door_lock"}, blocking=True, ) await hass.async_block_till_done() set_callback.assert_called_with("put", "/lights/1/state", json={"on": False}) await hass.config_entries.async_unload(config_entry.entry_id) assert len(hass.states.async_all()) == 0
from homeassistant.components.device_tracker import ( ATTR_BATTERY, ATTR_GPS, ATTR_GPS_ACCURACY, ATTR_LOCATION_NAME, ) from homeassistant.components.device_tracker.config_entry import TrackerEntity from homeassistant.components.device_tracker.const import SOURCE_TYPE_GPS from homeassistant.const import ATTR_BATTERY_LEVEL, ATTR_LATITUDE, ATTR_LONGITUDE from homeassistant.core import callback from homeassistant.helpers.restore_state import RestoreEntity from .const import ( ATTR_ALTITUDE, ATTR_COURSE, ATTR_DEVICE_ID, ATTR_DEVICE_NAME, ATTR_SPEED, ATTR_VERTICAL_ACCURACY, SIGNAL_LOCATION_UPDATE, ) from .helpers import device_info ATTR_KEYS = (ATTR_ALTITUDE, ATTR_COURSE, ATTR_SPEED, ATTR_VERTICAL_ACCURACY) async def async_setup_entry(hass, entry, async_add_entities): """Set up OwnTracks based off an entry.""" entity = MobileAppEntity(entry) async_add_entities([entity]) return True class MobileAppEntity(TrackerEntity, RestoreEntity): """Represent a tracked device.""" def __init__(self, entry, data=None): """Set up OwnTracks entity.""" self._entry = entry self._data = data self._dispatch_unsub = None @property def unique_id(self): """Return the unique ID.""" return self._entry.data[ATTR_DEVICE_ID] @property def battery_level(self): """Return the battery level of the device.""" return self._data.get(ATTR_BATTERY) @property def device_state_attributes(self): """Return device specific attributes.""" attrs = {} for key in ATTR_KEYS: value = self._data.get(key) if value is not None: attrs[key] = value return attrs @property def location_accuracy(self): """Return the gps accuracy of the device.""" return self._data.get(ATTR_GPS_ACCURACY) @property def latitude(self): """Return latitude value of the device.""" gps = self._data.get(ATTR_GPS) if gps is None: return None return gps[0] @property def longitude(self): """Return longitude value of the device.""" gps = self._data.get(ATTR_GPS) if gps is None: return None return gps[1] @property def location_name(self): """Return a location name for the current location of the device.""" return self._data.get(ATTR_LOCATION_NAME) @property def name(self): """Return the name of the device.""" return self._entry.data[ATTR_DEVICE_NAME] @property def source_type(self): """Return the source type, eg gps or router, of the device.""" return SOURCE_TYPE_GPS @property def device_info(self): """Return the device info.""" return device_info(self._entry.data) async def async_added_to_hass(self): """Call when entity about to be added to Home Assistant.""" await super().async_added_to_hass() self._dispatch_unsub = self.hass.helpers.dispatcher.async_dispatcher_connect( SIGNAL_LOCATION_UPDATE.format(self._entry.entry_id), self.update_data ) # Don't restore if we got set up with data. if self._data is not None: return state = await self.async_get_last_state() if state is None: self._data = {} return attr = state.attributes data = { ATTR_GPS: (attr.get(ATTR_LATITUDE), attr.get(ATTR_LONGITUDE)), ATTR_GPS_ACCURACY: attr.get(ATTR_GPS_ACCURACY), ATTR_BATTERY: attr.get(ATTR_BATTERY_LEVEL), } data.update({key: attr[key] for key in attr if key in ATTR_KEYS}) self._data = data async def async_will_remove_from_hass(self): """Call when entity is being removed from hass.""" await super().async_will_remove_from_hass() if self._dispatch_unsub: self._dispatch_unsub() self._dispatch_unsub = None @callback def update_data(self, data): """Mark the device as seen.""" self._data = data self.async_write_ha_state()
import pytest import numpy as np from numpy.testing import assert_allclose from mne.channels import make_standard_montage from mne.io._digitization import _get_dig_eeg, _get_fid_coords from mne.channels.montage import get_builtin_montages, HEAD_SIZE_DEFAULT from mne.io.constants import FIFF @pytest.mark.parametrize('kind', get_builtin_montages()) def test_standard_montages_have_fids(kind): """Test standard montage are all in unknown coord (have fids).""" montage = make_standard_montage(kind) fids, coord_frame = _get_fid_coords(montage.dig) for k, v in fids.items(): assert v is not None, k for d in montage.dig: assert d['coord_frame'] == FIFF.FIFFV_COORD_UNKNOWN def test_standard_montage_errors(): """Test error handling for wrong keys.""" _msg = "Invalid value for the 'kind' parameter..*but got.*not-here" with pytest.raises(ValueError, match=_msg): _ = make_standard_montage('not-here') @pytest.mark.parametrize('head_size', (HEAD_SIZE_DEFAULT, 0.05)) @pytest.mark.parametrize('kind, tol', [ ['EGI_256', 1e-5], ['easycap-M1', 1e-8], ['easycap-M10', 1e-8], ['biosemi128', 1e-8], ['biosemi16', 1e-8], ['biosemi160', 1e-8], ['biosemi256', 1e-8], ['biosemi32', 1e-8], ['biosemi64', 1e-8], ]) def test_standard_montages_on_sphere(kind, tol, head_size): """Test some standard montage are on sphere.""" kwargs = dict() if head_size != HEAD_SIZE_DEFAULT: kwargs['head_size'] = head_size montage = make_standard_montage(kind, **kwargs) eeg_loc = np.array([ch['r'] for ch in _get_dig_eeg(montage.dig)]) assert_allclose( actual=np.linalg.norm(eeg_loc, axis=1), desired=np.full((eeg_loc.shape[0], ), head_size), atol=tol, ) def test_standard_superset(): """Test some properties that should hold for superset montages.""" # new montages, tweaked to end up at the same size as the others m_1005 = make_standard_montage('standard_1005', 0.0970) m_1020 = make_standard_montage('standard_1020', 0.0991) assert len(set(m_1005.ch_names) - set(m_1020.ch_names)) > 0 # XXX weird that this is not a proper superset... assert set(m_1020.ch_names) - set(m_1005.ch_names) == {'O10', 'O9'} c_1005 = m_1005._get_ch_pos() for key, value in m_1020._get_ch_pos().items(): if key not in ('O10', 'O9'): assert_allclose(c_1005[key], value, atol=1e-4, err_msg=key)
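# Standalone numpy sketch of the sphere check in test_standard_montages_on_sphere
# above: if every electrode sits at distance head_size from the origin, the
# row-wise norms of the position array all equal head_size. The points below are
# randomly generated for illustration, not taken from a real montage.
import numpy as np
from numpy.testing import assert_allclose

rng = np.random.default_rng(0)
head_size = 0.095
directions = rng.normal(size=(32, 3))
eeg_loc = head_size * directions / np.linalg.norm(directions, axis=1, keepdims=True)

assert_allclose(
    np.linalg.norm(eeg_loc, axis=1),
    np.full(eeg_loc.shape[0], head_size),
    atol=1e-12,
)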
from datetime import timedelta from typing import Any, Dict, Optional from pymodbus.exceptions import ConnectionException, ModbusException from pymodbus.pdu import ExceptionResponse from homeassistant.components.cover import SUPPORT_CLOSE, SUPPORT_OPEN, CoverEntity from homeassistant.const import ( CONF_COVERS, CONF_DEVICE_CLASS, CONF_NAME, CONF_SCAN_INTERVAL, CONF_SLAVE, ) from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ( ConfigType, DiscoveryInfoType, HomeAssistantType, ) from . import ModbusHub from .const import ( CALL_TYPE_COIL, CALL_TYPE_REGISTER_HOLDING, CALL_TYPE_REGISTER_INPUT, CONF_REGISTER, CONF_STATE_CLOSED, CONF_STATE_CLOSING, CONF_STATE_OPEN, CONF_STATE_OPENING, CONF_STATUS_REGISTER, CONF_STATUS_REGISTER_TYPE, MODBUS_DOMAIN, ) async def async_setup_platform( hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info: Optional[DiscoveryInfoType] = None, ): """Read configuration and create Modbus cover.""" if discovery_info is None: return covers = [] for cover in discovery_info[CONF_COVERS]: hub: ModbusHub = hass.data[MODBUS_DOMAIN][discovery_info[CONF_NAME]] covers.append(ModbusCover(hub, cover)) async_add_entities(covers) class ModbusCover(CoverEntity, RestoreEntity): """Representation of a Modbus cover.""" def __init__( self, hub: ModbusHub, config: Dict[str, Any], ): """Initialize the modbus cover.""" self._hub: ModbusHub = hub self._coil = config.get(CALL_TYPE_COIL) self._device_class = config.get(CONF_DEVICE_CLASS) self._name = config[CONF_NAME] self._register = config.get(CONF_REGISTER) self._slave = config[CONF_SLAVE] self._state_closed = config[CONF_STATE_CLOSED] self._state_closing = config[CONF_STATE_CLOSING] self._state_open = config[CONF_STATE_OPEN] self._state_opening = config[CONF_STATE_OPENING] self._status_register = config.get(CONF_STATUS_REGISTER) self._status_register_type = config[CONF_STATUS_REGISTER_TYPE] self._scan_interval = timedelta(seconds=config[CONF_SCAN_INTERVAL]) self._value = None self._available = True # If we read cover status from coil, and not from optional status register, # we interpret boolean value False as closed cover, and value True as open cover. # Intermediate states are not supported in such a setup. if self._coil is not None and self._status_register is None: self._state_closed = False self._state_open = True self._state_closing = None self._state_opening = None # If we read cover status from the main register (i.e., an optional # status register is not specified), we need to make sure the register_type # is set to "holding". 
if self._register is not None and self._status_register is None: self._status_register = self._register self._status_register_type = CALL_TYPE_REGISTER_HOLDING async def async_added_to_hass(self): """Handle entity which will be added.""" state = await self.async_get_last_state() if not state: return self._value = state.state async_track_time_interval( self.hass, lambda arg: self._update(), self._scan_interval ) @property def device_class(self) -> Optional[str]: """Return the device class of the sensor.""" return self._device_class @property def name(self): """Return the name of the switch.""" return self._name @property def supported_features(self): """Flag supported features.""" return SUPPORT_OPEN | SUPPORT_CLOSE @property def available(self) -> bool: """Return True if entity is available.""" return self._available @property def is_opening(self): """Return if the cover is opening or not.""" return self._value == self._state_opening @property def is_closing(self): """Return if the cover is closing or not.""" return self._value == self._state_closing @property def is_closed(self): """Return if the cover is closed or not.""" return self._value == self._state_closed @property def should_poll(self): """Return True if entity has to be polled for state. False if entity pushes its state to HA. """ # Handle polling directly in this entity return False def open_cover(self, **kwargs: Any) -> None: """Open cover.""" if self._coil is not None: self._write_coil(True) else: self._write_register(self._state_open) self._update() def close_cover(self, **kwargs: Any) -> None: """Close cover.""" if self._coil is not None: self._write_coil(False) else: self._write_register(self._state_closed) self._update() def _update(self): """Update the state of the cover.""" if self._coil is not None and self._status_register is None: self._value = self._read_coil() else: self._value = self._read_status_register() self.schedule_update_ha_state() def _read_status_register(self) -> Optional[int]: """Read status register using the Modbus hub slave.""" try: if self._status_register_type == CALL_TYPE_REGISTER_INPUT: result = self._hub.read_input_registers( self._slave, self._status_register, 1 ) else: result = self._hub.read_holding_registers( self._slave, self._status_register, 1 ) except ConnectionException: self._available = False return if isinstance(result, (ModbusException, ExceptionResponse)): self._available = False return value = int(result.registers[0]) self._available = True return value def _write_register(self, value): """Write holding register using the Modbus hub slave.""" try: self._hub.write_register(self._slave, self._register, value) except ConnectionException: self._available = False return self._available = True def _read_coil(self) -> Optional[bool]: """Read coil using the Modbus hub slave.""" try: result = self._hub.read_coils(self._slave, self._coil, 1) except ConnectionException: self._available = False return if isinstance(result, (ModbusException, ExceptionResponse)): self._available = False return value = bool(result.bits[0] & 1) self._available = True return value def _write_coil(self, value): """Write coil using the Modbus hub slave.""" try: self._hub.write_coil(self._slave, self._coil, value) except ConnectionException: self._available = False return self._available = True
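# Standalone sketch (no Modbus I/O) of how the cover above maps a raw register
# or coil value onto open/opening/closing/closed, mirroring the is_opening,
# is_closing and is_closed properties. The numeric values are hypothetical
# stand-ins for what CONF_STATE_* might be configured to.
STATE_CLOSED, STATE_OPEN, STATE_OPENING, STATE_CLOSING = 0, 1, 2, 3


def describe(value):
    """Return a human-readable name for a raw cover state value."""
    if value == STATE_OPENING:
        return "opening"
    if value == STATE_CLOSING:
        return "closing"
    if value == STATE_CLOSED:
        return "closed"
    return "open"


# A coil-only cover collapses to two states, as in __init__ above:
# False reads as closed and True as open.
print(describe(0), describe(2), describe(1))   # closed opening open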
from datetime import timedelta import logging import numpy import requests import voluptuous as vol from homeassistant.components.image_processing import ( CONF_ENTITY_ID, CONF_NAME, CONF_SOURCE, PLATFORM_SCHEMA, ImageProcessingEntity, ) from homeassistant.core import split_entity_id import homeassistant.helpers.config_validation as cv try: # Verify that the OpenCV python package is pre-installed import cv2 CV2_IMPORTED = True except ImportError: CV2_IMPORTED = False _LOGGER = logging.getLogger(__name__) ATTR_MATCHES = "matches" ATTR_TOTAL_MATCHES = "total_matches" CASCADE_URL = ( "https://raw.githubusercontent.com/opencv/opencv/master/data/" "lbpcascades/lbpcascade_frontalface.xml" ) CONF_CLASSIFIER = "classifier" CONF_FILE = "file" CONF_MIN_SIZE = "min_size" CONF_NEIGHBORS = "neighbors" CONF_SCALE = "scale" DEFAULT_CLASSIFIER_PATH = "lbp_frontalface.xml" DEFAULT_MIN_SIZE = (30, 30) DEFAULT_NEIGHBORS = 4 DEFAULT_SCALE = 1.1 DEFAULT_TIMEOUT = 10 SCAN_INTERVAL = timedelta(seconds=2) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_CLASSIFIER): { cv.string: vol.Any( cv.isfile, vol.Schema( { vol.Required(CONF_FILE): cv.isfile, vol.Optional(CONF_SCALE, DEFAULT_SCALE): float, vol.Optional( CONF_NEIGHBORS, DEFAULT_NEIGHBORS ): cv.positive_int, vol.Optional(CONF_MIN_SIZE, DEFAULT_MIN_SIZE): vol.Schema( vol.All(vol.ExactSequence([int, int]), vol.Coerce(tuple)) ), } ), ) } } ) def _create_processor_from_config(hass, camera_entity, config): """Create an OpenCV processor from configuration.""" classifier_config = config.get(CONF_CLASSIFIER) name = f"{config[CONF_NAME]} {split_entity_id(camera_entity)[1].replace('_', ' ')}" processor = OpenCVImageProcessor(hass, camera_entity, name, classifier_config) return processor def _get_default_classifier(dest_path): """Download the default OpenCV classifier.""" _LOGGER.info("Downloading default classifier") req = requests.get(CASCADE_URL, stream=True) with open(dest_path, "wb") as fil: for chunk in req.iter_content(chunk_size=1024): if chunk: # filter out keep-alive new chunks fil.write(chunk) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the OpenCV image processing platform.""" if not CV2_IMPORTED: _LOGGER.error( "No OpenCV library found! 
Install or compile for your system " "following instructions here: http://opencv.org/releases.html" ) return entities = [] if CONF_CLASSIFIER not in config: dest_path = hass.config.path(DEFAULT_CLASSIFIER_PATH) _get_default_classifier(dest_path) config[CONF_CLASSIFIER] = {"Face": dest_path} for camera in config[CONF_SOURCE]: entities.append( OpenCVImageProcessor( hass, camera[CONF_ENTITY_ID], camera.get(CONF_NAME), config[CONF_CLASSIFIER], ) ) add_entities(entities) class OpenCVImageProcessor(ImageProcessingEntity): """Representation of an OpenCV image processor.""" def __init__(self, hass, camera_entity, name, classifiers): """Initialize the OpenCV entity.""" self.hass = hass self._camera_entity = camera_entity if name: self._name = name else: self._name = f"OpenCV {split_entity_id(camera_entity)[1]}" self._classifiers = classifiers self._matches = {} self._total_matches = 0 self._last_image = None @property def camera_entity(self): """Return camera entity id from process pictures.""" return self._camera_entity @property def name(self): """Return the name of the image processor.""" return self._name @property def state(self): """Return the state of the entity.""" return self._total_matches @property def state_attributes(self): """Return device specific state attributes.""" return {ATTR_MATCHES: self._matches, ATTR_TOTAL_MATCHES: self._total_matches} def process_image(self, image): """Process the image.""" cv_image = cv2.imdecode(numpy.asarray(bytearray(image)), cv2.IMREAD_UNCHANGED) matches = {} total_matches = 0 for name, classifier in self._classifiers.items(): scale = DEFAULT_SCALE neighbors = DEFAULT_NEIGHBORS min_size = DEFAULT_MIN_SIZE if isinstance(classifier, dict): path = classifier[CONF_FILE] scale = classifier.get(CONF_SCALE, scale) neighbors = classifier.get(CONF_NEIGHBORS, neighbors) min_size = classifier.get(CONF_MIN_SIZE, min_size) else: path = classifier cascade = cv2.CascadeClassifier(path) detections = cascade.detectMultiScale( cv_image, scaleFactor=scale, minNeighbors=neighbors, minSize=min_size ) regions = [] # pylint: disable=invalid-name for (x, y, w, h) in detections: regions.append((int(x), int(y), int(w), int(h))) total_matches += 1 matches[name] = regions self._matches = matches self._total_matches = total_matches
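# Illustrative sketch of the detection call used in process_image() above, kept
# separate from the platform code. It assumes the opencv-python package is
# installed and uses the Haar cascade it bundles under cv2.data.haarcascades;
# the blank test frame is made up, so no faces will be found.
import cv2
import numpy as np

cascade_path = cv2.data.haarcascades + "haarcascade_frontalface_default.xml"
cascade = cv2.CascadeClassifier(cascade_path)

frame = np.zeros((200, 200), dtype=np.uint8)   # blank grayscale image
if not cascade.empty():
    detections = cascade.detectMultiScale(
        frame, scaleFactor=1.1, minNeighbors=4, minSize=(30, 30)
    )
    # Each detection is an (x, y, w, h) bounding box, unpacked the same way
    # as in process_image() above.
    print(len(detections), "matches")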
import codecs import keyword import re import sys import token import tokenize from coverage import env from coverage.backward import iternext, unicode_class from coverage.misc import contract def phys_tokens(toks): """Return all physical tokens, even line continuations. tokenize.generate_tokens() doesn't return a token for the backslash that continues lines. This wrapper provides those tokens so that we can re-create a faithful representation of the original source. Returns the same values as generate_tokens() """ last_line = None last_lineno = -1 last_ttext = None for ttype, ttext, (slineno, scol), (elineno, ecol), ltext in toks: if last_lineno != elineno: if last_line and last_line.endswith("\\\n"): # We are at the beginning of a new line, and the last line # ended with a backslash. We probably have to inject a # backslash token into the stream. Unfortunately, there's more # to figure out. This code:: # # usage = """\ # HEY THERE # """ # # triggers this condition, but the token text is:: # # '"""\\\nHEY THERE\n"""' # # so we need to figure out if the backslash is already in the # string token or not. inject_backslash = True if last_ttext.endswith("\\"): inject_backslash = False elif ttype == token.STRING: if "\n" in ttext and ttext.split('\n', 1)[0][-1] == '\\': # It's a multi-line string and the first line ends with # a backslash, so we don't need to inject another. inject_backslash = False if inject_backslash: # Figure out what column the backslash is in. ccol = len(last_line.split("\n")[-2]) - 1 # Yield the token, with a fake token type. yield ( 99999, "\\\n", (slineno, ccol), (slineno, ccol+2), last_line ) last_line = ltext if ttype not in (tokenize.NEWLINE, tokenize.NL): last_ttext = ttext yield ttype, ttext, (slineno, scol), (elineno, ecol), ltext last_lineno = elineno @contract(source='unicode') def source_token_lines(source): """Generate a series of lines, one for each line in `source`. Each line is a list of pairs, each pair is a token:: [('key', 'def'), ('ws', ' '), ('nam', 'hello'), ('op', '('), ... ] Each pair has a token class, and the token text. If you concatenate all the token texts, and then join them with newlines, you should have your original `source` back, with two differences: trailing whitespace is not preserved, and a final line with no newline is indistinguishable from a final line with a newline. """ ws_tokens = set([token.INDENT, token.DEDENT, token.NEWLINE, tokenize.NL]) line = [] col = 0 source = source.expandtabs(8).replace('\r\n', '\n') tokgen = generate_tokens(source) for ttype, ttext, (_, scol), (_, ecol), _ in phys_tokens(tokgen): mark_start = True for part in re.split('(\n)', ttext): if part == '\n': yield line line = [] col = 0 mark_end = False elif part == '': mark_end = False elif ttype in ws_tokens: mark_end = False else: if mark_start and scol > col: line.append(("ws", u" " * (scol - col))) mark_start = False tok_class = tokenize.tok_name.get(ttype, 'xx').lower()[:3] if ttype == token.NAME and keyword.iskeyword(ttext): tok_class = "key" line.append((tok_class, part)) mark_end = True scol = 0 if mark_end: col = ecol if line: yield line class CachedTokenizer(object): """A one-element cache around tokenize.generate_tokens. When reporting, coverage.py tokenizes files twice, once to find the structure of the file, and once to syntax-color it. Tokenizing is expensive, and easily cached. This is a one-element cache so that our twice-in-a-row tokenizing doesn't actually tokenize twice. 
""" def __init__(self): self.last_text = None self.last_tokens = None @contract(text='unicode') def generate_tokens(self, text): """A stand-in for `tokenize.generate_tokens`.""" if text != self.last_text: self.last_text = text readline = iternext(text.splitlines(True)) self.last_tokens = list(tokenize.generate_tokens(readline)) return self.last_tokens # Create our generate_tokens cache as a callable replacement function. generate_tokens = CachedTokenizer().generate_tokens COOKIE_RE = re.compile(r"^[ \t]*#.*coding[:=][ \t]*([-\w.]+)", flags=re.MULTILINE) @contract(source='bytes') def _source_encoding_py2(source): """Determine the encoding for `source`, according to PEP 263. `source` is a byte string, the text of the program. Returns a string, the name of the encoding. """ assert isinstance(source, bytes) # Do this so the detect_encode code we copied will work. readline = iternext(source.splitlines(True)) # This is mostly code adapted from Py3.2's tokenize module. def _get_normal_name(orig_enc): """Imitates get_normal_name in tokenizer.c.""" # Only care about the first 12 characters. enc = orig_enc[:12].lower().replace("_", "-") if re.match(r"^utf-8($|-)", enc): return "utf-8" if re.match(r"^(latin-1|iso-8859-1|iso-latin-1)($|-)", enc): return "iso-8859-1" return orig_enc # From detect_encode(): # It detects the encoding from the presence of a UTF-8 BOM or an encoding # cookie as specified in PEP-0263. If both a BOM and a cookie are present, # but disagree, a SyntaxError will be raised. If the encoding cookie is an # invalid charset, raise a SyntaxError. Note that if a UTF-8 BOM is found, # 'utf-8-sig' is returned. # If no encoding is specified, then the default will be returned. default = 'ascii' bom_found = False encoding = None def read_or_stop(): """Get the next source line, or ''.""" try: return readline() except StopIteration: return '' def find_cookie(line): """Find an encoding cookie in `line`.""" try: line_string = line.decode('ascii') except UnicodeDecodeError: return None matches = COOKIE_RE.findall(line_string) if not matches: return None encoding = _get_normal_name(matches[0]) try: codec = codecs.lookup(encoding) except LookupError: # This behavior mimics the Python interpreter raise SyntaxError("unknown encoding: " + encoding) if bom_found: # codecs in 2.3 were raw tuples of functions, assume the best. codec_name = getattr(codec, 'name', encoding) if codec_name != 'utf-8': # This behavior mimics the Python interpreter raise SyntaxError('encoding problem: utf-8') encoding += '-sig' return encoding first = read_or_stop() if first.startswith(codecs.BOM_UTF8): bom_found = True first = first[3:] default = 'utf-8-sig' if not first: return default encoding = find_cookie(first) if encoding: return encoding second = read_or_stop() if not second: return default encoding = find_cookie(second) if encoding: return encoding return default @contract(source='bytes') def _source_encoding_py3(source): """Determine the encoding for `source`, according to PEP 263. `source` is a byte string: the text of the program. Returns a string, the name of the encoding. """ readline = iternext(source.splitlines(True)) return tokenize.detect_encoding(readline)[0] if env.PY3: source_encoding = _source_encoding_py3 else: source_encoding = _source_encoding_py2 @contract(source='unicode') def compile_unicode(source, filename, mode): """Just like the `compile` builtin, but works on any Unicode string. 
Python 2's compile() builtin has a stupid restriction: if the source string is Unicode, then it may not have a encoding declaration in it. Why not? Who knows! It also decodes to utf8, and then tries to interpret those utf8 bytes according to the encoding declaration. Why? Who knows! This function neuters the coding declaration, and compiles it. """ source = neuter_encoding_declaration(source) if env.PY2 and isinstance(filename, unicode_class): filename = filename.encode(sys.getfilesystemencoding(), "replace") code = compile(source, filename, mode) return code @contract(source='unicode', returns='unicode') def neuter_encoding_declaration(source): """Return `source`, with any encoding declaration neutered.""" if COOKIE_RE.search(source): source_lines = source.splitlines(True) for lineno in range(min(2, len(source_lines))): source_lines[lineno] = COOKIE_RE.sub("# (deleted declaration)", source_lines[lineno]) source = "".join(source_lines) return source
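# Standalone sketch, using only the standard library, of the token
# classification idea behind source_token_lines() above: tokenize a tiny source
# string and map each token to the same three-letter class names (with keywords
# forced to "key"), without the whitespace and line bookkeeping the real
# function performs.
import io
import keyword
import token
import tokenize

source = "def hello():\n    return 42\n"
for tok in tokenize.generate_tokens(io.StringIO(source).readline):
    tok_class = tokenize.tok_name.get(tok.type, "xx").lower()[:3]
    if tok.type == token.NAME and keyword.iskeyword(tok.string):
        tok_class = "key"
    print(tok_class, repr(tok.string))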
import asyncio

from homeassistant.core import callback


async def async_pulse(hass, ihc_controller, ihc_id: int):
    """Send a short on/off pulse to an IHC controller resource."""
    await async_set_bool(hass, ihc_controller, ihc_id, True)
    await asyncio.sleep(0.1)
    await async_set_bool(hass, ihc_controller, ihc_id, False)


@callback
def async_set_bool(hass, ihc_controller, ihc_id: int, value: bool):
    """Set a bool value on an IHC controller resource."""
    return hass.async_add_executor_job(
        ihc_controller.set_runtime_value_bool, ihc_id, value
    )


@callback
def async_set_int(hass, ihc_controller, ihc_id: int, value: int):
    """Set an int value on an IHC controller resource."""
    return hass.async_add_executor_job(
        ihc_controller.set_runtime_value_int, ihc_id, value
    )
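# Standalone asyncio sketch of the pulse pattern implemented by async_pulse()
# above, with a made-up in-memory controller instead of a real IHC controller
# so it can run outside Home Assistant.
import asyncio


class FakeController:
    """Hypothetical stand-in that records the values written to it."""

    def __init__(self):
        self.values = []

    def set_runtime_value_bool(self, ihc_id, value):
        self.values.append((ihc_id, value))


async def pulse(controller, ihc_id):
    loop = asyncio.get_running_loop()
    # Mirror async_pulse: switch on, wait ~0.1 s, switch off, running the
    # blocking setter in the default executor much like
    # hass.async_add_executor_job does.
    await loop.run_in_executor(None, controller.set_runtime_value_bool, ihc_id, True)
    await asyncio.sleep(0.1)
    await loop.run_in_executor(None, controller.set_runtime_value_bool, ihc_id, False)


controller = FakeController()
asyncio.run(pulse(controller, 1234))
print(controller.values)   # [(1234, True), (1234, False)]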
import logging import voluptuous as vol from homeassistant.components.climate import PLATFORM_SCHEMA, ClimateEntity from homeassistant.components.climate.const import ( ATTR_FAN_MODE, ATTR_HVAC_MODE, ATTR_PRESET_MODE, ATTR_SWING_MODE, HVAC_MODE_COOL, HVAC_MODE_DRY, HVAC_MODE_FAN_ONLY, HVAC_MODE_HEAT, HVAC_MODE_HEAT_COOL, HVAC_MODE_OFF, PRESET_AWAY, PRESET_BOOST, PRESET_ECO, PRESET_NONE, SUPPORT_FAN_MODE, SUPPORT_PRESET_MODE, SUPPORT_SWING_MODE, SUPPORT_TARGET_TEMPERATURE, ) from homeassistant.const import ATTR_TEMPERATURE, CONF_HOST, CONF_NAME, TEMP_CELSIUS import homeassistant.helpers.config_validation as cv from . import DOMAIN as DAIKIN_DOMAIN from .const import ( ATTR_INSIDE_TEMPERATURE, ATTR_OUTSIDE_TEMPERATURE, ATTR_STATE_OFF, ATTR_STATE_ON, ATTR_TARGET_TEMPERATURE, ) _LOGGER = logging.getLogger(__name__) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_NAME): cv.string} ) HA_STATE_TO_DAIKIN = { HVAC_MODE_FAN_ONLY: "fan", HVAC_MODE_DRY: "dry", HVAC_MODE_COOL: "cool", HVAC_MODE_HEAT: "hot", HVAC_MODE_HEAT_COOL: "auto", HVAC_MODE_OFF: "off", } DAIKIN_TO_HA_STATE = { "fan": HVAC_MODE_FAN_ONLY, "dry": HVAC_MODE_DRY, "cool": HVAC_MODE_COOL, "hot": HVAC_MODE_HEAT, "auto": HVAC_MODE_HEAT_COOL, "off": HVAC_MODE_OFF, } HA_PRESET_TO_DAIKIN = { PRESET_AWAY: "on", PRESET_NONE: "off", PRESET_BOOST: "powerful", PRESET_ECO: "econo", } HA_ATTR_TO_DAIKIN = { ATTR_PRESET_MODE: "en_hol", ATTR_HVAC_MODE: "mode", ATTR_FAN_MODE: "f_rate", ATTR_SWING_MODE: "f_dir", ATTR_INSIDE_TEMPERATURE: "htemp", ATTR_OUTSIDE_TEMPERATURE: "otemp", ATTR_TARGET_TEMPERATURE: "stemp", } DAIKIN_ATTR_ADVANCED = "adv" async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Old way of setting up the Daikin HVAC platform. Can only be called when a user accidentally mentions the platform in their config. But even in that case it would have been ignored. 
""" async def async_setup_entry(hass, entry, async_add_entities): """Set up Daikin climate based on config_entry.""" daikin_api = hass.data[DAIKIN_DOMAIN].get(entry.entry_id) async_add_entities([DaikinClimate(daikin_api)], update_before_add=True) class DaikinClimate(ClimateEntity): """Representation of a Daikin HVAC.""" def __init__(self, api): """Initialize the climate device.""" self._api = api self._list = { ATTR_HVAC_MODE: list(HA_STATE_TO_DAIKIN), ATTR_FAN_MODE: self._api.device.fan_rate, ATTR_SWING_MODE: self._api.device.swing_modes, } self._supported_features = SUPPORT_TARGET_TEMPERATURE if ( self._api.device.support_away_mode or self._api.device.support_advanced_modes ): self._supported_features |= SUPPORT_PRESET_MODE if self._api.device.support_fan_rate: self._supported_features |= SUPPORT_FAN_MODE if self._api.device.support_swing_mode: self._supported_features |= SUPPORT_SWING_MODE async def _set(self, settings): """Set device settings using API.""" values = {} for attr in [ATTR_TEMPERATURE, ATTR_FAN_MODE, ATTR_SWING_MODE, ATTR_HVAC_MODE]: value = settings.get(attr) if value is None: continue daikin_attr = HA_ATTR_TO_DAIKIN.get(attr) if daikin_attr is not None: if attr == ATTR_HVAC_MODE: values[daikin_attr] = HA_STATE_TO_DAIKIN[value] elif value in self._list[attr]: values[daikin_attr] = value.lower() else: _LOGGER.error("Invalid value %s for %s", attr, value) # temperature elif attr == ATTR_TEMPERATURE: try: values[HA_ATTR_TO_DAIKIN[ATTR_TARGET_TEMPERATURE]] = str(int(value)) except ValueError: _LOGGER.error("Invalid temperature %s", value) if values: await self._api.device.set(values) @property def supported_features(self): """Return the list of supported features.""" return self._supported_features @property def name(self): """Return the name of the thermostat, if any.""" return self._api.name @property def unique_id(self): """Return a unique ID.""" return self._api.device.mac @property def temperature_unit(self): """Return the unit of measurement which this thermostat uses.""" return TEMP_CELSIUS @property def current_temperature(self): """Return the current temperature.""" return self._api.device.inside_temperature @property def target_temperature(self): """Return the temperature we try to reach.""" return self._api.device.target_temperature @property def target_temperature_step(self): """Return the supported step of target temperature.""" return 1 async def async_set_temperature(self, **kwargs): """Set new target temperature.""" await self._set(kwargs) @property def hvac_mode(self): """Return current operation ie. 
heat, cool, idle.""" daikin_mode = self._api.device.represent(HA_ATTR_TO_DAIKIN[ATTR_HVAC_MODE])[1] return DAIKIN_TO_HA_STATE.get(daikin_mode, HVAC_MODE_HEAT_COOL) @property def hvac_modes(self): """Return the list of available operation modes.""" return self._list.get(ATTR_HVAC_MODE) async def async_set_hvac_mode(self, hvac_mode): """Set HVAC mode.""" await self._set({ATTR_HVAC_MODE: hvac_mode}) @property def fan_mode(self): """Return the fan setting.""" return self._api.device.represent(HA_ATTR_TO_DAIKIN[ATTR_FAN_MODE])[1].title() async def async_set_fan_mode(self, fan_mode): """Set fan mode.""" await self._set({ATTR_FAN_MODE: fan_mode}) @property def fan_modes(self): """List of available fan modes.""" return self._list.get(ATTR_FAN_MODE) @property def swing_mode(self): """Return the fan setting.""" return self._api.device.represent(HA_ATTR_TO_DAIKIN[ATTR_SWING_MODE])[1].title() async def async_set_swing_mode(self, swing_mode): """Set new target temperature.""" await self._set({ATTR_SWING_MODE: swing_mode}) @property def swing_modes(self): """List of available swing modes.""" return self._list.get(ATTR_SWING_MODE) @property def preset_mode(self): """Return the preset_mode.""" if ( self._api.device.represent(HA_ATTR_TO_DAIKIN[ATTR_PRESET_MODE])[1] == HA_PRESET_TO_DAIKIN[PRESET_AWAY] ): return PRESET_AWAY if ( HA_PRESET_TO_DAIKIN[PRESET_BOOST] in self._api.device.represent(DAIKIN_ATTR_ADVANCED)[1] ): return PRESET_BOOST if ( HA_PRESET_TO_DAIKIN[PRESET_ECO] in self._api.device.represent(DAIKIN_ATTR_ADVANCED)[1] ): return PRESET_ECO return PRESET_NONE async def async_set_preset_mode(self, preset_mode): """Set preset mode.""" if preset_mode == PRESET_AWAY: await self._api.device.set_holiday(ATTR_STATE_ON) elif preset_mode == PRESET_BOOST: await self._api.device.set_advanced_mode( HA_PRESET_TO_DAIKIN[PRESET_BOOST], ATTR_STATE_ON ) elif preset_mode == PRESET_ECO: await self._api.device.set_advanced_mode( HA_PRESET_TO_DAIKIN[PRESET_ECO], ATTR_STATE_ON ) else: if self.preset_mode == PRESET_AWAY: await self._api.device.set_holiday(ATTR_STATE_OFF) elif self.preset_mode == PRESET_BOOST: await self._api.device.set_advanced_mode( HA_PRESET_TO_DAIKIN[PRESET_BOOST], ATTR_STATE_OFF ) elif self.preset_mode == PRESET_ECO: await self._api.device.set_advanced_mode( HA_PRESET_TO_DAIKIN[PRESET_ECO], ATTR_STATE_OFF ) @property def preset_modes(self): """List of available preset modes.""" ret = [PRESET_NONE] if self._api.device.support_away_mode: ret.append(PRESET_AWAY) if self._api.device.support_advanced_modes: ret += [PRESET_ECO, PRESET_BOOST] return ret async def async_update(self): """Retrieve latest state.""" await self._api.async_update() async def async_turn_on(self): """Turn device on.""" await self._api.device.set({}) async def async_turn_off(self): """Turn device off.""" await self._api.device.set( {HA_ATTR_TO_DAIKIN[ATTR_HVAC_MODE]: HA_STATE_TO_DAIKIN[HVAC_MODE_OFF]} ) @property def device_info(self): """Return a device description for device registry.""" return self._api.device_info
from __future__ import print_function import contextlib import fnmatch import jinja2 import os import re import sys import anyconfig import colorama import yaml from molecule.logger import get_logger LOG = get_logger(__name__) MERGE_STRATEGY = anyconfig.MS_DICTS class SafeDumper(yaml.SafeDumper): def increase_indent(self, flow=False, indentless=False): return super(SafeDumper, self).increase_indent(flow, False) def print_debug(title, data): title = 'DEBUG: {}'.format(title) title = [ colorama.Back.WHITE, colorama.Style.BRIGHT, colorama.Fore.BLACK, title, colorama.Fore.RESET, colorama.Back.RESET, colorama.Style.RESET_ALL ] print(''.join(title)) data = [ colorama.Fore.BLACK, colorama.Style.BRIGHT, data, colorama.Style.RESET_ALL, colorama.Fore.RESET ] print(''.join(data)) def print_environment_vars(env): """ Print ``Ansible`` and ``Molecule`` environment variables and returns None. :param env: A dict containing the shell's environment as collected by ``os.environ``. :return: None """ ansible_env = {k: v for (k, v) in env.items() if 'ANSIBLE_' in k} print_debug('ANSIBLE ENVIRONMENT', safe_dump(ansible_env)) molecule_env = {k: v for (k, v) in env.items() if 'MOLECULE_' in k} print_debug('MOLECULE ENVIRONMENT', safe_dump(molecule_env)) combined_env = ansible_env.copy() combined_env.update(molecule_env) print_debug( 'SHELL REPLAY', " ".join( ["{}={}".format(k, v) for (k, v) in sorted(combined_env.items())])) print() def sysexit(code=1): sys.exit(code) def sysexit_with_message(msg, code=1): LOG.critical(msg) sysexit(code) def run_command(cmd, debug=False): """ Execute the given command and returns None. :param cmd: A ``sh.Command`` object to execute. :param debug: An optional bool to toggle debug output. :return: ``sh`` object """ if debug: # WARN(retr0h): Uses an internal ``sh`` data structure to dig # the environment out of the ``sh.command`` object. print_environment_vars(cmd._partial_call_args.get('env', {})) print_debug('COMMAND', str(cmd)) print() return cmd(_truncate_exc=False) def os_walk(directory, pattern, excludes=[]): for root, dirs, files in os.walk(directory, topdown=True): dirs[:] = [d for d in dirs if d not in excludes] for basename in files: if fnmatch.fnmatch(basename, pattern): filename = os.path.join(root, basename) yield filename def render_template(template, **kwargs): t = jinja2.Environment() t = t.from_string(template) return t.render(kwargs) def write_file(filename, content): """ Writes a file with the given filename and content and returns None. :param filename: A string containing the target filename. :param content: A string containing the data to be written. :return: None """ with open_file(filename, 'w') as f: f.write(content) file_prepender(filename) def molecule_prepender(content): return '# Molecule managed\n\n' + content def file_prepender(filename): """ Prepend an informational header on files managed by Molecule and returns None. :param filename: A string containing the target filename. :return: None """ with open_file(filename, 'r+') as f: content = f.read() f.seek(0, 0) f.write(molecule_prepender(content)) def safe_dump(data): """ Dump the provided data to a YAML document and returns a string. :param data: A string containing an absolute path to the file to parse. :return: str """ # TODO(retr0h): Do we need to encode? # yaml.dump(data) produces the document as a str object in both python # 2 and 3. return yaml.dump( data, Dumper=SafeDumper, default_flow_style=False, explicit_start=True) def safe_load(string): """ Parse the provided string returns a dict. 
:param string: A string to be parsed. :return: dict """ try: return yaml.safe_load(string) or {} except yaml.scanner.ScannerError as e: sysexit_with_message(str(e)) def safe_load_file(filename): """ Parse the provided YAML file and returns a dict. :param filename: A string containing an absolute path to the file to parse. :return: dict """ with open_file(filename) as stream: return safe_load(stream) @contextlib.contextmanager def open_file(filename, mode='r'): """ Open the provide file safely and returns a file type. :param filename: A string containing an absolute path to the file to open. :param mode: A string describing the way in which the file will be used. :return: file type """ with open(filename, mode) as stream: yield stream def instance_with_scenario_name(instance_name, scenario_name): return '{}-{}'.format(instance_name, scenario_name) def strip_ansi_escape(string): return re.sub(r'\x1b[^m]*m', '', string) def strip_ansi_color(s): # Taken from tabulate invisible_codes = re.compile(r'\x1b\[\d*m') return re.sub(invisible_codes, '', s) def verbose_flag(options): verbose = 'v' verbose_flag = [] for i in range(0, 3): if options.get(verbose): verbose_flag = ['-{}'.format(verbose)] del options[verbose] if options.get('verbose'): del options['verbose'] break verbose = verbose + 'v' return verbose_flag def filter_verbose_permutation(options): return {k: options[k] for k in options if not re.match('^[v]+$', k)} def title(word): return ' '.join(x.capitalize() or '_' for x in word.split('_')) def abs_path(path): if path: return os.path.abspath(path) def camelize(string): # NOTE(retr0h): Taken from jpvanhal/inflection # https://github.com/jpvanhal/inflection return re.sub(r"(?:^|_)(.)", lambda m: m.group(1).upper(), string) def underscore(string): # NOTE(retr0h): Taken from jpvanhal/inflection # https://github.com/jpvanhal/inflection string = re.sub(r"([A-Z]+)([A-Z][a-z])", r'\1_\2', string) string = re.sub(r"([a-z\d])([A-Z])", r'\1_\2', string) string = string.replace("-", "_") return string.lower() def merge_dicts(a, b): """ Merges the values of B into A and returns a mutated dict A. :: dict a b: - c: 0 - c: 2 d: e: "aaa" f: 3 dict b a: 1 b: - c: 3 d: e: "bbb" Will give an object such as:: {'a': 1, 'b': [{'c': 3}], 'd': {'e': "bbb", 'f': 3}} :param a: the target dictionary :param b: the dictionary to import :return: dict """ anyconfig.merge(a, b, ac_merge=MERGE_STRATEGY) return a def memoize(function): memo = {} def wrapper(*args, **kwargs): if args not in memo: rv = function(*args, **kwargs) memo[args] = rv return rv return memo[args] return wrapper
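# Minimal usage sketch for a few of the pure-Python helpers defined above (run
# in the same module so they are in scope); the sample strings and the
# slow_square function are made up for illustration.
assert camelize("molecule_scenario") == "MoleculeScenario"
assert underscore("MoleculeScenario") == "molecule_scenario"
assert title("scenario_name") == "Scenario Name"


@memoize
def slow_square(x):
    print("computing", x)
    return x * x


slow_square(4)   # prints "computing 4" and caches the result
slow_square(4)   # served from the cache: no print, same value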
import json import os import pkgutil import re from unittest import TestCase import numpy as np import pandas as pd from scattertext import whitespace_nlp, CorpusFromParsedDocuments, ParsedCorpus from scattertext.TermDocMatrixFactory import TermDocMatrixFactory from scattertext.test.test_TermDocMat import get_hamlet_docs, get_hamlet_snippet_binary_category from scattertext.test.test_corpusFromPandas import get_docs_categories def clean_function_factory(): only_speaker_text_re = re.compile( r'((^|\n)((ANNOUNCER|AUDIENCE MEMBERS?): .+)($|\n)|(\n|^)((([A-Z\.()\- ]+): ))|\(.+\) *)', re.M) assert only_speaker_text_re.sub('', 'AUDIENCE MEMBERS: (Chanting.) USA! USA! USA! USA!') == '' assert only_speaker_text_re.sub('', 'AUDIENCE MEMBER: (Chanting.) USA! USA! USA! USA!') == '' assert only_speaker_text_re.sub('', 'ANNOUNCER: (Chanting.) USA! USA! USA! USA!') == '' assert only_speaker_text_re.sub('', 'TOM SMITH: (Chanting.) USA! USA! USA! USA!') == 'USA! USA! USA! USA!' assert only_speaker_text_re.sub('', 'DONALD TRUMP: blah blah blah!') == 'blah blah blah!' assert only_speaker_text_re.sub('', 'HILLARY CLINTON: (something parenthetical) blah blah blah!') == 'blah blah blah!' assert only_speaker_text_re.sub \ ('', 'ANNOUNCER: (Chanting.) USA! USA! USA! USA!\nTOM SMITH: (Chanting.) ONLY INCLUDE THIS! ONLY KEEP THIS! \nAUDIENCE MEMBER: (Chanting.) USA! USA! USA! USA!').strip() \ == 'ONLY INCLUDE THIS! ONLY KEEP THIS!' def clean_document(text): return only_speaker_text_re.sub('', text) return clean_document def convention_speech_iter(): relative_path = os.path.join('../scattertext/data', 'political_data.json') try: cwd = os.path.dirname(os.path.abspath(__file__)) path = os.path.join(cwd, relative_path) return json.load(open(path)) except: return json.loads(pkgutil.get_data('scattertext', relative_path).decode('utf-8')) def iter_party_speech_pairs(): for speaker_obj in convention_speech_iter(): political_party = speaker_obj['name'] for speech in speaker_obj['speeches']: yield political_party, speech def build_term_doc_matrix(): term_doc_matrix = TermDocMatrixFactory( category_text_iter=iter_party_speech_pairs(), clean_function=clean_function_factory(), nlp=whitespace_nlp ).build() return term_doc_matrix class TestCorpusFromParsedDocuments(TestCase): @classmethod def setUp(cls): cls.categories, cls.documents = get_docs_categories() cls.parsed_docs = [] for doc in cls.documents: cls.parsed_docs.append(whitespace_nlp(doc)) cls.df = pd.DataFrame({'category': cls.categories, 'parsed': cls.parsed_docs}) cls.corpus_fact = CorpusFromParsedDocuments(cls.df, 'category', 'parsed') def test_same_as_term_doc_matrix(self): term_doc_matrix = build_term_doc_matrix() corpus = self._make_political_corpus() self.assertEqual(term_doc_matrix._X.shape, corpus._X.shape) self.assertEqual((corpus._X != term_doc_matrix._X).nnz, 0) corpus_scores = corpus.get_scaled_f_scores('democrat') term_doc_matrix_scores = corpus.get_scaled_f_scores('democrat') self.assertTrue(np.array_equal(term_doc_matrix_scores, corpus_scores)) def _make_political_corpus(self): clean = clean_function_factory() data = [] for party, speech in iter_party_speech_pairs(): cleaned_speech = clean(speech) if cleaned_speech and cleaned_speech != '': parsed_speech = whitespace_nlp(cleaned_speech) data.append({'party': party, 'text': parsed_speech}) corpus = CorpusFromParsedDocuments(pd.DataFrame(data), category_col='party', parsed_col='text').build() return corpus def test_get_y_and_populate_category_idx_store(self): self.corpus_fact.build() self.assertEqual([0, 0, 
0, 0, 1, 1, 1, 1, 1, 2], list(self.corpus_fact._y)) self.assertEqual([(0, 'hamlet'), (1, 'jay-z/r. kelly'), (2, '???')], list(sorted(list(self.corpus_fact._category_idx_store.items())))) def test_get_term_idx_and_x(self): docs = [whitespace_nlp('aa aa bb.'), whitespace_nlp('bb aa a.')] df = pd.DataFrame({'category': ['a', 'b'], 'parsed': docs}) # corpus_fact = CorpusFromParsedDocuments(convention_df, 'category', 'parsed') corpus_fact = CorpusFromParsedDocuments(df, category_col='category', parsed_col='parsed') corpus = corpus_fact.build() kvs = list(corpus_fact._term_idx_store.items()) keys = [k for k, v in kvs] values = [v for k, v in kvs] self.assertEqual(sorted(keys), list(range(7))) self.assertEqual(sorted(values), ['a', 'aa', 'aa a', 'aa aa', 'aa bb', 'bb', 'bb aa']) def assert_word_in_doc_cnt(doc, word, count): self.assertEqual(corpus_fact._X[doc, corpus_fact._term_idx_store.getidx(word)], count) assert_word_in_doc_cnt(0, 'aa', 2) assert_word_in_doc_cnt(0, 'bb', 1) assert_word_in_doc_cnt(0, 'aa aa', 1) assert_word_in_doc_cnt(0, 'aa bb', 1) assert_word_in_doc_cnt(0, 'bb aa', 0) assert_word_in_doc_cnt(1, 'bb', 1) assert_word_in_doc_cnt(1, 'aa', 1) assert_word_in_doc_cnt(1, 'a', 1) assert_word_in_doc_cnt(1, 'bb aa', 1) assert_word_in_doc_cnt(1, 'aa aa', 0) assert_word_in_doc_cnt(1, 'aa a', 1) self.assertTrue(isinstance(corpus, ParsedCorpus)) def test_hamlet(self): raw_docs = get_hamlet_docs() categories = [get_hamlet_snippet_binary_category(doc) for doc in raw_docs] docs = [whitespace_nlp(doc) for doc in raw_docs] df = pd.DataFrame({'category': categories, 'parsed': docs}) corpus_fact = CorpusFromParsedDocuments(df, 'category', 'parsed') corpus = corpus_fact.build() tdf = corpus.get_term_freq_df() self.assertEqual(list(tdf.loc['play']), [37, 5]) self.assertFalse(any(corpus.search('play').apply(lambda x: 'plfay' in str(x['parsed']), axis=1))) self.assertTrue(all(corpus.search('play').apply(lambda x: 'play' in str(x['parsed']), axis=1))) # !!! to do verify term doc matrix play_term_idx = corpus_fact._term_idx_store.getidx('play') play_X = corpus_fact._X.todok()[:, play_term_idx] self.assertEqual(play_X.sum(), 37 + 5)
from teslajsonpy import TeslaException from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.tesla.const import ( CONF_WAKE_ON_START, DEFAULT_SCAN_INTERVAL, DEFAULT_WAKE_ON_START, DOMAIN, MIN_SCAN_INTERVAL, ) from homeassistant.const import ( CONF_ACCESS_TOKEN, CONF_PASSWORD, CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_USERNAME, HTTP_NOT_FOUND, ) from tests.async_mock import patch from tests.common import MockConfigEntry async def test_form(hass): """Test we get the form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} with patch( "homeassistant.components.tesla.config_flow.TeslaAPI.connect", return_value=("test-refresh-token", "test-access-token"), ), patch( "homeassistant.components.tesla.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.tesla.async_setup_entry", return_value=True ) as mock_setup_entry: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {CONF_PASSWORD: "test", CONF_USERNAME: "[email protected]"} ) await hass.async_block_till_done() assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result2["title"] == "[email protected]" assert result2["data"] == { CONF_TOKEN: "test-refresh-token", CONF_ACCESS_TOKEN: "test-access-token", } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass): """Test we handle invalid auth.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) with patch( "homeassistant.components.tesla.config_flow.TeslaAPI.connect", side_effect=TeslaException(401), ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {CONF_USERNAME: "test-username", CONF_PASSWORD: "test-password"}, ) assert result2["type"] == "form" assert result2["errors"] == {"base": "invalid_auth"} async def test_form_cannot_connect(hass): """Test we handle cannot connect error.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) with patch( "homeassistant.components.tesla.config_flow.TeslaAPI.connect", side_effect=TeslaException(code=HTTP_NOT_FOUND), ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {CONF_PASSWORD: "test-password", CONF_USERNAME: "test-username"}, ) assert result2["type"] == "form" assert result2["errors"] == {"base": "cannot_connect"} async def test_form_repeat_identifier(hass): """Test we handle repeat identifiers.""" entry = MockConfigEntry(domain=DOMAIN, title="test-username", data={}, options=None) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) with patch( "homeassistant.components.tesla.config_flow.TeslaAPI.connect", return_value=("test-refresh-token", "test-access-token"), ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {CONF_USERNAME: "test-username", CONF_PASSWORD: "test-password"}, ) assert result2["type"] == "form" assert result2["errors"] == {CONF_USERNAME: "already_configured"} async def test_import(hass): """Test import step.""" with patch( "homeassistant.components.tesla.config_flow.TeslaAPI.connect", return_value=("test-refresh-token", "test-access-token"), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data={CONF_PASSWORD: "test-password", CONF_USERNAME: "test-username"}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "test-username" assert result["data"][CONF_ACCESS_TOKEN] == "test-access-token" assert result["data"][CONF_TOKEN] == "test-refresh-token" assert result["description_placeholders"] is None async def test_option_flow(hass): """Test config flow options.""" entry = MockConfigEntry(domain=DOMAIN, data={}, options=None) entry.add_to_hass(hass) result = await hass.config_entries.options.async_init(entry.entry_id) assert result["type"] == "form" assert result["step_id"] == "init" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 350, CONF_WAKE_ON_START: True}, ) assert result["type"] == "create_entry" assert result["data"] == {CONF_SCAN_INTERVAL: 350, CONF_WAKE_ON_START: True} async def test_option_flow_defaults(hass): """Test config flow options.""" entry = MockConfigEntry(domain=DOMAIN, data={}, options=None) entry.add_to_hass(hass) result = await hass.config_entries.options.async_init(entry.entry_id) assert result["type"] == "form" assert result["step_id"] == "init" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={} ) assert result["type"] == "create_entry" assert result["data"] == { CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL, CONF_WAKE_ON_START: DEFAULT_WAKE_ON_START, } async def test_option_flow_input_floor(hass): """Test config flow options.""" entry = MockConfigEntry(domain=DOMAIN, data={}, options=None) entry.add_to_hass(hass) result = await hass.config_entries.options.async_init(entry.entry_id) assert result["type"] == "form" assert result["step_id"] == "init" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 1} ) assert result["type"] == "create_entry" assert result["data"] == { CONF_SCAN_INTERVAL: MIN_SCAN_INTERVAL, CONF_WAKE_ON_START: DEFAULT_WAKE_ON_START, }
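# --- Parametrized sketch (not part of the original suite) ---
# The three options-flow tests above share identical setup; this sketch shows
# how they could be collapsed with pytest.mark.parametrize. It assumes pytest
# is available, as it is for other Home Assistant test modules, and reuses the
# constants imported at the top of this file.
import pytest


@pytest.mark.parametrize(
    "user_input,expected",
    [
        (
            {CONF_SCAN_INTERVAL: 350, CONF_WAKE_ON_START: True},
            {CONF_SCAN_INTERVAL: 350, CONF_WAKE_ON_START: True},
        ),
        (
            {},
            {
                CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL,
                CONF_WAKE_ON_START: DEFAULT_WAKE_ON_START,
            },
        ),
        (
            {CONF_SCAN_INTERVAL: 1},
            {
                CONF_SCAN_INTERVAL: MIN_SCAN_INTERVAL,
                CONF_WAKE_ON_START: DEFAULT_WAKE_ON_START,
            },
        ),
    ],
)
async def test_option_flow_parametrized(hass, user_input, expected):
    """Sketch: exercise the options flow with several inputs in one test."""
    entry = MockConfigEntry(domain=DOMAIN, data={}, options=None)
    entry.add_to_hass(hass)

    result = await hass.config_entries.options.async_init(entry.entry_id)
    assert result["type"] == "form"
    assert result["step_id"] == "init"

    result = await hass.config_entries.options.async_configure(
        result["flow_id"], user_input=user_input
    )
    assert result["type"] == "create_entry"
    assert result["data"] == expected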
import base64 import hashlib import itertools import os import shutil from tempfile import NamedTemporaryFile import dateutil.parser from decorator import decorator from google.protobuf.message import DecodeError import mutagen import json from gmusicapi.exceptions import CallFailure from gmusicapi.protocol import upload_pb2, locker_pb2, download_pb2 from gmusicapi.protocol.shared import Call, ParseException, authtypes from gmusicapi.utils import utils log = utils.DynamicClientLogger(__name__) _android_url = 'https://android.clients.google.com/upsj/' @decorator def pb(f, *args, **kwargs): """Decorator to serialize a protobuf message.""" msg = f(*args, **kwargs) return msg.SerializeToString() class MmCall(Call): """Abstract base for Music Manager calls.""" static_method = 'POST' # remember that setting this in a subclass overrides, not merges # static + dynamic does merge, though static_headers = {'User-agent': 'Music Manager (1, 0, 55, 7425 HTTPS - Windows)'} required_auth = authtypes(oauth=True) # this is a shared union class that has all specific upload types # nearly all of the proto calls return a message of this form res_msg_type = upload_pb2.UploadResponse @classmethod def parse_response(cls, response): """Parse the cls.res_msg_type proto msg.""" res_msg = cls.res_msg_type() try: res_msg.ParseFromString(response.content) except DecodeError as e: raise ParseException(str(e)) from e return res_msg @classmethod def filter_response(cls, msg): return Call._filter_proto(msg) class GetClientState(MmCall): static_url = _android_url + 'clientstate' @classmethod @pb def dynamic_data(cls, uploader_id): """ :param uploader_id: MM uses host MAC address """ req_msg = upload_pb2.ClientStateRequest() req_msg.uploader_id = uploader_id return req_msg class AuthenticateUploader(MmCall): """Sent to auth, reauth, or register our upload client.""" static_url = _android_url + 'upauth' @classmethod def check_success(cls, response, msg): if msg.HasField('auth_status') and msg.auth_status != upload_pb2.UploadResponse.OK: enum_desc = upload_pb2._UPLOADRESPONSE.enum_types[1] res_name = enum_desc.values_by_number[msg.auth_status].name raise CallFailure( "Upload auth error code %s: %s." " See http://goo.gl/O6xe7 for more information. " % ( msg.auth_status, res_name ), cls.__name__ ) @classmethod @pb def dynamic_data(cls, uploader_id, uploader_friendly_name): """ :param uploader_id: MM uses host MAC address :param uploader_friendly_name: MM uses hostname """ req_msg = upload_pb2.UpAuthRequest() req_msg.uploader_id = uploader_id req_msg.friendly_name = uploader_friendly_name return req_msg class UploadMetadata(MmCall): static_url = _android_url + 'metadata' static_params = {'version': 1} @staticmethod def get_track_clientid(filepath): # The id is a 22 char hash of the file. It is found by: # stripping tags # getting an md5 sum # converting sum to base64 # removing trailing === m = hashlib.md5() try: ext = os.path.splitext(filepath)[1] # delete=False is needed because the NamedTemporaryFile # can't be opened by name a second time on Windows otherwise. with NamedTemporaryFile(suffix=ext, delete=False) as temp: shutil.copy(filepath, temp.name) audio = mutagen.File(temp.name, easy=True) audio.delete() audio.save() while True: data = temp.read(65536) if not data: break m.update(data) finally: try: os.remove(temp.name) except OSError: log.exception("Could not remove temporary file %r", temp.name) return base64.encodestring(m.digest())[:-3] # these collections define how locker_pb2.Track fields align to mutagen's. 
shared_fields = ('album', 'artist', 'composer', 'genre') field_map = { # mutagen: Track 'albumartist': 'album_artist', 'bpm': 'beats_per_minute', } count_fields = { # mutagen: (part, total) 'discnumber': ('disc_number', 'total_disc_count'), 'tracknumber': ('track_number', 'total_track_count'), } @classmethod def fill_track_info(cls, filepath): """Given the path and contents of a track, return a filled locker_pb2.Track. On problems, raise ValueError.""" track = locker_pb2.Track() # The track protobuf message supports an additional metadata list field. # ALBUM_ART_HASH has been observed being sent in this field so far. # Append locker_pb2.AdditionalMetadata objects to additional_metadata. # AdditionalMetadata objects consist of two fields, 'tag_name' and 'value'. additional_metadata = [] track.client_id = cls.get_track_clientid(filepath) audio = mutagen.File(filepath, easy=True) if audio is None: raise ValueError("could not open to read metadata") elif isinstance(audio, mutagen.asf.ASF): # WMA entries store more info than just the value. # Monkeypatch in a dict {key: value} to keep interface the same for all filetypes. asf_dict = {k: [ve.value for ve in v] for (k, v) in audio.tags.as_dict().items()} audio.tags = asf_dict extension = os.path.splitext(filepath)[1].upper() if isinstance(extension, bytes): extension = extension.decode('utf8') if extension: # Trim leading period if it exists (ie extension not empty). extension = extension[1:] if isinstance(audio, mutagen.mp4.MP4) and ( audio.info.codec == 'alac' or audio.info.codec_description == 'ALAC'): extension = 'ALAC' elif isinstance(audio, mutagen.mp4.MP4) and audio.info.codec_description.startswith('AAC'): extension = 'AAC' if extension.upper() == 'M4B': # M4B are supported by the music manager, and transcoded like normal. extension = 'M4A' if not hasattr(locker_pb2.Track, extension): raise ValueError("unsupported filetype: {0} for file {1}".format(extension, filepath)) track.original_content_type = getattr(locker_pb2.Track, extension) track.estimated_size = os.path.getsize(filepath) track.last_modified_timestamp = int(os.path.getmtime(filepath)) # These are typically zeroed in my examples. track.play_count = 0 track.client_date_added = 0 track.recent_timestamp = 0 track.rating = locker_pb2.Track.NOT_RATED # star rating track.duration_millis = int(audio.info.length * 1000) try: bitrate = audio.info.bitrate // 1000 except AttributeError: # mutagen doesn't provide bitrate for some lossless formats (eg FLAC), so # provide an estimation instead. This shouldn't matter too much; # the bitrate will always be > 320, which is the highest scan and match quality. bitrate = (track.estimated_size * 8) // track.duration_millis track.original_bit_rate = bitrate # Populate metadata. def track_set(field_name, val, msg=track): """Returns result of utils.pb_set and logs on failures. Should be used when setting directly from metadata.""" success = utils.pb_set(msg, field_name, val) if not success: log.info("could not pb_set track.%s = %r for '%r'", field_name, val, filepath) return success # Title is required. # If it's not in the metadata, the filename will be used. if "title" in audio: title = audio['title'][0] if isinstance(title, mutagen.asf.ASFUnicodeAttribute): title = title.value track_set('title', title) else: # Assume ascii or unicode. 
track.title = os.path.basename(filepath) if "date" in audio: date_val = str(audio['date'][0]) try: datetime = dateutil.parser.parse(date_val, fuzzy=True) except (ValueError, TypeError) as e: # TypeError provides compatibility with: # https://bugs.launchpad.net/dateutil/+bug/1247643 log.warning("could not parse date md for '%r': (%s)", filepath, e) else: track_set('year', datetime.year) for null_field in ['artist', 'album']: # If these fields aren't provided, they'll render as "undefined" in the web interface; # see https://github.com/simon-weber/gmusicapi/issues/236. # Defaulting them to an empty string fixes this. if null_field not in audio: track_set(null_field, '') # Mass-populate the rest of the simple fields. # Merge shared and unshared fields into {mutagen: Track}. fields = dict( itertools.chain( ((shared, shared) for shared in cls.shared_fields), cls.field_map.items())) for mutagen_f, track_f in fields.items(): if mutagen_f in audio: track_set(track_f, audio[mutagen_f][0]) for mutagen_f, (track_f, track_total_f) in cls.count_fields.items(): if mutagen_f in audio: numstrs = str(audio[mutagen_f][0]).split("/") track_set(track_f, numstrs[0]) if len(numstrs) == 2 and numstrs[1]: track_set(track_total_f, numstrs[1]) if additional_metadata: track.track_extras.additional_metadata.extend(additional_metadata) return track @classmethod @pb def dynamic_data(cls, tracks, uploader_id, do_not_rematch=False): """ :param tracks: list of filled locker_pb2.Track :param uploader_id: :param do_not_rematch: seems to be ignored """ req_msg = upload_pb2.UploadMetadataRequest() req_msg.track.extend(tracks) for track in req_msg.track: track.do_not_rematch = do_not_rematch req_msg.uploader_id = uploader_id return req_msg class GetUploadJobs(MmCall): # TODO static_url = _android_url + 'getjobs' static_params = {'version': 1} @classmethod def check_success(cls, response, msg): if msg.HasField('getjobs_response') and not msg.getjobs_response.get_tracks_success: raise CallFailure('get_tracks_success == False', cls.__name__) @classmethod @pb def dynamic_data(cls, uploader_id): """ :param uploader_id: MM uses host MAC address """ req_msg = upload_pb2.GetJobsRequest() req_msg.uploader_id = uploader_id return req_msg class GetUploadSession(MmCall): """Called when we want to upload; the server returns the url to use. This is a json call, and doesn't share much with the other calls.""" static_method = 'POST' static_url = 'https://uploadsj.clients.google.com/uploadsj/scottyagent' @classmethod def parse_response(cls, response): return cls._parse_json(response.text) @staticmethod def filter_response(res): return res @staticmethod def dynamic_data(uploader_id, num_already_uploaded, track, filepath, server_id, do_not_rematch=False): """track is a locker_pb2.Track, and the server_id is from a metadata upload.""" # small info goes inline, big things get their own external PUT. # still not sure as to thresholds - I've seen big album art go inline. 
if isinstance(filepath, bytes): filepath = filepath.decode('utf8') inlined = { "title": "jumper-uploader-title-42", "ClientId": track.client_id, "ClientTotalSongCount": "1", # TODO think this is ie "how many will you upload" "CurrentTotalUploadedCount": str(num_already_uploaded), "CurrentUploadingTrack": track.title, "ServerId": server_id, "SyncNow": "true", "TrackBitRate": track.original_bit_rate, "TrackDoNotRematch": str(do_not_rematch).lower(), "UploaderId": uploader_id, } message = { "clientId": "Jumper Uploader", "createSessionRequest": { "fields": [ { "external": { "filename": os.path.basename(filepath), "name": os.path.abspath(filepath), "put": {}, # used to use this; don't see it in examples # "size": track.estimated_size, } } ] }, "protocolVersion": "0.8" } # Insert the inline info. for key in inlined: payload = inlined[key] if not isinstance(payload, str): payload = str(payload) message['createSessionRequest']['fields'].append( { "inlined": { "content": payload, "name": key } } ) return json.dumps(message) @staticmethod def process_session(res): """Return (got_session, error_details). error_details is (should_retry, reason, error_code) or None if got_session.""" if 'sessionStatus' in res: return (True, None) if 'errorMessage' in res: try: # This terribly nested structure is Google's doing. error_code = (res['errorMessage']['additionalInfo'] ['uploader_service.GoogleRupioAdditionalInfo']['completionInfo'] ['customerSpecificInfo']['ResponseCode']) except KeyError: # The returned nested structure is not as expected: cannot get Response Code error_code = None got_session = False if error_code == 503: should_retry = True reason = 'upload servers still syncing' # TODO unsure about these codes elif error_code == 200: should_retry = False reason = 'this song is already uploaded' elif error_code == 404: should_retry = False reason = 'the request was rejected' else: should_retry = True reason = 'the server reported an unknown error' return (got_session, (should_retry, reason, error_code)) return (False, (True, "the server's response could not be understood", None)) class UploadFile(MmCall): """Called after getting a session to actually upload a file.""" # TODO recent protocols use multipart encoding static_method = 'PUT' @classmethod def parse_response(cls, response): return cls._parse_json(response.text) @staticmethod def filter_response(res): return res @staticmethod def dynamic_headers(session_url, content_type, audio): return {'CONTENT-TYPE': content_type} @staticmethod def dynamic_url(session_url, content_type, audio): # this actually includes params, but easier to pass them straight through return session_url @staticmethod def dynamic_data(session_url, content_type, audio): return audio class ProvideSample(MmCall): """Give the server a scan and match sample. The sample is a 128k mp3 slice of the file, usually 15 seconds long.""" static_method = 'POST' static_params = {'version': 1} static_url = _android_url + 'sample' @staticmethod @pb def dynamic_data(filepath, server_challenge, track, uploader_id, mock_sample=None): """Raise OSError on transcoding problems, or ValueError for invalid input. 
:param mock_sample: if provided, will be sent in place of a proper sample """ msg = upload_pb2.UploadSampleRequest() msg.uploader_id = uploader_id sample_msg = upload_pb2.TrackSample() sample_msg.track.CopyFrom(track) sample_msg.signed_challenge_info.CopyFrom(server_challenge) sample_spec = server_challenge.challenge_info # convenience if mock_sample is None: # The sample is simply a small (usually 15 second) clip of the song, # transcoded into 128kbs mp3. The server dictates where the cut should be made. sample_msg.sample = utils.transcode_to_mp3( filepath, quality='128k', slice_start=sample_spec.start_millis // 1000, slice_duration=sample_spec.duration_millis // 1000 ) else: sample_msg.sample = mock_sample # You can provide multiple samples; I just provide one at a time. msg.track_sample.extend([sample_msg]) return msg class UpdateUploadState(MmCall): """Notify the server that we will be starting/stopping/pausing our upload. I believe this is used for the webclient 'currently uploading' widget, but that might also be the current_uploading information. """ static_method = 'POST' static_params = {'version': 1} static_url = _android_url + 'uploadstate' @staticmethod @pb def dynamic_data(to_state, uploader_id): """Raise ValueError on problems. :param to_state: one of 'start', 'paused', or 'stopped' """ msg = upload_pb2.UpdateUploadStateRequest() msg.uploader_id = uploader_id try: state = getattr(upload_pb2.UpdateUploadStateRequest, to_state.upper()) except AttributeError as e: raise ValueError(str(e)) msg.state = state return msg class CancelUploadJobs(MmCall): """This call will cancel any outstanding upload jobs (ie from GetJobs). The Music Manager only calls it when the user changes the location of their local collection. It doesn't actually return anything useful.""" static_method = 'POST' static_url = _android_url + 'deleteuploadrequested' @staticmethod @pb def dynamic_data(uploader_id): """ :param uploader_id: id """ msg = upload_pb2.DeleteUploadRequestedRequest() # what a mouthful! msg.uploader_id = uploader_id return msg class ListTracks(MmCall): """List all tracks. Returns a subset of all available metadata. Can optionally filter for only free/purchased tracks.""" res_msg_type = download_pb2.GetTracksToExportResponse static_method = 'POST' static_url = 'https://music.google.com/music/exportids' # example response: # download_track_info { # id: "970d9e51-b392-3857-897a-170e456cba60" # title: "Temporary Trip" # album: "Pay Attention" # album_artist: "The Mighty Mighty Bosstones" # artist: "The Mighty Mighty Bosstones" # track_number: 14 # track_size: 3577382 # } @staticmethod def dynamic_headers(client_id, *args, **kwargs): return {'X-Device-ID': client_id} @staticmethod @pb def dynamic_data(client_id, cont_token=None, export_type=1, updated_min=0): """Works similarly to the webclient method. Chunks are up to 1000 tracks. :param client_id: an authorized uploader_id :param cont_token: (optional) token to get the next library chunk. 
:param export_type: 1='ALL', 2='PURCHASED_AND_PROMOTIONAL' :param updated_min: likely a timestamp; never seen an example of this != 0 """ msg = download_pb2.GetTracksToExportRequest() msg.client_id = client_id msg.export_type = export_type if cont_token is not None: msg.continuation_token = cont_token msg.updated_min = updated_min return msg @classmethod def check_success(cls, response, msg): if msg.status != download_pb2.GetTracksToExportResponse.OK: enum_desc = download_pb2._GETTRACKSTOEXPORTRESPONSE.enum_types[0] res_name = enum_desc.values_by_number[msg.status].name raise CallFailure( "Track export (list) error code %s: %s." % ( msg.status, res_name ), cls.__name__ ) # TODO @staticmethod def filter_response(msg): """Only log a summary.""" cont_token = None if msg.HasField('continuation_token'): cont_token = msg.continuation_token updated_min = None if msg.HasField('updated_min'): updated_min = msg.updated_min return "<%s songs>, updated_min: %r, continuation_token: %r" % ( len(msg.download_track_info), updated_min, cont_token) class GetDownloadLink(MmCall): """Get a url where a track can be downloaded. Auth is not needed to retrieve the resulting url.""" static_method = 'GET' static_headers = {} static_params = {'version': 2} static_url = 'https://music.google.com/music/export' @staticmethod def dynamic_headers(sid, client_id): return {'X-Device-ID': client_id} @staticmethod def dynamic_params(sid, client_id): return {'songid': sid} @classmethod def parse_response(cls, response): return cls._parse_json(response.text) @staticmethod def filter_response(res): return res class DownloadTrack(MmCall): """Given a url, retrieve a track. Unlike the Webclient, this requires authentication. The entire Requests.Response is returned.""" static_method = 'GET' @staticmethod def dynamic_url(url): """ :param url: result of a call to GetDownloadLink """ return url @classmethod def parse_response(cls, response): return response @staticmethod def filter_response(res): return "code: %s; size: %s bytes; disposition: %r" % ( res.status_code, res.headers['Content-Length'], res.headers['Content-Disposition'])
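# --- Illustrative sketch (not part of the original protocol module) ---
# How a caller might interpret a GetUploadSession response using
# process_session as defined above. `session_res` is a placeholder for the
# parsed JSON returned by GetUploadSession.parse_response; the retry loop and
# request dispatch are assumed to live in the higher-level Musicmanager client.
def _sketch_handle_session_response(session_res):
    got_session, error_details = GetUploadSession.process_session(session_res)
    if got_session:
        # The transfer details live under 'sessionStatus' when a session is granted.
        return session_res['sessionStatus']

    should_retry, reason, error_code = error_details
    log.warning("no upload session: %s (code %r, retry=%s)",
                reason, error_code, should_retry)
    return None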