from django.urls import reverse
from weblate.trans.models.component import Component
from weblate.trans.tests.test_views import ViewTestCase
class LockTest(ViewTestCase):
def setUp(self):
super().setUp()
# Need extra power
self.user.is_superuser = True
self.user.save()
def assert_component_locked(self):
component = Component.objects.get(
slug=self.component.slug, project__slug=self.project.slug
)
self.assertTrue(component.locked)
response = self.client.get(reverse("component", kwargs=self.kw_component))
self.assertContains(
response,
"The translation is temporarily closed for contributions due "
"to maintenance, please come back later.",
)
def assert_component_not_locked(self):
component = Component.objects.get(
slug=self.component.slug, project__slug=self.project.slug
)
self.assertFalse(component.locked)
response = self.client.get(reverse("component", kwargs=self.kw_component))
self.assertNotContains(
response,
"The translation is temporarily closed for contributions due "
"to maintenance, please come back later.",
)
def test_component(self):
response = self.client.post(reverse("lock_component", kwargs=self.kw_component))
redirect_url = "{}#repository".format(
reverse("component", kwargs=self.kw_component)
)
self.assertRedirects(response, redirect_url)
self.assert_component_locked()
response = self.client.post(
reverse("unlock_component", kwargs=self.kw_component)
)
self.assertRedirects(response, redirect_url)
self.assert_component_not_locked()
def test_project(self):
response = self.client.post(reverse("lock_project", kwargs=self.kw_project))
redirect_url = "{}#repository".format(
reverse("project", kwargs=self.kw_project)
)
self.assertRedirects(response, redirect_url)
self.assert_component_locked()
response = self.client.get(reverse("component", kwargs=self.kw_component))
self.assertContains(
response,
"The translation is temporarily closed for contributions due "
"to maintenance, please come back later.",
)
response = self.client.post(reverse("unlock_project", kwargs=self.kw_project))
self.assertRedirects(response, redirect_url)
self.assert_component_not_locked()
|
import sys
import mne
def run():
"""Run command."""
import matplotlib.pyplot as plt
from mne.commands.utils import get_optparser, _add_verbose_flag
from mne.viz import _RAW_CLIP_DEF
parser = get_optparser(__file__, usage='usage: %prog raw [options]')
parser.add_option("--raw", dest="raw_in",
help="Input raw FIF file (can also be specified "
"directly as an argument without the --raw prefix)",
metavar="FILE")
parser.add_option("--proj", dest="proj_in",
help="Projector file", metavar="FILE",
default='')
parser.add_option("--eve", dest="eve_in",
help="Events file", metavar="FILE",
default='')
parser.add_option("-d", "--duration", dest="duration", type="float",
help="Time window for plotting (sec)",
default=10.0)
parser.add_option("-t", "--start", dest="start", type="float",
help="Initial start time for plotting",
default=0.0)
parser.add_option("-n", "--n_channels", dest="n_channels", type="int",
help="Number of channels to plot at a time",
default=20)
parser.add_option("-o", "--order", dest="group_by",
help="Order to use for grouping during plotting "
"('type' or 'original')", default='type')
parser.add_option("-p", "--preload", dest="preload",
help="Preload raw data (for faster navigaton)",
default=False, action="store_true")
parser.add_option("-s", "--show_options", dest="show_options",
help="Show projection options dialog",
default=False)
parser.add_option("--allowmaxshield", dest="maxshield",
help="Allow loading MaxShield processed data",
action="store_true")
parser.add_option("--highpass", dest="highpass", type="float",
help="Display high-pass filter corner frequency",
default=-1)
parser.add_option("--lowpass", dest="lowpass", type="float",
help="Display low-pass filter corner frequency",
default=-1)
parser.add_option("--filtorder", dest="filtorder", type="int",
help="Display filtering IIR order (or 0 to use FIR)",
default=4)
parser.add_option("--clipping", dest="clipping",
help="Enable trace clipping mode, either 'clamp' or "
"'transparent'", default=_RAW_CLIP_DEF)
parser.add_option("--filterchpi", dest="filterchpi",
help="Enable filtering cHPI signals.", default=None,
action="store_true")
_add_verbose_flag(parser)
options, args = parser.parse_args()
if len(args):
raw_in = args[0]
else:
raw_in = options.raw_in
duration = options.duration
start = options.start
n_channels = options.n_channels
group_by = options.group_by
preload = options.preload
show_options = options.show_options
proj_in = options.proj_in
eve_in = options.eve_in
maxshield = options.maxshield
highpass = options.highpass
lowpass = options.lowpass
filtorder = options.filtorder
clipping = options.clipping
if isinstance(clipping, str):
if clipping.lower() == 'none':
clipping = None
else:
try:
clipping = float(clipping) # allow float and convert it
except ValueError:
pass
filterchpi = options.filterchpi
verbose = options.verbose
if raw_in is None:
parser.print_help()
sys.exit(1)
raw = mne.io.read_raw_fif(raw_in, preload=preload,
allow_maxshield=maxshield)
if len(proj_in) > 0:
projs = mne.read_proj(proj_in)
raw.info['projs'] = projs
if len(eve_in) > 0:
events = mne.read_events(eve_in)
else:
events = None
if filterchpi:
if not preload:
raise RuntimeError(
'Raw data must be preloaded for chpi, use --preload')
raw = mne.chpi.filter_chpi(raw)
highpass = None if highpass < 0 or filtorder < 0 else highpass
lowpass = None if lowpass < 0 or filtorder < 0 else lowpass
raw.plot(duration=duration, start=start, n_channels=n_channels,
group_by=group_by, show_options=show_options, events=events,
highpass=highpass, lowpass=lowpass, filtorder=filtorder,
clipping=clipping, verbose=verbose)
plt.show(block=True)
mne.utils.run_command_if_main()
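# Illustrative usage note (not part of the original file): assuming this module
# is wired up as the ``mne browse_raw`` sub-command, it could be invoked from a
# shell as, e.g.:
#
#     mne browse_raw --raw sample_audvis_raw.fif --preload --duration 20
#
# (the FIF file name above is only a placeholder).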
|
from mock import MagicMock
from mock import patch
from paasta_tools.cli.cmds.itest import paasta_itest
@patch("paasta_tools.cli.cmds.itest.validate_service_name", autospec=True)
@patch("paasta_tools.cli.cmds.itest._run", autospec=True)
@patch("paasta_tools.cli.cmds.itest._log", autospec=True)
@patch("paasta_tools.cli.cmds.itest.check_docker_image", autospec=True)
@patch("paasta_tools.cli.cmds.itest.build_docker_tag", autospec=True)
def test_itest_run_fail(
mock_build_docker_tag,
mock_docker_image,
mock_log,
mock_run,
mock_validate_service_name,
):
mock_build_docker_tag.return_value = "fake-registry/services-foo:paasta-bar"
mock_docker_image.return_value = True
mock_run.return_value = (1, "fake_output")
args = MagicMock()
assert paasta_itest(args) == 1
@patch("paasta_tools.cli.cmds.itest.validate_service_name", autospec=True)
@patch("paasta_tools.cli.cmds.itest._run", autospec=True)
@patch("paasta_tools.cli.cmds.itest._log", autospec=True)
@patch("paasta_tools.cli.cmds.itest.check_docker_image", autospec=True)
@patch("paasta_tools.cli.cmds.itest.build_docker_tag", autospec=True)
def test_itest_success(
mock_build_docker_tag,
mock_docker_image,
mock_log,
mock_run,
mock_validate_service_name,
):
mock_build_docker_tag.return_value = "fake-registry/services-foo:paasta-bar"
mock_docker_image.return_value = True
mock_run.return_value = (0, "Yeeehaaa")
args = MagicMock()
assert paasta_itest(args) == 0
@patch("paasta_tools.cli.cmds.itest.validate_service_name", autospec=True)
@patch("paasta_tools.cli.cmds.itest._run", autospec=True)
@patch("paasta_tools.cli.cmds.itest.build_docker_tag", autospec=True)
@patch("paasta_tools.cli.cmds.itest._log", autospec=True)
@patch("paasta_tools.cli.cmds.itest.check_docker_image", autospec=True)
def test_itest_works_when_service_name_starts_with_services_dash(
mock_docker_image,
mock_log,
mock_build_docker_tag,
mock_run,
mock_validate_service_name,
):
mock_docker_image.return_value = True
mock_build_docker_tag.return_value = "unused_docker_tag"
mock_run.return_value = (0, "Yeeehaaa")
args = MagicMock()
args.service = "services-fake_service"
args.commit = "unused"
assert paasta_itest(args) == 0
mock_build_docker_tag.assert_called_once_with("fake_service", "unused")
|
import datetime
import json
import sys
from coverage import __version__
from coverage.report import get_analysis_to_report
from coverage.results import Numbers
class JsonReporter(object):
"""A reporter for writing JSON coverage results."""
def __init__(self, coverage):
self.coverage = coverage
self.config = self.coverage.config
self.total = Numbers()
self.report_data = {}
def report(self, morfs, outfile=None):
"""Generate a json report for `morfs`.
`morfs` is a list of modules or file names.
`outfile` is a file object to write the json to
"""
outfile = outfile or sys.stdout
coverage_data = self.coverage.get_data()
coverage_data.set_query_contexts(self.config.report_contexts)
self.report_data["meta"] = {
"version": __version__,
"timestamp": datetime.datetime.now().isoformat(),
"branch_coverage": coverage_data.has_arcs(),
"show_contexts": self.config.json_show_contexts,
}
measured_files = {}
for file_reporter, analysis in get_analysis_to_report(self.coverage, morfs):
measured_files[file_reporter.relative_filename()] = self.report_one_file(
coverage_data,
analysis
)
self.report_data["files"] = measured_files
self.report_data["totals"] = {
'covered_lines': self.total.n_executed,
'num_statements': self.total.n_statements,
'percent_covered': self.total.pc_covered,
'missing_lines': self.total.n_missing,
'excluded_lines': self.total.n_excluded,
}
if coverage_data.has_arcs():
self.report_data["totals"].update({
'num_branches': self.total.n_branches,
'num_partial_branches': self.total.n_partial_branches,
'covered_branches': self.total.n_executed_branches,
'missing_branches': self.total.n_missing_branches,
})
json.dump(
self.report_data,
outfile,
indent=4 if self.config.json_pretty_print else None
)
return self.total.n_statements and self.total.pc_covered
def report_one_file(self, coverage_data, analysis):
"""Extract the relevant report data for a single file"""
nums = analysis.numbers
self.total += nums
summary = {
'covered_lines': nums.n_executed,
'num_statements': nums.n_statements,
'percent_covered': nums.pc_covered,
'missing_lines': nums.n_missing,
'excluded_lines': nums.n_excluded,
}
reported_file = {
'executed_lines': sorted(analysis.executed),
'summary': summary,
'missing_lines': sorted(analysis.missing),
'excluded_lines': sorted(analysis.excluded)
}
if self.config.json_show_contexts:
reported_file['contexts'] = analysis.data.contexts_by_lineno(
analysis.filename,
)
if coverage_data.has_arcs():
reported_file['summary'].update({
'num_branches': nums.n_branches,
'num_partial_branches': nums.n_partial_branches,
'covered_branches': nums.n_executed_branches,
'missing_branches': nums.n_missing_branches,
})
return reported_file
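# Illustrative usage note (not part of the original file): this reporter backs
# JSON reporting in coverage.py 5.x and is normally reached through the
# ``coverage json`` command line or the ``Coverage.json_report()`` API rather
# than instantiated directly. A minimal sketch (``run_my_code`` is a
# placeholder):
#
#     from coverage import Coverage
#     cov = Coverage()
#     cov.start()
#     run_my_code()
#     cov.stop()
#     cov.json_report(outfile="coverage.json")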
|
from test import CollectorTestCase
from test import get_collector_config
from mock import patch, Mock
from diamond.collector import Collector
from kafka_consumer_lag import KafkaConsumerLagCollector
##########################################################################
class TestKafkaConsumerLagCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('KafkaConsumerLagCollector', {
'consumer_groups': ['test_group']
})
self.collector = KafkaConsumerLagCollector(config, None)
def test_import(self):
self.assertTrue(KafkaConsumerLagCollector)
@patch.object(Collector, 'publish')
def test_should_publish_consumer_lag(self, publish_mock):
output_mock = Mock(
return_value=(self.getFixture('consumer_lag_check').getvalue(), '')
)
collector_mock = patch.object(
KafkaConsumerLagCollector,
'run_command',
output_mock
)
collector_mock.start()
self.collector.collect()
collector_mock.stop()
metrics = {
'stage_nginx_access.nginx_access.0': 0,
'stage_nginx_access.nginx_access.1': 2,
'stage_nginx_access.nginx_access.2': 0,
'stage_nginx_access.nginx_access.3': 0,
'stage_nginx_access.nginx_access.4': 0,
'stage_nginx_access.nginx_access.5': 0,
'stage_nginx_access.nginx_access.6': 0,
'stage_nginx_access.nginx_access.7': 52,
'stage_nginx_access.nginx_access.8': 0,
'stage_nginx_access.nginx_access.9': 0,
'stage_nginx_access.nginx_access.10': 0
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_publish_consumer_lag_with_zookeeper_chroot(self, publish_mock):
self.collector.config.update({
'zookeeper':
['192.168.1.101:2181', '192.168.1.102:2181/dev/test-01']
})
output_mock = Mock(
return_value=(self.getFixture('consumer_lag_check').getvalue(), '')
)
collector_mock = patch.object(
KafkaConsumerLagCollector,
'run_command',
output_mock
)
collector_mock.start()
self.collector.collect()
collector_mock.stop()
metrics = {
'dev_test_01.stage_nginx_access.nginx_access.0': 0,
'dev_test_01.stage_nginx_access.nginx_access.1': 2,
'dev_test_01.stage_nginx_access.nginx_access.2': 0,
'dev_test_01.stage_nginx_access.nginx_access.3': 0,
'dev_test_01.stage_nginx_access.nginx_access.4': 0,
'dev_test_01.stage_nginx_access.nginx_access.5': 0,
'dev_test_01.stage_nginx_access.nginx_access.6': 0,
'dev_test_01.stage_nginx_access.nginx_access.7': 52,
'dev_test_01.stage_nginx_access.nginx_access.8': 0,
'dev_test_01.stage_nginx_access.nginx_access.9': 0,
'dev_test_01.stage_nginx_access.nginx_access.10': 0
}
self.assertPublishedMany(publish_mock, metrics)
|
import copy
import time
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import regex_util
from perfkitbenchmarker import sample
from perfkitbenchmarker.linux_packages import cloud_tpu_models
from perfkitbenchmarker.linux_packages import nvidia_driver
from perfkitbenchmarker.linux_packages import tensorflow
from perfkitbenchmarker.providers.gcp import gcs
from perfkitbenchmarker.providers.gcp import util
FLAGS = flags.FLAGS
BENCHMARK_NAME = 'mnist'
BENCHMARK_CONFIG = """
mnist:
description: Runs MNIST Benchmark.
vm_groups:
default:
os_type: ubuntu1604
vm_spec:
GCP:
machine_type: n1-standard-4
zone: us-east1-d
boot_disk_size: 200
AWS:
machine_type: p2.xlarge
zone: us-east-1
boot_disk_size: 200
Azure:
machine_type: Standard_NC6
zone: eastus
"""
GCP_ENV = 'PATH=/tmp/pkb/google-cloud-sdk/bin:$PATH'
flags.DEFINE_string('mnist_data_dir', None, 'mnist train file for tensorflow')
flags.DEFINE_string('imagenet_data_dir',
'gs://cloud-tpu-test-datasets/fake_imagenet',
'Directory where the input data is stored')
flags.DEFINE_string(
't2t_data_dir', None,
'Directory where the input data is stored for tensor2tensor')
flags.DEFINE_integer('imagenet_num_train_images', 1281167,
'Size of ImageNet training data set.')
flags.DEFINE_integer('imagenet_num_eval_images', 50000,
'Size of ImageNet validation data set.')
flags.DEFINE_integer('mnist_num_train_images', 55000,
'Size of MNIST training data set.')
flags.DEFINE_integer('mnist_num_eval_images', 5000,
'Size of MNIST validation data set.')
flags.DEFINE_integer('mnist_train_epochs', 37,
'Total number of training epochs', lower_bound=1)
flags.DEFINE_integer(
'mnist_eval_epochs', 0,
'Total number of evaluation epochs. If `0`, evaluation '
'after training is skipped.')
flags.DEFINE_integer('tpu_iterations', 500,
'Number of iterations per TPU training loop.')
flags.DEFINE_integer('mnist_batch_size', 1024,
'Mini-batch size for the training. Note that this '
'is the global batch size and not the per-shard batch.')
flags.DEFINE_enum('tpu_precision', 'bfloat16', ['bfloat16', 'float32'],
'Precision to use')
EXAMPLES_PER_SECOND_PRECISION = 0.01
def GetConfig(user_config):
"""Load and return benchmark config.
Args:
user_config: user supplied configuration (flags and config file)
Returns:
loaded benchmark configuration
"""
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def _UpdateBenchmarkSpecWithFlags(benchmark_spec):
"""Update the benchmark_spec with supplied command line flags.
Args:
benchmark_spec: benchmark specification to update
"""
benchmark_spec.data_dir = FLAGS.mnist_data_dir
benchmark_spec.iterations = FLAGS.tpu_iterations
benchmark_spec.gcp_service_account = FLAGS.gcp_service_account
benchmark_spec.batch_size = FLAGS.mnist_batch_size
benchmark_spec.num_train_images = FLAGS.mnist_num_train_images
benchmark_spec.num_eval_images = FLAGS.mnist_num_eval_images
benchmark_spec.num_examples_per_epoch = (
float(benchmark_spec.num_train_images) / benchmark_spec.batch_size)
benchmark_spec.train_epochs = FLAGS.mnist_train_epochs
benchmark_spec.train_steps = int(
benchmark_spec.train_epochs * benchmark_spec.num_examples_per_epoch)
benchmark_spec.eval_epochs = FLAGS.mnist_eval_epochs
benchmark_spec.eval_steps = int(
benchmark_spec.eval_epochs * benchmark_spec.num_examples_per_epoch)
benchmark_spec.precision = FLAGS.tpu_precision
benchmark_spec.env_cmd = 'export PYTHONPATH=$PYTHONPATH:$PWD/tpu/models'
def Prepare(benchmark_spec):
"""Install and set up MNIST on the target vm.
Args:
benchmark_spec: The benchmark specification
"""
benchmark_spec.always_call_cleanup = True
_UpdateBenchmarkSpecWithFlags(benchmark_spec)
vm = benchmark_spec.vms[0]
if not benchmark_spec.tpus:
vm.Install('tensorflow')
vm.Install('cloud_tpu_models')
vm.Install('tensorflow_models')
if benchmark_spec.tpus:
storage_service = gcs.GoogleCloudStorageService()
benchmark_spec.storage_service = storage_service
bucket = 'pkb{}'.format(FLAGS.run_uri)
benchmark_spec.bucket = bucket
benchmark_spec.model_dir = 'gs://{}'.format(bucket)
location = benchmark_spec.tpu_groups['train'].GetZone()
storage_service.PrepareService(util.GetRegionFromZone(location))
storage_service.MakeBucket(bucket)
storage_service.ChmodBucket(benchmark_spec.gcp_service_account, 'W', bucket)
else:
benchmark_spec.model_dir = '/tmp'
def CreateMetadataDict(benchmark_spec):
"""Create metadata dict to be used in run results.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
metadata dict
"""
metadata = {
'use_tpu': bool(benchmark_spec.tpus),
'data_dir': benchmark_spec.data_dir,
'model_dir': benchmark_spec.model_dir,
'train_steps': benchmark_spec.train_steps,
'eval_steps': benchmark_spec.eval_steps,
'commit': cloud_tpu_models.GetCommit(benchmark_spec.vms[0]),
'iterations': benchmark_spec.iterations,
'num_train_images': benchmark_spec.num_train_images,
'num_eval_images': benchmark_spec.num_eval_images,
'train_epochs': benchmark_spec.train_epochs,
'eval_epochs': benchmark_spec.eval_epochs,
'num_examples_per_epoch': benchmark_spec.num_examples_per_epoch,
'train_batch_size': benchmark_spec.batch_size,
'eval_batch_size': benchmark_spec.batch_size
}
if benchmark_spec.tpus:
metadata.update({
'train_tpu_num_shards':
benchmark_spec.tpu_groups['train'].GetNumShards(),
'train_tpu_accelerator_type':
benchmark_spec.tpu_groups['train'].GetAcceleratorType()
})
return metadata
def ExtractThroughput(regex, output, metadata, metric, unit):
"""Extract throughput from MNIST output.
Args:
regex: string. Regular expression.
output: MNIST output
metadata: dict. Additional metadata to include with the sample.
metric: string. Name of the metric within the benchmark.
unit: string. Units for 'value'.
Returns:
samples containing the throughput
"""
matches = regex_util.ExtractAllMatches(regex, output)
samples = []
for index, value in enumerate(matches):
metadata_with_index = copy.deepcopy(metadata)
metadata_with_index['index'] = index
samples.append(sample.Sample(metric, float(value), unit,
metadata_with_index))
return samples
def MakeSamplesFromTrainOutput(metadata, output, elapsed_seconds, step):
"""Create a sample containing training metrics.
Args:
metadata: dict, metadata to include with the reported samples.
output: string, command output
elapsed_seconds: float, elapsed seconds from saved checkpoint.
step: int, the global steps in the training process.
Example output:
perfkitbenchmarker/tests/linux_benchmarks/mnist_benchmark_test.py
Returns:
a Sample containing training metrics, current step, elapsed seconds
"""
samples = []
metadata_copy = metadata.copy()
metadata_copy['step'] = int(step)
metadata_copy['epoch'] = step / metadata['num_examples_per_epoch']
metadata_copy['elapsed_seconds'] = elapsed_seconds
get_mean = lambda matches: sum(float(x) for x in matches) / len(matches)
loss = get_mean(regex_util.ExtractAllMatches(
r'Loss for final step: (\d+\.\d+)', output))
samples.append(sample.Sample('Loss', float(loss), '', metadata_copy))
if 'global_step/sec: ' in output:
global_step_sec = get_mean(regex_util.ExtractAllMatches(
r'global_step/sec: (\S+)', output))
samples.append(sample.Sample(
'Global Steps Per Second', global_step_sec,
'global_steps/sec', metadata_copy))
examples_sec = global_step_sec * metadata['train_batch_size']
if 'examples/sec: ' in output:
examples_sec_log = get_mean(regex_util.ExtractAllMatches(
r'examples/sec: (\S+)', output))
precision = abs(examples_sec_log - examples_sec) / examples_sec_log
assert precision < EXAMPLES_PER_SECOND_PRECISION, 'examples/sec is wrong.'
examples_sec = examples_sec_log
samples.append(sample.Sample('Examples Per Second', examples_sec,
'examples/sec', metadata_copy))
return samples
def MakeSamplesFromEvalOutput(metadata, output, elapsed_seconds):
"""Create a sample containing evaluation metrics.
Args:
metadata: dict, metadata to include with the reported samples.
output: string, command output
elapsed_seconds: float, elapsed seconds from saved checkpoint.
Example output:
perfkitbenchmarker/tests/linux_benchmarks/mnist_benchmark_test.py
Returns:
a Sample containing evaluation metrics
"""
pattern = (r'Saving dict for global step \d+: accuracy = (\d+\.\d+), '
r'global_step = (\d+), loss = (\d+\.\d+)')
accuracy, step, loss = regex_util.ExtractAllMatches(pattern, output).pop()
metadata_copy = metadata.copy()
step = int(step)
metadata_copy['step'] = step
num_examples_per_epoch = metadata['num_examples_per_epoch']
metadata_copy['epoch'] = step / num_examples_per_epoch
metadata_copy['elapsed_seconds'] = elapsed_seconds
return [sample.Sample('Eval Loss', float(loss), '', metadata_copy),
sample.Sample('Accuracy', float(accuracy) * 100, '%', metadata_copy)]
def Run(benchmark_spec):
"""Run MNIST on the cluster.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample objects.
"""
_UpdateBenchmarkSpecWithFlags(benchmark_spec)
vm = benchmark_spec.vms[0]
if benchmark_spec.tpus:
mnist_benchmark_script = 'mnist_tpu.py'
mnist_benchmark_cmd = ('cd tpu/models && '
'export PYTHONPATH=$(pwd) && '
'cd official/mnist && '
'python {script} '
'--data_dir={data_dir} '
'--iterations={iterations} '
'--model_dir={model_dir} '
'--batch_size={batch_size}'.format(
script=mnist_benchmark_script,
data_dir=benchmark_spec.data_dir,
iterations=benchmark_spec.iterations,
model_dir=benchmark_spec.model_dir,
batch_size=benchmark_spec.batch_size))
else:
mnist_benchmark_script = 'mnist.py'
mnist_benchmark_cmd = ('cd models && '
'export PYTHONPATH=$(pwd) && '
'cd official/mnist && '
'python {script} '
'--data_dir={data_dir} '
'--model_dir={model_dir} '
'--batch_size={batch_size} '.format(
script=mnist_benchmark_script,
data_dir=benchmark_spec.data_dir,
model_dir=benchmark_spec.model_dir,
batch_size=benchmark_spec.batch_size))
if nvidia_driver.CheckNvidiaGpuExists(vm):
mnist_benchmark_cmd = '{env} {cmd}'.format(
env=tensorflow.GetEnvironmentVars(vm), cmd=mnist_benchmark_cmd)
samples = []
metadata = CreateMetadataDict(benchmark_spec)
if benchmark_spec.train_steps > 0:
if benchmark_spec.tpus:
tpu = benchmark_spec.tpu_groups['train'].GetName()
num_shards = '--num_shards={}'.format(
benchmark_spec.tpu_groups['train'].GetNumShards())
else:
tpu = num_shards = ''
if benchmark_spec.tpus:
mnist_benchmark_train_cmd = (
'{cmd} --tpu={tpu} --use_tpu={use_tpu} --train_steps={train_steps} '
'{num_shards} --noenable_predict'.format(
cmd=mnist_benchmark_cmd,
tpu=tpu,
use_tpu=bool(benchmark_spec.tpus),
train_steps=benchmark_spec.train_steps,
num_shards=num_shards))
else:
mnist_benchmark_train_cmd = (
'{cmd} --train_epochs={train_epochs} '.format(
cmd=mnist_benchmark_cmd,
train_epochs=benchmark_spec.train_epochs))
start = time.time()
stdout, stderr = vm.RobustRemoteCommand(mnist_benchmark_train_cmd,
should_log=True)
elapsed_seconds = (time.time() - start)
samples.extend(MakeSamplesFromTrainOutput(
metadata, stdout + stderr, elapsed_seconds, benchmark_spec.train_steps))
if benchmark_spec.eval_steps > 0:
if benchmark_spec.tpus:
mnist_benchmark_eval_cmd = (
'{cmd} --tpu={tpu} --use_tpu={use_tpu} --eval_steps={eval_steps}'
.format(
cmd=mnist_benchmark_cmd,
use_tpu=bool(benchmark_spec.tpus),
tpu=benchmark_spec.tpu_groups['eval'].GetName(),
eval_steps=benchmark_spec.eval_steps))
else:
mnist_benchmark_eval_cmd = ('{cmd} --eval_steps={eval_steps}'.format(
cmd=mnist_benchmark_cmd, eval_steps=benchmark_spec.eval_steps))
stdout, stderr = vm.RobustRemoteCommand(mnist_benchmark_eval_cmd,
should_log=True)
samples.extend(MakeSamplesFromEvalOutput(metadata, stdout + stderr,
elapsed_seconds))
return samples
def Cleanup(benchmark_spec):
"""Cleanup MNIST on the cluster.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
if benchmark_spec.tpus:
benchmark_spec.storage_service.DeleteBucket(benchmark_spec.bucket)
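# Illustrative note (not part of the original file): PerfKit Benchmarker drives
# this module through its standard lifecycle -- GetConfig() loads
# BENCHMARK_CONFIG, Prepare() installs TensorFlow/the TPU models and, for TPU
# runs, a GCS bucket, Run() executes training and evaluation and returns
# sample.Sample objects, and Cleanup() removes the bucket.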
|
import random
import pandas as pd
from arctic import Arctic
def gen_dataframe_random(cols, rows):
c = {}
for col in range(cols):
c[str(col)] = [round(random.uniform(-10000.0, 10000.0), 1) for r in range(rows)]
index = [range(rows)]
return pd.DataFrame(data=c, index=index)
def gen_series_random(rows):
col = [round(random.uniform(-10000.0, 10000.0), 1) for r in range(rows)]
return pd.Series(data=col, index=list(range(rows)))
def gen_dataframe_compressible(cols, rows):
row = [round(random.uniform(-100.0, 100.0), 1) for r in range(cols)]
data = [row] * rows
index = [range(rows)]
return pd.DataFrame(data=data, index=index)
def gen_series_compressible(rows):
d = round(random.uniform(-100.0, 100.0), 1)
data = [d] * rows
index = [range(rows)]
return pd.Series(data=data, index=index)
TEST_SIZES = [1000, 10000, 100000, 1000000]
df_random = [gen_dataframe_random(5, rows) for rows in TEST_SIZES]
s_random = [gen_series_random(5 * rows) for rows in TEST_SIZES]
df_compress = [gen_dataframe_compressible(10, rows) for rows in TEST_SIZES]
s_compress = [gen_series_compressible(rows) for rows in TEST_SIZES]
class TimeSuiteWrite(object):
params = list(range(len(TEST_SIZES)))
param_names = ['5K * 10^']
def setup(self, arg):
self.store = Arctic("127.0.0.1")
self.store.delete_library('test.lib')
self.store.initialize_library('test.lib')
self.lib = self.store['test.lib']
def teardown(self, arg):
self.store.delete_library('test.lib')
self.lib = None
def time_write_dataframe_random(self, idx):
self.lib.write('df_bench_random', df_random[idx])
def time_write_series_random(self, idx):
self.lib.write('series_bench_random', s_random[idx])
def time_write_dataframe_compressible(self, idx):
self.lib.write('df_bench_compressible', df_compress[idx])
def time_write_series_compressible(self, idx):
self.lib.write('series_bench_compressible', s_compress[idx])
class TimeSuiteRead(object):
params = list(range(len(TEST_SIZES)))
param_names = ['5K * 10^']
def __init__(self):
self.store = Arctic("127.0.0.1")
def setup(self, idx):
self.store.delete_library('test.lib')
self.store.initialize_library('test.lib')
self.lib = self.store['test.lib']
self.lib.write('test_df', df_random[idx])
def teardown(self, arg):
self.store.delete_library('test.lib')
self.lib = None
def time_read_dataframe(self, idx):
self.lib.read('test_df')
class TimeSuiteAppend(object):
params = list(range(len(TEST_SIZES)))
param_names = ['5K * 10^']
def __init__(self):
self.store = Arctic("127.0.0.1")
def setup(self, idx):
self.store.delete_library('test.lib')
self.store.initialize_library('test.lib')
self.lib = self.store['test.lib']
self.lib.write('test_df', df_random[idx])
def teardown(self, arg):
self.store.delete_library('test.lib')
self.lib = None
def time_append_dataframe(self, idx):
self.lib.append('test_df', df_random[idx])
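# Illustrative note (not part of the original file): these classes follow the
# airspeed velocity (asv) benchmark convention -- ``params``/``param_names``
# parametrize each run, ``setup``/``teardown`` wrap every timed call, and every
# ``time_*`` method is timed by the framework, so an equivalent manual run
# would look roughly like:
#
#     suite = TimeSuiteWrite()
#     suite.setup(0)
#     suite.time_write_dataframe_random(0)
#     suite.teardown(0)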
|
import pytest
from qutebrowser.utils import usertypes
from qutebrowser.misc import objects
from qutebrowser.misc.keyhintwidget import KeyHintView
def expected_text(*args):
"""Helper to format text we expect the KeyHintView to generate.
Args:
args: One tuple for each row in the expected output.
Tuples are of the form: (prefix, color, suffix, command).
"""
text = '<table>'
for group in args:
text += ("<tr>"
"<td>{}</td>"
"<td style='color: {}'>{}</td>"
"<td style='padding-left: 2ex'>{}</td>"
"</tr>").format(*group)
return text + '</table>'
@pytest.fixture
def keyhint(qtbot, config_stub, key_config_stub):
"""Fixture to initialize a KeyHintView."""
config_stub.val.colors.keyhint.suffix.fg = 'yellow'
keyhint = KeyHintView(0, None)
qtbot.add_widget(keyhint)
assert keyhint.text() == ''
return keyhint
def test_show_and_hide(qtbot, keyhint):
with qtbot.waitSignal(keyhint.update_geometry):
with qtbot.waitExposed(keyhint):
keyhint.show()
keyhint.update_keyhint(usertypes.KeyMode.normal, '')
assert not keyhint.isVisible()
def test_position_change(keyhint, config_stub):
config_stub.val.statusbar.position = 'top'
stylesheet = keyhint.styleSheet()
assert 'border-bottom-right-radius' in stylesheet
assert 'border-top-right-radius' not in stylesheet
def test_suggestions(keyhint, config_stub):
"""Test that keyhints are shown based on a prefix."""
bindings = {'normal': {
'aa': 'message-info cmd-aa',
'ab': 'message-info cmd-ab',
'aba': 'message-info cmd-aba',
'abb': 'message-info cmd-abb',
'xd': 'message-info cmd-xd',
'xe': 'message-info cmd-xe',
}}
default_bindings = {'normal': {
'ac': 'message-info cmd-ac',
}}
config_stub.val.bindings.default = default_bindings
config_stub.val.bindings.commands = bindings
keyhint.update_keyhint(usertypes.KeyMode.normal, 'a')
assert keyhint.text() == expected_text(
('a', 'yellow', 'a', 'message-info cmd-aa'),
('a', 'yellow', 'b', 'message-info cmd-ab'),
('a', 'yellow', 'ba', 'message-info cmd-aba'),
('a', 'yellow', 'bb', 'message-info cmd-abb'),
('a', 'yellow', 'c', 'message-info cmd-ac'))
def test_suggestions_special(keyhint, config_stub):
"""Test that special characters work properly as prefix."""
bindings = {'normal': {
'<Ctrl-C>a': 'message-info cmd-Cca',
'<Ctrl-C><Ctrl-C>': 'message-info cmd-CcCc',
'<Ctrl-C><Ctrl-X>': 'message-info cmd-CcCx',
'cbb': 'message-info cmd-cbb',
'xd': 'message-info cmd-xd',
'xe': 'message-info cmd-xe',
}}
default_bindings = {'normal': {
'<Ctrl-C>c': 'message-info cmd-Ccc',
}}
config_stub.val.bindings.default = default_bindings
config_stub.val.bindings.commands = bindings
keyhint.update_keyhint(usertypes.KeyMode.normal, '<Ctrl+c>')
assert keyhint.text() == expected_text(
('<Ctrl+c>', 'yellow', 'a', 'message-info cmd-Cca'),
('<Ctrl+c>', 'yellow', 'c', 'message-info cmd-Ccc'),
('<Ctrl+c>', 'yellow', '<Ctrl+c>',
'message-info cmd-CcCc'),
('<Ctrl+c>', 'yellow', '<Ctrl+x>',
'message-info cmd-CcCx'))
def test_suggestions_with_count(keyhint, config_stub, monkeypatch, stubs):
"""Test that a count prefix filters out commands that take no count."""
monkeypatch.setattr(objects, 'commands', {
'foo': stubs.FakeCommand(name='foo', takes_count=lambda: False),
'bar': stubs.FakeCommand(name='bar', takes_count=lambda: True),
})
bindings = {'normal': {'aa': 'foo', 'ab': 'bar'}}
config_stub.val.bindings.default = bindings
config_stub.val.bindings.commands = bindings
keyhint.update_keyhint(usertypes.KeyMode.normal, '2a')
assert keyhint.text() == expected_text(
('a', 'yellow', 'b', 'bar'),
)
def test_special_bindings(keyhint, config_stub):
"""Ensure a prefix of '<' doesn't suggest special keys."""
bindings = {'normal': {
'<a': 'message-info cmd-<a',
'<b': 'message-info cmd-<b',
'<ctrl-a>': 'message-info cmd-ctrla',
}}
config_stub.val.bindings.default = {}
config_stub.val.bindings.commands = bindings
keyhint.update_keyhint(usertypes.KeyMode.normal, '<')
assert keyhint.text() == expected_text(
('<', 'yellow', 'a', 'message-info cmd-<a'),
('<', 'yellow', 'b', 'message-info cmd-<b'))
def test_color_switch(keyhint, config_stub):
"""Ensure the keyhint suffix color can be updated at runtime."""
bindings = {'normal': {'aa': 'message-info cmd-aa'}}
config_stub.val.colors.keyhint.suffix.fg = '#ABCDEF'
config_stub.val.bindings.default = {}
config_stub.val.bindings.commands = bindings
keyhint.update_keyhint(usertypes.KeyMode.normal, 'a')
assert keyhint.text() == expected_text(('a', '#ABCDEF', 'a',
'message-info cmd-aa'))
def test_no_matches(keyhint, config_stub):
"""Ensure the widget isn't visible if there are no keystrings to show."""
bindings = {'normal': {
'aa': 'message-info cmd-aa',
'ab': 'message-info cmd-ab',
}}
config_stub.val.bindings.default = {}
config_stub.val.bindings.commands = bindings
keyhint.update_keyhint(usertypes.KeyMode.normal, 'z')
assert not keyhint.text()
assert not keyhint.isVisible()
@pytest.mark.parametrize('blacklist, expected', [
(['ab*'], expected_text(('a', 'yellow', 'a', 'message-info cmd-aa'))),
(['*'], ''),
])
def test_blacklist(keyhint, config_stub, blacklist, expected):
"""Test that blacklisted keychains aren't hinted."""
config_stub.val.keyhint.blacklist = blacklist
bindings = {'normal': {
'aa': 'message-info cmd-aa',
'ab': 'message-info cmd-ab',
'aba': 'message-info cmd-aba',
'abb': 'message-info cmd-abb',
'xd': 'message-info cmd-xd',
'xe': 'message-info cmd-xe',
}}
config_stub.val.bindings.default = {}
config_stub.val.bindings.commands = bindings
keyhint.update_keyhint(usertypes.KeyMode.normal, 'a')
assert keyhint.text() == expected
def test_delay(qtbot, stubs, monkeypatch, config_stub, key_config_stub):
timer = stubs.FakeTimer()
monkeypatch.setattr(
'qutebrowser.misc.keyhintwidget.usertypes.Timer',
lambda *_: timer)
interval = 200
bindings = {'normal': {'aa': 'message-info cmd-aa'}}
config_stub.val.keyhint.delay = interval
config_stub.val.bindings.default = {}
config_stub.val.bindings.commands = bindings
keyhint = KeyHintView(0, None)
keyhint.update_keyhint(usertypes.KeyMode.normal, 'a')
assert timer.isSingleShot()
assert timer.interval() == interval
|
from appconf import AppConf
from django.db import models
from django.db.models import Q
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.urls import reverse
from django.utils.functional import cached_property
from weblate.addons.events import (
EVENT_CHOICES,
EVENT_COMPONENT_UPDATE,
EVENT_POST_ADD,
EVENT_POST_COMMIT,
EVENT_POST_PUSH,
EVENT_POST_UPDATE,
EVENT_PRE_COMMIT,
EVENT_PRE_PUSH,
EVENT_PRE_UPDATE,
EVENT_STORE_POST_LOAD,
EVENT_UNIT_POST_SAVE,
EVENT_UNIT_PRE_CREATE,
)
from weblate.trans.models import Component, Unit
from weblate.trans.signals import (
component_post_update,
store_post_load,
translation_post_add,
unit_pre_create,
vcs_post_commit,
vcs_post_push,
vcs_post_update,
vcs_pre_commit,
vcs_pre_push,
vcs_pre_update,
)
from weblate.utils.classloader import ClassLoader
from weblate.utils.decorators import disable_for_loaddata
from weblate.utils.errors import report_error
from weblate.utils.fields import JSONField
# Initialize addons registry
ADDONS = ClassLoader("WEBLATE_ADDONS", False)
class AddonQuerySet(models.QuerySet):
def filter_component(self, component):
return self.prefetch_related("event_set").filter(
(Q(component=component) & Q(project_scope=False))
| (Q(component__project=component.project) & Q(project_scope=True))
| (Q(component__linked_component=component) & Q(repo_scope=True))
| (Q(component=component.linked_component) & Q(repo_scope=True))
)
def filter_event(self, component, event):
return component.addons_cache[event]
class Addon(models.Model):
component = models.ForeignKey(Component, on_delete=models.deletion.CASCADE)
name = models.CharField(max_length=100)
configuration = JSONField()
state = JSONField()
project_scope = models.BooleanField(default=False, db_index=True)
repo_scope = models.BooleanField(default=False, db_index=True)
objects = AddonQuerySet.as_manager()
class Meta:
verbose_name = "add-on"
verbose_name_plural = "add-ons"
def __str__(self):
return f"{self.addon.verbose}: {self.component}"
def get_absolute_url(self):
return reverse(
"addon-detail",
kwargs={
"project": self.component.project.slug,
"component": self.component.slug,
"pk": self.pk,
},
)
def configure_events(self, events):
for event in events:
Event.objects.get_or_create(addon=self, event=event)
self.event_set.exclude(event__in=events).delete()
@cached_property
def addon(self):
return ADDONS[self.name](self)
def delete(self, *args, **kwargs):
# Delete any addon alerts
if self.addon.alert:
self.component.delete_alert(self.addon.alert)
super().delete(*args, **kwargs)
class Event(models.Model):
addon = models.ForeignKey(Addon, on_delete=models.deletion.CASCADE)
event = models.IntegerField(choices=EVENT_CHOICES)
class Meta:
unique_together = ("addon", "event")
verbose_name = "add-on event"
verbose_name_plural = "add-on events"
def __str__(self):
return f"{self.addon}: {self.get_event_display()}"
class AddonsConf(AppConf):
WEBLATE_ADDONS = (
"weblate.addons.gettext.GenerateMoAddon",
"weblate.addons.gettext.UpdateLinguasAddon",
"weblate.addons.gettext.UpdateConfigureAddon",
"weblate.addons.gettext.MsgmergeAddon",
"weblate.addons.gettext.GettextCustomizeAddon",
"weblate.addons.gettext.GettextAuthorComments",
"weblate.addons.cleanup.CleanupAddon",
"weblate.addons.cleanup.RemoveBlankAddon",
"weblate.addons.consistency.LangaugeConsistencyAddon",
"weblate.addons.discovery.DiscoveryAddon",
"weblate.addons.autotranslate.AutoTranslateAddon",
"weblate.addons.flags.SourceEditAddon",
"weblate.addons.flags.TargetEditAddon",
"weblate.addons.flags.SameEditAddon",
"weblate.addons.flags.BulkEditAddon",
"weblate.addons.generate.GenerateFileAddon",
"weblate.addons.json.JSONCustomizeAddon",
"weblate.addons.properties.PropertiesSortAddon",
"weblate.addons.git.GitSquashAddon",
"weblate.addons.removal.RemoveComments",
"weblate.addons.removal.RemoveSuggestions",
"weblate.addons.resx.ResxUpdateAddon",
"weblate.addons.yaml.YAMLCustomizeAddon",
"weblate.addons.cdn.CDNJSAddon",
)
LOCALIZE_CDN_URL = None
LOCALIZE_CDN_PATH = None
class Meta:
prefix = ""
def handle_addon_error(addon, component):
report_error(cause="addon error")
# Uninstall no longer compatible addons
if not addon.addon.can_install(component, None):
component.log_warning("disabling no longer compatible addon: %s", addon.name)
addon.delete()
@receiver(vcs_pre_push)
def pre_push(sender, component, **kwargs):
for addon in Addon.objects.filter_event(component, EVENT_PRE_PUSH):
component.log_debug("running pre_push addon: %s", addon.name)
try:
addon.addon.pre_push(component)
except Exception:
handle_addon_error(addon, component)
@receiver(vcs_post_push)
def post_push(sender, component, **kwargs):
for addon in Addon.objects.filter_event(component, EVENT_POST_PUSH):
component.log_debug("running post_push addon: %s", addon.name)
try:
addon.addon.post_push(component)
except Exception:
handle_addon_error(addon, component)
@receiver(vcs_post_update)
def post_update(
sender,
component,
previous_head: str,
child: bool = False,
skip_push: bool = False,
**kwargs,
):
for addon in Addon.objects.filter_event(component, EVENT_POST_UPDATE):
if child and addon.repo_scope:
continue
component.log_debug("running post_update addon: %s", addon.name)
try:
addon.addon.post_update(component, previous_head, skip_push)
except Exception:
handle_addon_error(addon, component)
@receiver(component_post_update)
def component_update(sender, component, **kwargs):
for addon in Addon.objects.filter_event(component, EVENT_COMPONENT_UPDATE):
component.log_debug("running component_update addon: %s", addon.name)
try:
addon.addon.component_update(component)
except Exception:
handle_addon_error(addon, component)
@receiver(vcs_pre_update)
def pre_update(sender, component, **kwargs):
for addon in Addon.objects.filter_event(component, EVENT_PRE_UPDATE):
component.log_debug("running pre_update addon: %s", addon.name)
try:
addon.addon.pre_update(component)
except Exception:
handle_addon_error(addon, component)
@receiver(vcs_pre_commit)
def pre_commit(sender, translation, author, **kwargs):
addons = Addon.objects.filter_event(translation.component, EVENT_PRE_COMMIT)
for addon in addons:
translation.log_debug("running pre_commit addon: %s", addon.name)
try:
addon.addon.pre_commit(translation, author)
except Exception:
handle_addon_error(addon, translation.component)
@receiver(vcs_post_commit)
def post_commit(sender, component, **kwargs):
addons = Addon.objects.filter_event(component, EVENT_POST_COMMIT)
for addon in addons:
component.log_debug("running post_commit addon: %s", addon.name)
try:
addon.addon.post_commit(component)
except Exception:
handle_addon_error(addon, component)
@receiver(translation_post_add)
def post_add(sender, translation, **kwargs):
addons = Addon.objects.filter_event(translation.component, EVENT_POST_ADD)
for addon in addons:
translation.log_debug("running post_add addon: %s", addon.name)
try:
addon.addon.post_add(translation)
except Exception:
handle_addon_error(addon, translation.component)
@receiver(unit_pre_create)
def unit_pre_create_handler(sender, unit, **kwargs):
addons = Addon.objects.filter_event(
unit.translation.component, EVENT_UNIT_PRE_CREATE
)
for addon in addons:
unit.translation.log_debug("running unit_pre_create addon: %s", addon.name)
try:
addon.addon.unit_pre_create(unit)
except Exception:
handle_addon_error(addon, unit.translation.component)
@receiver(post_save, sender=Unit)
@disable_for_loaddata
def unit_post_save_handler(sender, instance, created, **kwargs):
addons = Addon.objects.filter_event(
instance.translation.component, EVENT_UNIT_POST_SAVE
)
for addon in addons:
instance.translation.log_debug("running unit_post_save addon: %s", addon.name)
try:
addon.addon.unit_post_save(instance, created)
except Exception:
handle_addon_error(addon, instance.translation.component)
@receiver(store_post_load)
def store_post_load_handler(sender, translation, store, **kwargs):
addons = Addon.objects.filter_event(translation.component, EVENT_STORE_POST_LOAD)
for addon in addons:
translation.log_debug("running store_post_load addon: %s", addon.name)
try:
addon.addon.store_post_load(translation, store)
except Exception:
handle_addon_error(addon, translation.component)
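# Illustrative note (not part of the original file): every @receiver above
# follows the same pattern -- look up the add-ons registered for the event with
# Addon.objects.filter_event(), call the matching hook on addon.addon, and route
# any exception through handle_addon_error() so a misbehaving add-on is reported
# (and uninstalled if it is no longer compatible) instead of aborting the VCS
# operation.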
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import functools
import logging
import os
import posixpath
import threading
import time
import uuid
from absl import flags
from perfkitbenchmarker import errors
from perfkitbenchmarker import events
from perfkitbenchmarker import vm_util
import six
FLAGS = flags.FLAGS
def Register(parsed_flags):
"""Registers the collector if FLAGS.<collector> is set.
See dstat.py for an example of how to register a collector.
Args:
parsed_flags: argument passed into each call to Register()
"""
del parsed_flags # unused
class BaseCollector(object):
"""Object representing a Base Collector.
A Collector is a utility that is run alongside benchmarks to record stats
at various points when running a benchmark. A Base collector is an abstract
class with common routines that derived collectors use.
"""
def __init__(self, interval=None, output_directory=None):
"""Runs collector on 'vms'.
Start collector collection via `Start`. Stop via `Stop`.
Args:
interval: Optional int. Interval in seconds in which to collect samples.
output_directory: Optional directory where to save collection output.
Raises:
IOError: for when the output directory doesn't exist.
"""
self.interval = interval
self.output_directory = output_directory or vm_util.GetTempDir()
self._lock = threading.Lock()
self._pid_files = {}
self._role_mapping = {} # mapping vm role to output file
self._start_time = 0
if not os.path.isdir(self.output_directory):
raise IOError('collector output directory does not exist: {0}'.format(
self.output_directory))
@abc.abstractmethod
def _CollectorName(self):
pass
@abc.abstractmethod
def _InstallCollector(self, vm):
pass
@abc.abstractmethod
def _CollectorRunCommand(self, vm, collector_file):
pass
def _KillCommand(self, pid):
"""Command to kill off the collector."""
return 'kill {0}'.format(pid)
def _StartOnVm(self, vm, suffix=''):
"""Start collector, having it write to an output file."""
self._InstallCollector(vm)
suffix = '{0}-{1}'.format(suffix, self._CollectorName())
collector_file = posixpath.join(
vm_util.VM_TMP_DIR, '{0}{1}.stdout'.format(vm.name, suffix))
cmd = self._CollectorRunCommand(vm, collector_file)
stdout, _ = vm.RemoteCommand(cmd)
with self._lock:
self._pid_files[vm.name] = (stdout.strip(), collector_file)
def _StopOnVm(self, vm, vm_role):
"""Stop collector on 'vm' and copy the files back."""
if vm.name not in self._pid_files:
logging.warning('No collector PID for %s', vm.name)
return
else:
with self._lock:
pid, file_name = self._pid_files.pop(vm.name)
vm.RemoteCommand(self._KillCommand(pid), ignore_failure=True)
try:
vm.PullFile(self.output_directory, file_name)
self._role_mapping[vm_role] = file_name
except errors.VirtualMachine.RemoteCommandError as ex:
logging.exception('Failed fetching collector result from %s.', vm.name)
raise ex
def Start(self, sender, benchmark_spec):
"""Install and start collector on all VMs in 'benchmark_spec'."""
suffix = '-{0}-{1}'.format(benchmark_spec.uid, str(uuid.uuid4())[:8])
self.StartOnVms(sender, benchmark_spec.vms, suffix)
def StartOnVms(self, sender, vms, id_suffix):
"""Install and start collector on given subset of vms.
Args:
sender: sender of the request/event to start collector.
vms: vms to run the collector on.
id_suffix: id_suffix of the collector output file.
"""
del sender # unused
func = functools.partial(self._StartOnVm, suffix=id_suffix)
vm_util.RunThreaded(func, vms)
self._start_time = time.time()
return
def Stop(self, sender, benchmark_spec, name=''):
"""Stop collector on all VMs in 'benchmark_spec', fetch results."""
self.StopOnVms(sender, benchmark_spec.vm_groups, name)
def StopOnVms(self, sender, vm_groups, name):
"""Stop collector on given subset of vms, fetch results.
Args:
sender: sender of the event to stop the collector.
vm_groups: vm_groups to stop the collector on.
name: name of event to be stopped.
"""
events.record_event.send(sender, event=name,
start_timestamp=self._start_time,
end_timestamp=time.time(),
metadata={})
args = []
for role, vms in six.iteritems(vm_groups):
args.extend([((
vm, '%s_%s' % (role, idx)), {}) for idx, vm in enumerate(vms)])
vm_util.RunThreaded(self._StopOnVm, args)
return
@abc.abstractmethod
def Analyze(self, sender, benchmark_spec, samples):
"""Analyze collector file and record samples."""
pass
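# Illustrative sketch (not part of the original module): a concrete collector
# only needs to fill in the three abstract hooks above. All names below are
# hypothetical; the run command must start the tool in the background and echo
# its PID, since _StartOnVm() records the command's stdout as the PID.
#
#     class ExampleDstatCollector(BaseCollector):
#         def _CollectorName(self):
#             return 'dstat'
#
#         def _InstallCollector(self, vm):
#             vm.InstallPackages('dstat')  # hypothetical package choice
#
#         def _CollectorRunCommand(self, vm, collector_file):
#             return 'dstat --output {0} {1} > /dev/null 2>&1 & echo $!'.format(
#                 collector_file, self.interval or 1)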
|
import os
import re
import sys
from collections import namedtuple # noqa
__version__ = '5.0.2'
__author__ = 'Ask Solem'
__contact__ = '[email protected], [email protected]'
__homepage__ = 'https://kombu.readthedocs.io'
__docformat__ = 'restructuredtext en'
# -eof meta-
version_info_t = namedtuple('version_info_t', (
'major', 'minor', 'micro', 'releaselevel', 'serial',
))
# bumpversion can only search for {current_version}
# so we have to parse the version here.
_temp = re.match(
r'(\d+)\.(\d+).(\d+)(.+)?', __version__).groups()
VERSION = version_info = version_info_t(
int(_temp[0]), int(_temp[1]), int(_temp[2]), _temp[3] or '', '')
del _temp
del re
STATICA_HACK = True
globals()['kcah_acitats'[::-1].upper()] = False
if STATICA_HACK: # pragma: no cover
# This is never executed, but tricks static analyzers (PyDev, PyCharm,
# pylint, etc.) into knowing the types of these symbols, and what
# they contain.
from kombu.connection import Connection, BrokerConnection # noqa
from kombu.entity import Exchange, Queue, binding # noqa
from kombu.message import Message # noqa
from kombu.messaging import Consumer, Producer # noqa
from kombu.pools import connections, producers # noqa
from kombu.utils.url import parse_url # noqa
from kombu.common import eventloop, uuid # noqa
from kombu.serialization import ( # noqa
enable_insecure_serializers,
disable_insecure_serializers,
)
# Lazy loading.
# - See werkzeug/__init__.py for the rationale behind this.
from types import ModuleType # noqa
all_by_module = {
'kombu.connection': ['Connection', 'BrokerConnection'],
'kombu.entity': ['Exchange', 'Queue', 'binding'],
'kombu.message': ['Message'],
'kombu.messaging': ['Consumer', 'Producer'],
'kombu.pools': ['connections', 'producers'],
'kombu.utils.url': ['parse_url'],
'kombu.common': ['eventloop', 'uuid'],
'kombu.serialization': [
'enable_insecure_serializers',
'disable_insecure_serializers',
],
}
object_origins = {}
for module, items in all_by_module.items():
for item in items:
object_origins[item] = module
class module(ModuleType):
"""Customized Python module."""
def __getattr__(self, name):
if name in object_origins:
module = __import__(object_origins[name], None, None, [name])
for extra_name in all_by_module[module.__name__]:
setattr(self, extra_name, getattr(module, extra_name))
return getattr(module, name)
return ModuleType.__getattribute__(self, name)
def __dir__(self):
result = list(new_module.__all__)
result.extend(('__file__', '__path__', '__doc__', '__all__',
'__docformat__', '__name__', '__path__', 'VERSION',
'__package__', '__version__', '__author__',
'__contact__', '__homepage__', '__docformat__'))
return result
# 2.5 does not define __package__
try:
package = __package__
except NameError: # pragma: no cover
package = 'kombu'
# keep a reference to this module so that it's not garbage collected
old_module = sys.modules[__name__]
new_module = sys.modules[__name__] = module(__name__)
new_module.__dict__.update({
'__file__': __file__,
'__path__': __path__,
'__doc__': __doc__,
'__all__': tuple(object_origins),
'__version__': __version__,
'__author__': __author__,
'__contact__': __contact__,
'__homepage__': __homepage__,
'__docformat__': __docformat__,
'__package__': package,
'version_info_t': version_info_t,
'version_info': version_info,
'VERSION': VERSION
})
if os.environ.get('KOMBU_LOG_DEBUG'): # pragma: no cover
os.environ.update(KOMBU_LOG_CHANNEL='1', KOMBU_LOG_CONNECTION='1')
from .utils import debug
debug.setup_logging()
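# Illustrative note (not part of the original file): the ModuleType subclass
# above makes top-level attribute access lazy -- e.g. ``from kombu import
# Connection`` (or ``kombu.Connection``) only imports kombu.connection the first
# time the attribute is looked up, after which the objects are cached on the
# module via setattr().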
|
from __future__ import unicode_literals
import unittest
import codecs
import os,sys,inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0,parentdir)
from learn_bpe import learn_bpe
from apply_bpe import BPE
class TestBPELearnMethod(unittest.TestCase):
def test_learn_bpe(self):
infile = codecs.open(os.path.join(currentdir,'data','corpus.en'), encoding='utf-8')
outfile = codecs.open(os.path.join(currentdir,'data','bpe.out'), 'w', encoding='utf-8')
learn_bpe(infile, outfile, 1000)
infile.close()
outfile.close()
outlines = open(os.path.join(currentdir,'data','bpe.out'))
reflines = open(os.path.join(currentdir,'data','bpe.ref'))
for line, line2 in zip(outlines, reflines):
self.assertEqual(line, line2)
outlines.close()
reflines.close()
class TestBPESegmentMethod(unittest.TestCase):
def setUp(self):
with codecs.open(os.path.join(currentdir,'data','bpe.ref'), encoding='utf-8') as bpefile:
self.bpe = BPE(bpefile)
self.infile = codecs.open(os.path.join(currentdir,'data','corpus.en'), encoding='utf-8')
self.reffile = codecs.open(os.path.join(currentdir,'data','corpus.bpe.ref.en'), encoding='utf-8')
def tearDown(self):
self.infile.close()
self.reffile.close()
def test_apply_bpe(self):
for line, ref in zip(self.infile, self.reffile):
out = self.bpe.process_line(line)
self.assertEqual(out, ref)
def test_trailing_whitespace(self):
"""BPE.proces_line() preserves leading and trailing whitespace"""
orig = ' iron cement \n'
exp = ' ir@@ on c@@ ement \n'
out = self.bpe.process_line(orig)
self.assertEqual(out, exp)
def test_utf8_whitespace(self):
"""UTF-8 whitespace is treated as normal character, not word boundary"""
orig = 'iron\xa0cement\n'
exp = 'ir@@ on@@ \xa0@@ c@@ ement\n'
out = self.bpe.process_line(orig)
self.assertEqual(out, exp)
def test_empty_line(self):
orig = '\n'
exp = '\n'
out = self.bpe.process_line(orig)
self.assertEqual(out, exp)
if __name__ == '__main__':
unittest.main()
|
from typing import List, Optional
import voluptuous as vol
from homeassistant.components.device_automation import toggle_entity
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
CONF_DEVICE_ID,
CONF_DOMAIN,
CONF_ENTITY_ID,
CONF_TYPE,
)
from homeassistant.core import Context, HomeAssistant
from homeassistant.helpers import entity_registry
import homeassistant.helpers.config_validation as cv
from . import DOMAIN, const
SET_HUMIDITY_SCHEMA = cv.DEVICE_ACTION_BASE_SCHEMA.extend(
{
vol.Required(CONF_TYPE): "set_humidity",
vol.Required(CONF_ENTITY_ID): cv.entity_domain(DOMAIN),
vol.Required(const.ATTR_HUMIDITY): vol.Coerce(int),
}
)
SET_MODE_SCHEMA = cv.DEVICE_ACTION_BASE_SCHEMA.extend(
{
vol.Required(CONF_TYPE): "set_mode",
vol.Required(CONF_ENTITY_ID): cv.entity_domain(DOMAIN),
vol.Required(const.ATTR_MODE): cv.string,
}
)
ONOFF_SCHEMA = toggle_entity.ACTION_SCHEMA.extend({vol.Required(CONF_DOMAIN): DOMAIN})
ACTION_SCHEMA = vol.Any(SET_HUMIDITY_SCHEMA, SET_MODE_SCHEMA, ONOFF_SCHEMA)
async def async_get_actions(hass: HomeAssistant, device_id: str) -> List[dict]:
"""List device actions for Humidifier devices."""
registry = await entity_registry.async_get_registry(hass)
actions = await toggle_entity.async_get_actions(hass, device_id, DOMAIN)
# Get all the integration's entities for this device
for entry in entity_registry.async_entries_for_device(registry, device_id):
if entry.domain != DOMAIN:
continue
state = hass.states.get(entry.entity_id)
actions.append(
{
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "set_humidity",
}
)
# We need a state or else we can't populate the available modes.
if state is None:
continue
if state.attributes[ATTR_SUPPORTED_FEATURES] & const.SUPPORT_MODES:
actions.append(
{
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "set_mode",
}
)
return actions
async def async_call_action_from_config(
hass: HomeAssistant, config: dict, variables: dict, context: Optional[Context]
) -> None:
"""Execute a device action."""
config = ACTION_SCHEMA(config)
service_data = {ATTR_ENTITY_ID: config[CONF_ENTITY_ID]}
if config[CONF_TYPE] == "set_humidity":
service = const.SERVICE_SET_HUMIDITY
service_data[const.ATTR_HUMIDITY] = config[const.ATTR_HUMIDITY]
elif config[CONF_TYPE] == "set_mode":
service = const.SERVICE_SET_MODE
service_data[const.ATTR_MODE] = config[const.ATTR_MODE]
else:
return await toggle_entity.async_call_action_from_config(
hass, config, variables, context, DOMAIN
)
await hass.services.async_call(
DOMAIN, service, service_data, blocking=True, context=context
)
async def async_get_action_capabilities(hass, config):
"""List action capabilities."""
state = hass.states.get(config[CONF_ENTITY_ID])
action_type = config[CONF_TYPE]
fields = {}
if action_type == "set_humidity":
fields[vol.Required(const.ATTR_HUMIDITY)] = vol.Coerce(int)
elif action_type == "set_mode":
if state:
available_modes = state.attributes.get(const.ATTR_AVAILABLE_MODES, [])
else:
available_modes = []
fields[vol.Required(const.ATTR_MODE)] = vol.In(available_modes)
else:
return {}
return {"extra_fields": vol.Schema(fields)}
|
import numpy as np
import unittest
from chainer import testing
from chainercv.transforms import crop_bbox
class TestCropBbox(unittest.TestCase):
def setUp(self):
self.bbox = np.array((
(0, 0, 3, 4),
(0, 0, 5, 6),
(0, 5, 3, 6),
(1, 2, 3, 4),
(3, 3, 4, 6),
), dtype=np.float32)
self.y_slice = slice(1, 5)
self.x_slice = slice(0, 4)
def test_crop_bbox(self):
expected = np.array((
(0, 0, 2, 4),
(0, 0, 4, 4),
(0, 2, 2, 4),
(2, 3, 3, 4),
), dtype=np.float32)
out, param = crop_bbox(
self.bbox, y_slice=self.y_slice, x_slice=self.x_slice,
return_param=True)
np.testing.assert_equal(out, expected)
np.testing.assert_equal(param['index'], (0, 1, 3, 4))
np.testing.assert_equal(param['truncated_index'], (0, 1, 3))
def test_crop_bbox_disallow_outside_center(self):
expected = np.array((
(0, 0, 2, 4),
(0, 0, 4, 4),
(0, 2, 2, 4),
), dtype=np.float32)
out, param = crop_bbox(
self.bbox, y_slice=self.y_slice, x_slice=self.x_slice,
allow_outside_center=False, return_param=True)
np.testing.assert_equal(out, expected)
np.testing.assert_equal(param['index'], (0, 1, 3))
np.testing.assert_equal(param['truncated_index'], (0, 1))
testing.run_module(__name__, __file__)
|
import unittest
import numpy as np
from pgmpy.models import ClusterGraph
from pgmpy.tests import help_functions as hf
from pgmpy.factors.discrete import DiscreteFactor
class TestClusterGraphCreation(unittest.TestCase):
def setUp(self):
self.graph = ClusterGraph()
def test_add_single_node(self):
self.graph.add_node(("a", "b"))
self.assertListEqual(list(self.graph.nodes()), [("a", "b")])
def test_add_single_node_raises_error(self):
self.assertRaises(TypeError, self.graph.add_node, "a")
def test_add_multiple_nodes(self):
self.graph.add_nodes_from([("a", "b"), ("b", "c")])
self.assertListEqual(
hf.recursive_sorted(self.graph.nodes()), [["a", "b"], ["b", "c"]]
)
def test_add_single_edge(self):
self.graph.add_edge(("a", "b"), ("b", "c"))
self.assertListEqual(
hf.recursive_sorted(self.graph.nodes()), [["a", "b"], ["b", "c"]]
)
self.assertListEqual(
sorted([node for edge in self.graph.edges() for node in edge]),
[("a", "b"), ("b", "c")],
)
def test_add_single_edge_raises_error(self):
self.assertRaises(ValueError, self.graph.add_edge, ("a", "b"), ("c", "d"))
def tearDown(self):
del self.graph
class TestClusterGraphFactorOperations(unittest.TestCase):
def setUp(self):
self.graph = ClusterGraph()
def test_add_single_factor(self):
self.graph.add_node(("a", "b"))
phi1 = DiscreteFactor(["a", "b"], [2, 2], np.random.rand(4))
self.graph.add_factors(phi1)
self.assertCountEqual(self.graph.factors, [phi1])
def test_add_single_factor_raises_error(self):
self.graph.add_node(("a", "b"))
phi1 = DiscreteFactor(["b", "c"], [2, 2], np.random.rand(4))
self.assertRaises(ValueError, self.graph.add_factors, phi1)
def test_add_multiple_factors(self):
self.graph.add_edges_from([[("a", "b"), ("b", "c")]])
phi1 = DiscreteFactor(["a", "b"], [2, 2], np.random.rand(4))
phi2 = DiscreteFactor(["b", "c"], [2, 2], np.random.rand(4))
self.graph.add_factors(phi1, phi2)
self.assertCountEqual(self.graph.factors, [phi1, phi2])
def test_get_factors(self):
self.graph.add_edges_from([[("a", "b"), ("b", "c")]])
phi1 = DiscreteFactor(["a", "b"], [2, 2], np.random.rand(4))
phi2 = DiscreteFactor(["b", "c"], [2, 2], np.random.rand(4))
self.assertCountEqual(self.graph.get_factors(), [])
self.graph.add_factors(phi1, phi2)
self.assertEqual(self.graph.get_factors(node=("b", "a")), phi1)
self.assertEqual(self.graph.get_factors(node=("b", "c")), phi2)
self.assertCountEqual(self.graph.get_factors(), [phi1, phi2])
def test_remove_factors(self):
self.graph.add_edges_from([[("a", "b"), ("b", "c")]])
phi1 = DiscreteFactor(["a", "b"], [2, 2], np.random.rand(4))
phi2 = DiscreteFactor(["b", "c"], [2, 2], np.random.rand(4))
self.graph.add_factors(phi1, phi2)
self.graph.remove_factors(phi1)
self.assertCountEqual(self.graph.factors, [phi2])
def test_get_partition_function(self):
self.graph.add_edges_from([[("a", "b"), ("b", "c")]])
phi1 = DiscreteFactor(["a", "b"], [2, 2], range(4))
phi2 = DiscreteFactor(["b", "c"], [2, 2], range(4))
self.graph.add_factors(phi1, phi2)
self.assertEqual(self.graph.get_partition_function(), 22.0)
def tearDown(self):
del self.graph
class TestClusterGraphMethods(unittest.TestCase):
def setUp(self):
self.graph = ClusterGraph()
def test_get_cardinality(self):
self.graph.add_edges_from(
[(("a", "b", "c"), ("a", "b")), (("a", "b", "c"), ("a", "c"))]
)
self.assertDictEqual(self.graph.get_cardinality(), {})
phi1 = DiscreteFactor(["a", "b", "c"], [1, 2, 2], np.random.rand(4))
self.graph.add_factors(phi1)
self.assertDictEqual(self.graph.get_cardinality(), {"a": 1, "b": 2, "c": 2})
self.graph.remove_factors(phi1)
self.assertDictEqual(self.graph.get_cardinality(), {})
phi1 = DiscreteFactor(["a", "b"], [1, 2], np.random.rand(2))
phi2 = DiscreteFactor(["a", "c"], [1, 2], np.random.rand(2))
self.graph.add_factors(phi1, phi2)
self.assertDictEqual(self.graph.get_cardinality(), {"a": 1, "b": 2, "c": 2})
phi3 = DiscreteFactor(["a", "c"], [1, 1], np.random.rand(1))
self.graph.add_factors(phi3)
self.assertDictEqual(self.graph.get_cardinality(), {"c": 1, "b": 2, "a": 1})
self.graph.remove_factors(phi1, phi2, phi3)
self.assertDictEqual(self.graph.get_cardinality(), {})
def test_get_cardinality_with_node(self):
self.graph.add_edges_from([(("a", "b"), ("a", "c"))])
phi1 = DiscreteFactor(["a", "b"], [1, 2], np.random.rand(2))
phi2 = DiscreteFactor(["a", "c"], [1, 2], np.random.rand(2))
self.graph.add_factors(phi1, phi2)
self.assertEqual(self.graph.get_cardinality("a"), 1)
self.assertEqual(self.graph.get_cardinality("b"), 2)
self.assertEqual(self.graph.get_cardinality("c"), 2)
def test_check_model(self):
self.graph.add_edges_from([(("a", "b"), ("a", "c"))])
phi1 = DiscreteFactor(["a", "b"], [1, 2], np.random.rand(2))
phi2 = DiscreteFactor(["a", "c"], [1, 2], np.random.rand(2))
self.graph.add_factors(phi1, phi2)
self.assertTrue(self.graph.check_model())
self.graph.remove_factors(phi2)
phi2 = DiscreteFactor(["a", "c"], [1, 2], np.random.rand(2))
self.graph.add_factors(phi2)
self.assertTrue(self.graph.check_model())
def test_check_model1(self):
self.graph.add_edges_from([(("a", "b"), ("a", "c")), (("a", "c"), ("a", "d"))])
phi1 = DiscreteFactor(["a", "b"], [1, 2], np.random.rand(2))
self.graph.add_factors(phi1)
self.assertRaises(ValueError, self.graph.check_model)
phi2 = DiscreteFactor(["a", "c"], [1, 2], np.random.rand(2))
self.graph.add_factors(phi2)
self.assertRaises(ValueError, self.graph.check_model)
def test_check_model2(self):
self.graph.add_edges_from([(("a", "b"), ("a", "c")), (("a", "c"), ("a", "d"))])
phi1 = DiscreteFactor(["a", "b"], [1, 2], np.random.rand(2))
phi2 = DiscreteFactor(["a", "c"], [3, 3], np.random.rand(9))
phi3 = DiscreteFactor(["a", "d"], [4, 4], np.random.rand(16))
self.graph.add_factors(phi1, phi2, phi3)
self.assertRaises(ValueError, self.graph.check_model)
self.graph.remove_factors(phi2)
phi2 = DiscreteFactor(["a", "c"], [1, 3], np.random.rand(3))
self.graph.add_factors(phi2)
self.assertRaises(ValueError, self.graph.check_model)
self.graph.remove_factors(phi3)
phi3 = DiscreteFactor(["a", "d"], [1, 4], np.random.rand(4))
self.graph.add_factors(phi3)
self.assertTrue(self.graph.check_model())
def test_copy_with_factors(self):
self.graph.add_edges_from([[("a", "b"), ("b", "c")]])
phi1 = DiscreteFactor(["a", "b"], [2, 2], np.random.rand(4))
phi2 = DiscreteFactor(["b", "c"], [2, 2], np.random.rand(4))
self.graph.add_factors(phi1, phi2)
graph_copy = self.graph.copy()
self.assertIsInstance(graph_copy, ClusterGraph)
self.assertEqual(
hf.recursive_sorted(self.graph.nodes()),
hf.recursive_sorted(graph_copy.nodes()),
)
self.assertEqual(
hf.recursive_sorted(self.graph.edges()),
hf.recursive_sorted(graph_copy.edges()),
)
self.assertTrue(graph_copy.check_model())
self.assertEqual(self.graph.get_factors(), graph_copy.get_factors())
self.graph.remove_factors(phi1, phi2)
self.assertTrue(
phi1 not in self.graph.factors and phi2 not in self.graph.factors
)
self.assertTrue(phi1 in graph_copy.factors and phi2 in graph_copy.factors)
self.graph.add_factors(phi1, phi2)
self.graph.factors[0] = DiscreteFactor(["a", "b"], [2, 2], np.random.rand(4))
self.assertNotEqual(self.graph.get_factors()[0], graph_copy.get_factors()[0])
self.assertNotEqual(self.graph.factors, graph_copy.factors)
def test_copy_without_factors(self):
self.graph.add_nodes_from([("a", "b", "c"), ("a", "b"), ("a", "c")])
self.graph.add_edges_from(
[(("a", "b", "c"), ("a", "b")), (("a", "b", "c"), ("a", "c"))]
)
graph_copy = self.graph.copy()
self.graph.remove_edge(("a", "b", "c"), ("a", "c"))
self.assertFalse(self.graph.has_edge(("a", "b", "c"), ("a", "c")))
self.assertTrue(graph_copy.has_edge(("a", "b", "c"), ("a", "c")))
self.graph.remove_node(("a", "c"))
self.assertFalse(self.graph.has_node(("a", "c")))
self.assertTrue(graph_copy.has_node(("a", "c")))
self.graph.add_node(("c", "d"))
self.assertTrue(self.graph.has_node(("c", "d")))
self.assertFalse(graph_copy.has_node(("c", "d")))
def tearDown(self):
del self.graph
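# Worked check for test_get_partition_function above (arithmetic only, mirrors the test):
# phi1(a, b) and phi2(b, c) both take the values 0..3 with the last variable varying
# fastest, so
#     Z = sum_{a,b,c} phi1(a, b) * phi2(b, c)
#       = sum_b (sum_a phi1(a, b)) * (sum_c phi2(b, c))
#       = (0 + 2) * (0 + 1) + (1 + 3) * (2 + 3) = 2 + 20 = 22,
# which matches the asserted 22.0.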
|
import os, sys
nest = 0
def trace(frame, event, arg):
global nest
if nest is None:
# This can happen when Python is shutting down.
return None
print("%s%s %s %d @%d" % (
" " * nest,
event,
os.path.basename(frame.f_code.co_filename),
frame.f_lineno,
frame.f_lasti,
))
if event == 'call':
nest += 1
if event == 'return':
nest -= 1
return trace
the_program = sys.argv[1]
code = open(the_program).read()
sys.settrace(trace)
exec(code)
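# Usage sketch (the file name is hypothetical): save this script as trace.py and run
#     python trace.py some_program.py
# Each event prints as "<event> <filename> <lineno> @<lasti>", indented by the current
# call depth; 'call' events increase the depth and 'return' events decrease it.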
|
import unittest
from trashcli.empty import EmptyCmd
from unit_tests.myStringIO import StringIO
import os
from .files import make_file, require_empty_dir, make_dirs, set_sticky_bit
from .files import make_empty_file
from mock import MagicMock
from trashcli.fs import FileSystemReader
from trashcli.fs import FileRemover
import six
from trashcli.empty import main as empty
from trashcli.fs import mkdirs
import shutil
class TestTrashEmptyCmd(unittest.TestCase):
def test(self):
out = StringIO()
empty(['trash-empty', '-h'], stdout = out)
six.assertRegex(self, out.getvalue(), '^Usage. trash-empty.*')
def test_trash_empty_will_crash_on_unreadable_directory_issue_48(self):
out = StringIO()
err = StringIO()
mkdirs('data/Trash/files')
mkdirs('data/Trash/files/unreadable')
os.chmod('data/Trash/files/unreadable', 0o300)
assert os.path.exists('data/Trash/files/unreadable')
empty(['trash-empty'], stdout = out, stderr = err,
environ={'XDG_DATA_HOME':'data'})
assert ("trash-empty: cannot remove data/Trash/files/unreadable\n" ==
err.getvalue())
os.chmod('data/Trash/files/unreadable', 0o700)
shutil.rmtree('data')
def test_the_core_of_failures_for_issue_48(self):
mkdirs('unreadable-dir')
os.chmod('unreadable-dir', 0o300)
assert os.path.exists('unreadable-dir')
try:
FileRemover().remove_file('unreadable-dir')
assert False
except OSError:
pass
os.chmod('unreadable-dir', 0o700)
shutil.rmtree('unreadable-dir')
class TestWhenCalledWithoutArguments(unittest.TestCase):
def setUp(self):
require_empty_dir('XDG_DATA_HOME')
self.info_dir_path = 'XDG_DATA_HOME/Trash/info'
self.files_dir_path = 'XDG_DATA_HOME/Trash/files'
self.environ = {'XDG_DATA_HOME':'XDG_DATA_HOME'}
now = MagicMock(side_effect=RuntimeError)
self.empty_cmd = EmptyCmd(
out = StringIO(),
err = StringIO(),
environ = self.environ,
list_volumes = no_volumes,
now = now,
file_reader = FileSystemReader(),
getuid = None,
file_remover = FileRemover(),
version = None,
)
def user_run_trash_empty(self):
self.empty_cmd.run('trash-empty')
def test_it_should_remove_an_info_file(self):
self.having_a_trashinfo_in_trashcan('foo.trashinfo')
self.user_run_trash_empty()
self.assert_dir_empty(self.info_dir_path)
def test_it_should_remove_all_the_infofiles(self):
self.having_three_trashinfo_in_trashcan()
self.user_run_trash_empty()
self.assert_dir_empty(self.info_dir_path)
def test_it_should_remove_the_backup_files(self):
self.having_one_trashed_file()
self.user_run_trash_empty()
self.assert_dir_empty(self.files_dir_path)
def test_it_should_keep_unknown_files_found_in_infodir(self):
self.having_file_in_info_dir('not-a-trashinfo')
self.user_run_trash_empty()
self.assert_dir_contains(self.info_dir_path, 'not-a-trashinfo')
def test_but_it_should_remove_orphan_files_from_the_files_dir(self):
self.having_orphan_file_in_files_dir()
self.user_run_trash_empty()
self.assert_dir_empty(self.files_dir_path)
def test_it_should_purge_also_directories(self):
os.makedirs("XDG_DATA_HOME/Trash/files/a-dir")
self.user_run_trash_empty()
self.assert_dir_empty(self.files_dir_path)
def assert_dir_empty(self, path):
assert len(os.listdir(path)) == 0
def assert_dir_contains(self, path, filename):
assert os.path.exists(os.path.join(path, filename))
def having_a_trashinfo_in_trashcan(self, basename_of_trashinfo):
make_empty_file(os.path.join(self.info_dir_path, basename_of_trashinfo))
def having_three_trashinfo_in_trashcan(self):
self.having_a_trashinfo_in_trashcan('foo.trashinfo')
self.having_a_trashinfo_in_trashcan('bar.trashinfo')
self.having_a_trashinfo_in_trashcan('baz.trashinfo')
six.assertCountEqual(self,
['foo.trashinfo',
'bar.trashinfo',
'baz.trashinfo'], os.listdir(self.info_dir_path))
def having_one_trashed_file(self):
self.having_a_trashinfo_in_trashcan('foo.trashinfo')
make_empty_file(self.files_dir_path + '/foo')
self.files_dir_should_not_be_empty()
def files_dir_should_not_be_empty(self):
assert len(os.listdir(self.files_dir_path)) != 0
def having_file_in_info_dir(self, filename):
make_empty_file(os.path.join(self.info_dir_path, filename))
def having_orphan_file_in_files_dir(self):
complete_path = os.path.join(self.files_dir_path,
'a-file-without-any-associated-trashinfo')
make_empty_file(complete_path)
assert os.path.exists(complete_path)
class TestWhen_invoked_with_N_days_as_argument(unittest.TestCase):
def setUp(self):
require_empty_dir('XDG_DATA_HOME')
self.xdg_data_home = 'XDG_DATA_HOME'
self.environ = {'XDG_DATA_HOME':'XDG_DATA_HOME'}
self.now = MagicMock(side_effect=RuntimeError)
self.empty_cmd=EmptyCmd(
out = StringIO(),
err = StringIO(),
environ = self.environ,
list_volumes = no_volumes,
now = self.now,
file_reader = FileSystemReader(),
getuid = None,
file_remover = FileRemover(),
version = None,
)
def user_run_trash_empty(self, *args):
self.empty_cmd.run('trash-empty', *args)
def set_clock_at(self, yyyy_mm_dd):
self.now.side_effect = lambda:date(yyyy_mm_dd)
def date(yyyy_mm_dd):
from datetime import datetime
return datetime.strptime(yyyy_mm_dd, '%Y-%m-%d')
def test_it_should_keep_files_newer_than_N_days(self):
self.having_a_trashed_file('foo', '2000-01-01')
self.set_clock_at('2000-01-01')
self.user_run_trash_empty('2')
self.file_should_have_been_kept_in_trashcan('foo')
def test_it_should_remove_files_older_than_N_days(self):
self.having_a_trashed_file('foo', '1999-01-01')
self.set_clock_at('2000-01-01')
self.user_run_trash_empty('2')
self.file_should_have_been_removed_from_trashcan('foo')
def test_it_should_keep_files_with_invalid_deletion_date(self):
self.having_a_trashed_file('foo', 'Invalid Date')
self.set_clock_at('2000-01-01')
self.user_run_trash_empty('2')
self.file_should_have_been_kept_in_trashcan('foo')
def having_a_trashed_file(self, name, date):
contents = "DeletionDate=%sT00:00:00\n" % date
make_file(self.trashinfo(name), contents)
def trashinfo(self, name):
return '%(dirname)s/Trash/info/%(name)s.trashinfo' % {
'dirname' : self.xdg_data_home,
'name' : name }
def file_should_have_been_kept_in_trashcan(self, trashinfo_name):
assert os.path.exists(self.trashinfo(trashinfo_name))
def file_should_have_been_removed_from_trashcan(self, trashinfo_name):
assert not os.path.exists(self.trashinfo(trashinfo_name))
class TestEmptyCmdWithMultipleVolumes(unittest.TestCase):
def setUp(self):
require_empty_dir('topdir')
self.empty=EmptyCmd(
out = StringIO(),
err = StringIO(),
environ = {},
list_volumes = lambda: ['topdir'],
now = None,
file_reader = FileSystemReader(),
getuid = lambda: 123,
file_remover = FileRemover(),
version = None,
)
def test_it_removes_trashinfos_from_method_1_dir(self):
self.make_proper_top_trash_dir('topdir/.Trash')
make_empty_file('topdir/.Trash/123/info/foo.trashinfo')
self.empty.run('trash-empty')
assert not os.path.exists('topdir/.Trash/123/info/foo.trashinfo')
def test_it_removes_trashinfos_from_method_2_dir(self):
make_empty_file('topdir/.Trash-123/info/foo.trashinfo')
self.empty.run('trash-empty')
assert not os.path.exists('topdir/.Trash-123/info/foo.trashinfo')
def test_it_removes_trashinfo_from_specified_trash_dir(self):
make_empty_file('specified/info/foo.trashinfo')
self.empty.run('trash-empty', '--trash-dir', 'specified')
assert not os.path.exists('specified/info/foo.trashinfo')
def make_proper_top_trash_dir(self, path):
make_dirs(path)
set_sticky_bit(path)
from textwrap import dedent
class TestTrashEmpty_on_help(unittest.TestCase):
def test_help_output(self):
err, out = StringIO(), StringIO()
cmd = EmptyCmd(err = err,
out = out,
environ = {},
list_volumes = no_volumes,
now = None,
file_reader = FileSystemReader(),
getuid = None,
file_remover = None,
version = None,
)
cmd.run('trash-empty', '--help')
assert out.getvalue() == dedent("""\
Usage: trash-empty [days]
Purge trashed files.
Options:
--version show program's version number and exit
-h, --help show this help message and exit
Report bugs to https://github.com/andreafrancia/trash-cli/issues
""")
class TestTrashEmpty_on_version(unittest.TestCase):
def test_it_print_version(self):
err, out = StringIO(), StringIO()
cmd = EmptyCmd(err = err,
out = out,
environ = {},
list_volumes = no_volumes,
now = None,
file_reader = FileSystemReader(),
getuid = None,
file_remover = None,
version = '1.2.3',
)
cmd.run('trash-empty', '--version')
assert out.getvalue() == dedent("""\
trash-empty 1.2.3
""")
class Test_describe_trash_empty_command_line__on_invalid_options(unittest.TestCase):
def setUp(self):
self.err, self.out = StringIO(), StringIO()
self.cmd = EmptyCmd(
err = self.err,
out = self.out,
environ = {},
list_volumes = no_volumes,
now = None,
file_reader = FileSystemReader(),
getuid = None,
file_remover = None,
version = None,
)
def test_it_should_fail(self):
self.exit_code = self.cmd.run('trash-empty', '-2')
exit_code_for_command_line_usage = 64
assert exit_code_for_command_line_usage == self.exit_code
def test_it_should_complain_to_the_standard_error(self):
self.exit_code = self.cmd.run('trash-empty', '-2')
assert self.err.getvalue() == dedent("""\
trash-empty: invalid option -- '2'
""")
def test_with_a_different_option(self):
self.cmd.run('trash-empty', '-3')
assert self.err.getvalue() == dedent("""\
trash-empty: invalid option -- '3'
""")
def no_volumes():
return []
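# Background for the fixtures above (freedesktop.org trash layout as exercised by these
# tests, not trash-cli internals): every trashed file has a metadata entry
# <name>.trashinfo under Trash/info and its payload under Trash/files. For extra
# volumes, "method 1" directories live at <topdir>/.Trash/<uid> (the top directory must
# have the sticky bit set) and "method 2" directories at <topdir>/.Trash-<uid>.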
|
from dataclasses import dataclass, field
from .const import DOMAIN
from .device import BroadlinkDevice
@dataclass
class BroadlinkData:
"""Class for sharing data within the Broadlink integration."""
devices: dict = field(default_factory=dict)
platforms: dict = field(default_factory=dict)
async def async_setup(hass, config):
"""Set up the Broadlink integration."""
hass.data[DOMAIN] = BroadlinkData()
return True
async def async_setup_entry(hass, entry):
"""Set up a Broadlink device from a config entry."""
device = BroadlinkDevice(hass, entry)
return await device.async_setup()
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
device = hass.data[DOMAIN].devices.pop(entry.entry_id)
return await device.async_unload()
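# Note: hass.data[DOMAIN].devices is assumed to map config entry ids to BroadlinkDevice
# instances (registration happens inside BroadlinkDevice.async_setup, which is not shown
# here), which is why async_unload_entry can simply pop the device for the same entry.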
|
import numpy as np
from scipy.special import ndtr
from scattertext.Scalers import scale_neg_1_to_1_with_zero_mean_abs_max
from scattertext.termranking import AbsoluteFrequencyRanker
from scattertext.termsignificance.TermSignificance import TermSignificance
def z_to_p_val(z_scores):
# return norm.sf(-z_scores) - 0.5 + 0.5
return ndtr(z_scores)
class LogOddsRatioSmoothed(TermSignificance):
def __init__(self, alpha_w=1, ranker=AbsoluteFrequencyRanker):
'''
Parameters
----------
alpha_w : np.float
The constant prior.
'''
self.alpha_w = alpha_w
def get_name(self):
return "Log-Odds-Ratio w/ Add One Smoothing"
def get_p_vals(self, X):
'''
Parameters
----------
X : np.array
Array of word counts, shape (N, 2) where N is the vocab size. X[:,0] is the
positive class, while X[:,1] is the negative class.
Returns
-------
np.array of p-values
'''
# Eqs 19-22
return z_to_p_val(self.get_zeta_i_j(X))
def get_p_vals_given_separate_counts(self, y_i, y_j):
'''
Parameters
----------
y_i, np.array(int)
Arrays of word counts of words occurring in positive class
y_j, np.array(int)
Returns
-------
np.array of p-values
'''
return z_to_p_val(self.get_zeta_i_j_given_separate_counts(y_i, y_j))
def get_zeta_i_j_given_separate_counts(self, y_i, y_j):
'''
Parameters
----------
y_i, np.array(int)
Arrays of word counts of words occurring in positive class
y_j, np.array(int)
Returns
-------
np.array of z-scores
'''
n_i, n_j = y_i.sum(), y_j.sum()
delta_i_j = (np.log((y_i + 1) / (1. + n_i - y_i))
- np.log((y_j + 1) / (1. + n_j - y_j)))
return delta_i_j
def get_zeta_i_j(self, X):
'''
Parameters
----------
X : np.array
Array of word counts, shape (N, 2) where N is the vocab size. X[:,0] is the
positive class, while X[:,1] is the negative class.
Returns
-------
np.array of z-scores
'''
y_i, y_j = X.T[0], X.T[1]
return self.get_zeta_i_j_given_separate_counts(y_i, y_j)
def get_default_score(self):
return 0
def get_scores(self, y_i, y_j):
'''
Same function as get_zeta_i_j_given_separate_counts
Parameters
----------
y_i, np.array(int)
Arrays of word counts of words occurring in positive class
y_j, np.array(int)
Returns
-------
np.array of z-scores
'''
z_scores = self.get_zeta_i_j_given_separate_counts(y_i, y_j)
#scaled_scores = scale_neg_1_to_1_with_zero_mean_abs_max(z_scores)
return z_scores
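# Minimal sketch of the computation above (assumed inputs, not part of the library API):
#     y_i = np.array([10, 2]); y_j = np.array([1, 5])
#     delta = (np.log((y_i + 1) / (1. + y_i.sum() - y_i))
#              - np.log((y_j + 1) / (1. + y_j.sum() - y_j)))
#     p = ndtr(delta)  # map the smoothed log-odds-ratio z-scores to p-values
# i.e. an add-one smoothed log-odds-ratio per term, pushed through the normal CDF.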
|
import numpy as np
import tensorflow as tf
from keras import layers, backend as K
from keras.losses import Loss
from keras.utils import losses_utils
class RankCrossEntropyLoss(Loss):
"""
Rank cross entropy loss.
Examples:
>>> from keras import backend as K
>>> softmax = lambda x: np.exp(x)/np.sum(np.exp(x), axis=0)
>>> x_pred = K.variable(np.array([[1.0], [1.2], [0.8]]))
>>> x_true = K.variable(np.array([[1], [0], [0]]))
>>> expect = -np.log(softmax(np.array([[1.0], [1.2], [0.8]])))
>>> loss = K.eval(RankCrossEntropyLoss(num_neg=2)(x_true, x_pred))
>>> np.isclose(loss, expect[0]).all()
True
"""
def __init__(self, num_neg: int = 1):
"""
:class:`RankCrossEntropyLoss` constructor.
:param num_neg: number of negative instances in cross entropy loss.
"""
super().__init__(reduction=losses_utils.Reduction.SUM_OVER_BATCH_SIZE,
name="rank_crossentropy")
self._num_neg = num_neg
def call(self, y_true: np.array, y_pred: np.array,
sample_weight=None) -> np.array:
"""
Calculate rank cross entropy loss.
:param y_true: Label.
:param y_pred: Predicted result.
:return: Crossentropy loss computed by user-defined negative number.
"""
logits = layers.Lambda(lambda a: a[::(self._num_neg + 1), :])(y_pred)
labels = layers.Lambda(lambda a: a[::(self._num_neg + 1), :])(y_true)
logits, labels = [logits], [labels]
for neg_idx in range(self._num_neg):
neg_logits = layers.Lambda(
lambda a: a[neg_idx + 1::(self._num_neg + 1), :])(y_pred)
neg_labels = layers.Lambda(
lambda a: a[neg_idx + 1::(self._num_neg + 1), :])(y_true)
logits.append(neg_logits)
labels.append(neg_labels)
logits = tf.concat(logits, axis=-1)
labels = tf.concat(labels, axis=-1)
smoothed_prob = tf.nn.softmax(logits) + np.finfo(float).eps
loss = -(tf.reduce_sum(labels * tf.math.log(smoothed_prob), axis=-1))
return losses_utils.compute_weighted_loss(
loss, sample_weight, reduction=self.reduction)
@property
def num_neg(self):
"""`num_neg` getter."""
return self._num_neg
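# Reading of the slicing in call() (assumed batch layout, not framework documentation):
# y_true / y_pred are expected to be grouped as
#     [pos_0, neg_0_0, ..., neg_0_{num_neg-1}, pos_1, neg_1_0, ...],
# so a[::num_neg + 1] selects the positives and a[k + 1::num_neg + 1] the k-th
# negatives. The loss is then softmax cross entropy over each (positive, negatives)
# group, with a small epsilon added before the log for numerical stability.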
|
import unittest
import tensorflow as tf
import tensorflow_addons as tfa
class TestTensorflowAddons(unittest.TestCase):
def test_tfa_image(self):
img_raw = tf.io.read_file('/input/tests/data/dot.png')
img = tf.io.decode_image(img_raw)
img = tf.image.convert_image_dtype(img, tf.float32)
mean = tfa.image.mean_filter2d(img, filter_shape=1)
self.assertEqual(1, len(mean))
# This test exercises TFA Custom Op. See: b/145555176
def test_gelu(self):
x = tf.constant([[0.5, 1.2, -0.3]])
layer = tfa.layers.GELU()
result = layer(x)
self.assertEqual((1, 3), result.shape)
|
import functools
import hmac
from passlib.hash import apr_md5_crypt
from radicale import auth
class Auth(auth.BaseAuth):
def __init__(self, configuration):
super().__init__(configuration)
self._filename = configuration.get("auth", "htpasswd_filename")
self._encoding = self.configuration.get("encoding", "stock")
encryption = configuration.get("auth", "htpasswd_encryption")
if encryption == "plain":
self._verify = self._plain
elif encryption == "md5":
self._verify = self._md5apr1
elif encryption == "bcrypt":
try:
from passlib.hash import bcrypt
except ImportError as e:
raise RuntimeError(
"The htpasswd encryption method 'bcrypt' requires "
"the passlib[bcrypt] module.") from e
# A call to `hash` raises passlib.exc.MissingBackendError with a
# good error message if bcrypt backend is not available. Trigger
# this here.
bcrypt.hash("test-bcrypt-backend")
self._verify = functools.partial(self._bcrypt, bcrypt)
else:
raise RuntimeError("The htpasswd encryption method %r is not "
"supported." % encryption)
def _plain(self, hash_value, password):
"""Check if ``hash_value`` and ``password`` match, plain method."""
return hmac.compare_digest(hash_value.encode(), password.encode())
def _bcrypt(self, bcrypt, hash_value, password):
return bcrypt.verify(password, hash_value.strip())
def _md5apr1(self, hash_value, password):
return apr_md5_crypt.verify(password, hash_value.strip())
def login(self, login, password):
"""Validate credentials.
Iterate through htpasswd credential file until login matches, extract
hash (encrypted password) and check hash against password,
using the method specified in the Radicale config.
The content of the file is not cached because reading is generally a
very cheap operation, and it's useful to get live updates of the
htpasswd file.
"""
try:
with open(self._filename, encoding=self._encoding) as f:
for line in f:
line = line.rstrip("\n")
if line.lstrip() and not line.lstrip().startswith("#"):
try:
hash_login, hash_value = line.split(
":", maxsplit=1)
# Always compare both login and password to avoid
# timing attacks, see #591.
login_ok = hmac.compare_digest(
hash_login.encode(), login.encode())
password_ok = self._verify(hash_value, password)
if login_ok and password_ok:
return login
except ValueError as e:
raise RuntimeError("Invalid htpasswd file %r: %s" %
(self._filename, e)) from e
except OSError as e:
raise RuntimeError("Failed to load htpasswd file %r: %s" %
(self._filename, e)) from e
return ""
|
import genuiclasses
import genexamples
import gencommonast
def init():
print('GENERATING DOCS ...')
print(' Generating docs for UI classes.')
genuiclasses.main()
print(' Generating examples.')
genexamples.main()
def clean(app, *args):
genuiclasses.clean()
genexamples.clean()
def setup(app):
init()
app.connect('build-finished', clean)
|
import json
import logging
import os
import re
from typing import List, Optional
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import data
from perfkitbenchmarker import dpb_service
from perfkitbenchmarker import errors
from perfkitbenchmarker import sample
from perfkitbenchmarker import temp_dir
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.dpb_service import BaseDpbService
BENCHMARK_NAME = 'dpb_sparksql_benchmark'
BENCHMARK_CONFIG = """
dpb_sparksql_benchmark:
description: Run Spark SQL on dataproc and emr
dpb_service:
service_type: dataproc
worker_group:
vm_spec:
GCP:
machine_type: n1-standard-4
AWS:
machine_type: m5.xlarge
disk_spec:
GCP:
disk_size: 1000
disk_type: pd-standard
AWS:
disk_size: 1000
disk_type: gp2
worker_count: 2
"""
BENCHMARK_NAMES = {
'tpcds_2_4': 'TPC-DS',
'tpch': 'TPC-H'
}
flags.DEFINE_string(
'dpb_sparksql_data', None,
'The HCFS based dataset to run Spark SQL query '
'against')
flags.DEFINE_bool('dpb_sparksql_create_hive_tables', False,
'Whether to load dpb_sparksql_data into external hive tables '
'or not.')
flags.DEFINE_string(
'dpb_sparksql_data_format', None,
"Format of data to load. Assumed to be 'parquet' for HCFS "
"and 'bigquery' for bigquery if unspecified.")
flags.DEFINE_enum('dpb_sparksql_query', 'tpcds_2_4', BENCHMARK_NAMES.keys(),
'The query suite to run against dpb_sparksql_data.')
flags.DEFINE_list(
'dpb_sparksql_order', [],
'The names (numbers) of the queries to run in order. '
'Required.')
flags.DEFINE_string(
'spark_bigquery_connector',
None,
'The Spark BigQuery Connector jar to pass to the Spark Job')
flags.DEFINE_list(
'bigquery_tables', [],
'A list of BigQuery tables to load as Temporary Spark SQL views instead '
'of reading from external Hive tables.'
)
flags.DEFINE_string(
'bigquery_record_format', None,
'The record format to use when connecting to BigQuery storage. See: '
'https://github.com/GoogleCloudDataproc/spark-bigquery-connector#properties'
)
FLAGS = flags.FLAGS
# Creates spark table using pyspark by loading the parquet data.
# Args:
# argv[1]: string, The table name in the dataset that this script will create.
# argv[2]: string, The data path of the table.
SPARK_TABLE_SCRIPT = 'spark_table.py'
SPARK_SQL_RUNNER_SCRIPT = 'spark_sql_runner.py'
SPARK_SQL_PERF_GIT = 'https://github.com/databricks/spark-sql-perf.git'
SPARK_SQL_PERF_GIT_COMMIT = '6b2bf9f9ad6f6c2f620062fda78cded203f619c8'
def GetConfig(user_config):
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def CheckPrerequisites(benchmark_config):
"""Verifies that the required resources are present.
Args:
benchmark_config: Config needed to run the Spark SQL.
Raises:
Config.InvalidValue: On encountering invalid configuration.
"""
dpb_service_type = benchmark_config.dpb_service.service_type
if not FLAGS.dpb_sparksql_data and FLAGS.dpb_sparksql_create_hive_tables:
raise errors.Config.InvalidValue(
'You must pass dpb_sparksql_data with dpb_sparksql_create_hive_tables')
if FLAGS.bigquery_tables and not FLAGS.spark_bigquery_connector:
# Remove if Dataproc ever bundles BigQuery connector
raise errors.Config.InvalidValue(
'You must provide the BigQuery connector using '
'--spark_bigquery_connector.')
if not (FLAGS.dpb_sparksql_data or FLAGS.bigquery_tables):
# In the case of a static dpb_service, data could pre-exist
logging.warning(
'You did not specify --dpb_sparksql_data or --bigquery_tables. '
'You will probably not have data to query!')
if not FLAGS.dpb_sparksql_order:
raise errors.Config.InvalidValue(
'You must specify the queries to run with --dpb_sparksql_order')
def Prepare(benchmark_spec):
"""Installs and sets up dataset on the Spark clusters.
Copies scripts and all the queries to cloud.
Creates external Hive tables for data (unless BigQuery is being used).
Args:
benchmark_spec: The benchmark specification
"""
dpb_service_instance = benchmark_spec.dpb_service
# buckets must start with a letter
bucket = 'pkb-' + benchmark_spec.uuid.split('-')[0]
storage_service = dpb_service_instance.storage_service
storage_service.MakeBucket(bucket)
benchmark_spec.base_dir = dpb_service_instance.PERSISTENT_FS_PREFIX + bucket
benchmark_spec.staged_queries = _LoadAndStageQueries(
storage_service, benchmark_spec.base_dir)
for script in [SPARK_TABLE_SCRIPT, SPARK_SQL_RUNNER_SCRIPT]:
src_url = data.ResourcePath(script)
storage_service.CopyToBucket(src_url, bucket, script)
benchmark_spec.table_subdirs = []
if FLAGS.dpb_sparksql_data:
table_dir = FLAGS.dpb_sparksql_data.rstrip('/') + '/'
stdout = storage_service.List(table_dir)
for line in stdout.split('\n'):
# GCS will sometimes list the directory itself.
if line and line != table_dir:
benchmark_spec.table_subdirs.append(
re.split(' |/', line.rstrip('/')).pop())
# Create external Hive tables
if FLAGS.dpb_sparksql_create_hive_tables:
try:
result = dpb_service_instance.SubmitJob(
pyspark_file=os.path.join(benchmark_spec.base_dir,
SPARK_TABLE_SCRIPT),
job_type=BaseDpbService.PYSPARK_JOB_TYPE,
job_arguments=[
FLAGS.dpb_sparksql_data, ','.join(benchmark_spec.table_subdirs)
])
logging.info(result)
except dpb_service.JobSubmissionError as e:
raise errors.Benchmarks.PrepareException(
'Creating tables from {}/* failed'.format(
FLAGS.dpb_sparksql_data)) from e
def Run(benchmark_spec):
"""Runs a sequence of Spark SQL Query.
Args:
benchmark_spec: Spec needed to run the Spark SQL.
Returns:
A list of samples, comprised of the detailed run times of individual query.
Raises:
Benchmarks.RunError if no query succeeds.
"""
dpb_service_instance = benchmark_spec.dpb_service
storage_service = dpb_service_instance.storage_service
metadata = benchmark_spec.dpb_service.GetMetadata()
metadata['benchmark'] = BENCHMARK_NAMES[FLAGS.dpb_sparksql_query]
# Run PySpark Spark SQL Runner
report_dir = os.path.join(benchmark_spec.base_dir, 'report')
args = [
'--sql-scripts',
','.join(benchmark_spec.staged_queries),
'--report-dir',
report_dir,
]
table_metadata = _GetTableMetadata(benchmark_spec.table_subdirs)
if table_metadata:
args += ['--table-metadata', json.dumps(table_metadata)]
jars = []
if FLAGS.spark_bigquery_connector:
jars.append(FLAGS.spark_bigquery_connector)
job_result = dpb_service_instance.SubmitJob(
pyspark_file=os.path.join(
benchmark_spec.base_dir, SPARK_SQL_RUNNER_SCRIPT),
job_arguments=args,
job_jars=jars,
job_type=dpb_service.BaseDpbService.PYSPARK_JOB_TYPE)
# Spark can only write data to directories not files. So do a recursive copy
# of that directory and then search it for the single JSON file with the
# results.
temp_run_dir = temp_dir.GetRunDirPath()
storage_service.Copy(report_dir, temp_run_dir, recursive=True)
report_file = None
for dir_name, _, files in os.walk(os.path.join(temp_run_dir, 'report')):
for filename in files:
if filename.endswith('.json'):
report_file = os.path.join(dir_name, filename)
logging.info(report_file)
if not report_file:
raise errors.Benchmarks.RunError('Job report not found.')
results = []
run_times = {}
passing_queries = set()
with open(report_file, 'r') as file:
for line in file:
result = json.loads(line)
logging.info('Timing: %s', result)
query_id = _GetQueryId(result['script'])
assert query_id
passing_queries.add(query_id)
metadata_copy = metadata.copy()
metadata_copy['query'] = query_id
results.append(
sample.Sample('sparksql_run_time', result['duration'], 'seconds',
metadata_copy))
run_times[query_id] = result['duration']
metadata['failing_queries'] = ','.join(
sorted(set(FLAGS.dpb_sparksql_order) - passing_queries))
results.append(
sample.Sample('sparksql_total_wall_time', job_result.wall_time, 'seconds',
metadata))
results.append(
sample.Sample('sparksql_geomean_run_time',
sample.GeoMean(run_times.values()), 'seconds', metadata))
return results
def _GetTableMetadata(table_subdirs=None):
"""Compute map of table metadata for spark_sql_runner --table_metadata."""
metadata = {}
if not FLAGS.dpb_sparksql_create_hive_tables:
for subdir in table_subdirs or []:
# Subdir is table name
metadata[subdir] = (FLAGS.dpb_sparksql_data_format or 'parquet', {
'path': os.path.join(FLAGS.dpb_sparksql_data, subdir)
})
for table in FLAGS.bigquery_tables:
name = table.split('.')[-1]
bq_options = {'table': table}
if FLAGS.bigquery_record_format:
bq_options['readDataFormat'] = FLAGS.bigquery_record_format
metadata[name] = (FLAGS.dpb_sparksql_data_format or 'bigquery', bq_options)
return metadata
def _GetQueryId(filename: str) -> Optional[str]:
"""Extract query id from file name."""
match = re.match(r'(.*/)?q?([0-9]+[ab]?)\.sql$', filename)
if match:
return match.group(2)
def _LoadAndStageQueries(storage_service, base_dir: str) -> List[str]:
"""Loads queries from Github and stages them in object storage.
Queries are selected using --dpb_sparksql_query and --dpb_sparksql_order.
Args:
storage_service: object_storage_service to stage queries into.
base_dir: object storage directory to stage queries into.
Returns:
The paths to the staged queries.
Raises:
PrepareException if a requested query is not found.
"""
temp_run_dir = temp_dir.GetRunDirPath()
spark_sql_perf_dir = os.path.join(temp_run_dir, 'spark_sql_perf_dir')
# Clone repo
vm_util.IssueCommand(['git', 'clone', SPARK_SQL_PERF_GIT, spark_sql_perf_dir])
vm_util.IssueCommand(['git', 'checkout', SPARK_SQL_PERF_GIT_COMMIT],
cwd=spark_sql_perf_dir)
query_dir = os.path.join(spark_sql_perf_dir, 'src', 'main', 'resources',
FLAGS.dpb_sparksql_query)
# Search repo for queries
query_file = {} # map query -> staged file
for dir_name, _, files in os.walk(query_dir):
for filename in files:
query_id = _GetQueryId(filename)
if query_id:
# only upload specified queries
if query_id in FLAGS.dpb_sparksql_order:
src_file = os.path.join(dir_name, filename)
staged_file = '{}/{}'.format(base_dir, filename)
storage_service.Copy(src_file, staged_file)
query_file[query_id] = staged_file
# Validate all requested queries are present.
missing_queries = set(FLAGS.dpb_sparksql_order) - set(query_file.keys())
if missing_queries:
raise errors.Benchmarks.PrepareException(
'Could not find queries {}'.format(missing_queries))
# Return staged queries in proper order
return [query_file[query] for query in FLAGS.dpb_sparksql_order]
def Cleanup(_):
"""Cleans up the Spark SQL."""
pass
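# Illustrative invocation (flag values are hypothetical):
#     ./pkb.py --benchmarks=dpb_sparksql_benchmark \
#         --dpb_sparksql_data=gs://my-bucket/tpcds_parquet \
#         --dpb_sparksql_order=1,2,3
# Queries are fetched from the spark-sql-perf repository pinned above, staged in object
# storage next to spark_sql_runner.py, and each query's duration is read back from the
# runner's JSON report to produce the samples emitted by Run().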
|
import os
import stat as _stat
from six import text_type
from six.moves import builtins
from mlpatches import base
from stashutils.mount_ctrl import get_manager
from stashutils.fsi.errors import IsFile, OperationFailure
# store default functions
_org_listdir = os.listdir
_org_open = builtins.open
_org_chdir = os.chdir
_org_getcwd = os.getcwd
_org_ismount = os.path.ismount
_org_stat = os.stat
_org_lstat = os.lstat
_org_mkdir = os.mkdir
_org_remove = os.remove
_org_rmdir = os.rmdir
_org_access = os.access
_org_chmod = os.chmod
def listdir(patch, path):
"""
Return a list containing the names of the entries in the directory
given by path. The list is in arbitrary order.
It does not include the special entries '.' and '..' even if
they are present in the directory.
"""
ap = os.path.abspath(os.path.join(os.getcwd(), path))
manager = get_manager()
fsi, relpath, readonly = manager.get_fsi(ap)
if fsi is None:
return _org_listdir(ap)
else:
try:
return fsi.listdir(relpath)
except OperationFailure:
raise os.error("[Errno 2] No such file or directory: '/{p}'".format(p=ap))
def open(patch, name, mode="r", buffering=0):
"""
Open a file, returning an object of the file type described in section
File Objects.
If the file cannot be opened, IOError is raised.
When opening a file, it's preferable to use open() instead of invoking
the file constructor directly.
"""
path = os.path.abspath(os.path.join(os.getcwd(), name))
manager = get_manager()
fsi, relpath, readonly = manager.get_fsi(path)
if fsi is None:
return _org_open(relpath, mode, buffering)
elif (("w" in mode) or ("a" in mode) or ("+" in mode)) and readonly:
raise IOError("[Errno 1] Operation not permitted: '{p}'".format(p=path))
else:
try:
return fsi.open(relpath, mode, buffering)
except OperationFailure:
raise os.error("[Errno 2] No such file or directory: '{p}'".format(p=path))
CWD = _org_getcwd()  # module-level variable tracking the emulated cwd
def getcwd(patch):
"""Return a string representing the current working directory."""
return CWD
def getcwdu(patch):
"""Return a Unicode object representing the current working directory."""
return text_type(CWD)
def chdir(patch, path):
"""Change the current working directory to path."""
global CWD
ap = os.path.abspath(os.path.join(CWD, path))
manager = get_manager()
fsi, relpath, readonly = manager.get_fsi(ap)
if fsi is None:
if not os.path.exists(ap):
raise os.error("[Errno 2] No such file or directory: '/{p}/'".format(p=path))
elif not os.path.isdir(ap):
raise os.error("[Errno 20] Not a directory: '{p}'".format(p=path))
else:
CWD = ap
_org_chdir(ap)
# reset paths
for p, fs, readonly in manager.get_mounts():
try:
fs.cd("/")
except:
pass
else:
try:
fsi.cd(relpath)
CWD = ap
except IsFile:
raise os.error("[Errno 20] Not a directory: '{p}'".format(p=path))
except OperationFailure:
raise os.error("[Errno 2] No such file or directory: '/{p}/'".format(p=path))
def ismount(patch, path):
"""
Return True if pathname path is a mount point:
a point in a file system where a different file system has been mounted.
The function checks whether path's parent, path/..,
is on a different device than path,
or whether path/.. and path point to the same i-node on the same device
- this should detect mount points for all Unix and POSIX variants."""
# ^^^ original docstring. We can simply ask the manager :)
ap = os.path.abspath(os.path.join(CWD, path))
manager = get_manager()
fsi, relpath, readonly = manager.get_fsi(ap)
if fsi is None:
return _org_ismount(ap)
else:
return True
def stat(patch, path):
"""
Perform the equivalent of a stat() system call on the given path.
(This function follows symlinks; to stat a symlink use lstat().)
"""
ap = os.path.abspath(os.path.join(CWD, path))
manager = get_manager()
fsi, relpath, readonly = manager.get_fsi(ap)
if fsi is None:
return _org_stat(relpath)
else:
try:
return fsi.stat(relpath)
except OperationFailure:
raise os.error("[Errno 2] No such file or directory: '{p}'".format(p=ap))
def lstat(patch, path):
"""
Perform the equivalent of an lstat() system call on the given path.
Similar to stat(), but does not follow symbolic links.
On platforms that do not support symbolic links, this is an alias for stat().
"""
ap = os.path.abspath(os.path.join(CWD, path))
manager = get_manager()
fsi, relpath, readonly = manager.get_fsi(ap)
if fsi is None:
return _org_lstat(relpath)
else:
# we don't have 'lstat'; fall back to stat()
try:
return fsi.stat(relpath)
except OperationFailure:
raise os.error("[Errno 2] No such file or directory: '{p}'".format(p=ap))
def mkdir(patch, path, mode=0o777):
"""
Create a directory named path with numeric mode mode.
The default mode is 0777 (octal). On some systems, mode is ignored.
Where it is used, the current umask value is first masked out.
If the directory already exists, OSError is raised.
"""
ap = os.path.abspath(os.path.join(CWD, path))
manager = get_manager()
fsi, relpath, readonly = manager.get_fsi(ap)
if fsi is None:
return _org_mkdir(relpath, mode)
elif readonly:
raise IOError("[Errno 1] Operation not permitted: '{p}'".format(p=ap))
else:
# FSI.mkdir() doesn't have a 'mode' argument, so 'mode' is ignored here
try:
return fsi.mkdir(relpath)
except OperationFailure as e:
raise os.error(e.message)
def remove(patch, path):
"""
Remove (delete) the file path.
If path is a directory, OSError is raised; see rmdir() below to remove
a directory.
This is identical to the unlink() function documented below.
On Windows, attempting to remove a file that is in use causes an
exception to be raised; on Unix, the directory entry is removed but the
storage allocated to the file is not made available until the original
file is no longer in use.
"""
ap = os.path.abspath(os.path.join(CWD, path))
manager = get_manager()
fsi, relpath, readonly = manager.get_fsi(ap)
if fsi is None:
return _org_remove(relpath)
elif readonly:
raise IOError("[Errno 1] Operation not permitted: '{p}'".format(p=ap))
else:
# FSI.remove() works on both files and dirs, we need to check
# this before and raise an Exception if required
if os.path.isdir(relpath):
raise os.error("OSError: [Errno 21] Is a directory: '{p}'".format(p=ap))
try:
return fsi.remove(relpath)
except OperationFailure as e:
raise os.error(e.message)
def rmdir(patch, path):
"""
Remove (delete) the directory path.
Only works when the directory is empty, otherwise, OSError is raised.
In order to remove whole directory trees, shutil.rmtree() can be used.
"""
ap = os.path.abspath(os.path.join(CWD, path))
manager = get_manager()
fsi, relpath, readonly = manager.get_fsi(ap)
if fsi is None:
return _org_rmdir(relpath)
elif readonly:
raise IOError("[Errno 1] Operation not permitted: '{p}'".format(p=ap))
else:
# FSI.remove() works on both files and dirs.
if os.path.isfile(relpath):
raise os.error("[Errno 20] Not a directory: '{p}'".format(p=ap))
try:
return fsi.remove(relpath)
except OperationFailure as e:
raise os.error(e.message)
def access(patch, path, mode):
"""
Use the real uid/gid to test for access to path.
Note that most operations will use the effective uid/gid,
therefore this routine can be used in a suid/sgid environment to test
if the invoking user has the specified access to path.
mode should be F_OK to test the existence of path,
or it can be the inclusive OR of one or more of R_OK, W_OK, and X_OK to
test permissions. Return True if access is allowed, False if not.
See the Unix man page access(2) for more information.
"""
ap = os.path.abspath(os.path.join(CWD, path))
manager = get_manager()
fsi, relpath, readonly = manager.get_fsi(ap)
if fsi is None:
return _org_access(relpath, mode)
else:
try:
s = fsi.stat(relpath)
except OperationFailure:
return False
if mode == os.F_OK:
return True
fa_mode = s.st_mode # & 0777
should_read = mode & os.R_OK
should_write = mode & os.W_OK
should_exec = mode & os.X_OK
acc = True
if should_read:
acc = acc and any((
_stat.S_IRUSR & fa_mode,
_stat.S_IRGRP & fa_mode,
_stat.S_IROTH & fa_mode,
))
if should_write:
acc = acc and any((
_stat.S_IWUSR & fa_mode,
_stat.S_IWGRP & fa_mode,
_stat.S_IWOTH & fa_mode,
))
acc = (acc and (not readonly))
if should_exec:
acc = acc and any((
_stat.S_IXUSR & fa_mode,
_stat.S_IXGRP & fa_mode,
_stat.S_IXOTH & fa_mode,
))
return acc
def chmod(patch, path, mode):
"""Change the mode of path to the numeric mode."""
ap = os.path.abspath(os.path.join(CWD, path))
manager = get_manager()
fsi, relpath, readonly = manager.get_fsi(ap)
if fsi is None:
return _org_chmod(relpath, mode)
elif readonly:
raise IOError("[Errno 1] Operation not permitted: '{p}'".format(p=ap))
else:
# we can't do this, so the call is silently ignored
pass
# define patches
class ListdirPatch(base.FunctionPatch):
"""patch for os.listdir()"""
module = "os"
function = "listdir"
replacement = listdir
class Py2OpenPatch(base.FunctionPatch):
"""patch for builtins.open()"""
PY3 = base.SKIP
module = "__builtin__"
function = "open"
replacement = open
class Py3OpenPatch(base.FunctionPatch):
"""patch for builtins.open()"""
PY2 = base.SKIP
module = "builtins"
function = "open"
replacement = open
class SixOpenPatch(base.FunctionPatch):
"""patch for builtins.open()"""
module = "six.moves.builtins"
function = "open"
replacement = open
class Py2GetcwdPatch(base.FunctionPatch):
"""patch for os.getcwd()"""
PY3 = base.SKIP
module = "os"
function = "getcwd"
replacement = getcwd
class Py3GetcwdPatch(base.FunctionPatch):
"""patch for os.getcwd()"""
PY2 = base.SKIP
module = "os"
function = "getcwd"
replacement = getcwdu
class Py2GetcwduPatch(base.FunctionPatch):
"""patch for os.getcwdu()"""
PY3 = base.SKIP
module = "os"
function = "getcwdu"
replacement = getcwdu
class Py3GetcwdbPatch(base.FunctionPatch):
"""patch for os.getcwd()"""
PY2 = base.SKIP
module = "os"
function = "getcwdb"
replacement = getcwd
class ChdirPatch(base.FunctionPatch):
"""patch for os.chdir()."""
module = "os"
function = "chdir"
replacement = chdir
class IsmountPatch(base.FunctionPatch):
"""patch for os.ismount()."""
module = "os.path"
function = "ismount"
replacement = ismount
class StatPatch(base.FunctionPatch):
"""patch for os.stat()"""
module = "os"
function = "stat"
replacement = stat
class LstatPatch(base.FunctionPatch):
"""patch for os.lstat()"""
module = "os"
function = "lstat"
replacement = lstat
class MkdirPatch(base.FunctionPatch):
"""patch for os.mkdir()"""
module = "os"
function = "mkdir"
replacement = mkdir
class RemovePatch(base.FunctionPatch):
"""patch for os.remove()"""
module = "os"
function = "remove"
replacement = remove
class RmdirPatch(base.FunctionPatch):
"""patch for os.rmdir()"""
module = "os"
function = "rmdir"
replacement = rmdir
class AccessPatch(base.FunctionPatch):
"""patch for os.access"""
module = "os"
function = "access"
replacement = access
class ChmodPatch(base.FunctionPatch):
"""patch for os.chmod"""
module = "os"
function = "chmod"
replacement = chmod
# create patch instances
LISTDIR_PATCH = ListdirPatch()
PY2_OPEN_PATCH = Py2OpenPatch()
PY3_OPEN_PATCH = Py3OpenPatch()
SIX_OPEN_PATCH = SixOpenPatch()
PY2_GETCWD_PATCH = Py2GetcwdPatch()
PY3_GETCWD_PATCH = Py3GetcwdPatch()
PY2_GETCWDU_PATCH = Py2GetcwduPatch()
PY3_GETCWDB_PATCH = Py3GetcwdbPatch()
CHDIR_PATCH = ChdirPatch()
ISMOUNT_PATCH = IsmountPatch()
STAT_PATCH = StatPatch()
LSTAT_PATCH = LstatPatch()
MKDIR_PATCH = MkdirPatch()
REMOVE_PATCH = RemovePatch()
RMDIR_PATCH = RmdirPatch()
ACCESS_PATCH = AccessPatch()
CHMOD_PATCH = ChmodPatch()
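# Note on the structure above: each patch class only records (module, function,
# replacement); installing and restoring the patches is handled by mlpatches.base,
# which is not shown here. Every replacement resolves the absolute path, asks the
# mount manager for a filesystem interface (FSI), and falls back to the saved
# _org_* function when the path is not covered by any mount.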
|
import errno
import collections
import logging
import os.path
import subprocess
import time
from ...common.interfaces import AbstractPlugin, GeneratorPlugin, AggregateResultListener, AbstractInfoWidget, \
StatsReader
from ...common.util import FileScanner
from ..Console import Plugin as ConsolePlugin
from ..Phantom import PhantomReader
_INFO = collections.namedtuple(
"Info",
"address, port, instances, ammo_count, loop_count, duration, steps, stat_log, rps_schedule, ammo_file"
)
_LOGGER = logging.getLogger(__name__)
_PROCESS_KILL_TIMEOUT = 10 # Kill running process after specified number of seconds
_OUTPUT_WAIT_TIMEOUT = 10 # Output files should be found after specified number of seconds
class Plugin(GeneratorPlugin):
"""Simple executor of shooting process with phantom compatible output"""
SECTION = 'shootexec'
def __init__(self, core, cfg, name):
AbstractPlugin.__init__(self, core, cfg, name)
self.stats_reader = None
self.reader = None
self.__process = None
self.__stderr_file = None
self.__processed_ammo_count = 0
self.__start_time = 0
self.opened_file = None
@staticmethod
def get_key():
return __file__
def get_available_options(self):
return ["cmd", "output_path", "stats_path"]
def configure(self):
self.__cmd = self.get_option("cmd")
self.__output_path = self.get_option("output_path")
self.core.add_artifact_file(self.__output_path)
self.__stats_path = self.get_option("stats_path")
if self.__stats_path:
self.core.add_artifact_file(self.__stats_path)
def get_reader(self):
if self.reader is None:
# Touch output_path to clear it
open(self.__output_path, "w").close()
self.opened_file = open(self.__output_path, 'r')
self.add_cleanup(lambda: self.opened_file.close())
self.reader = PhantomReader(self.opened_file)
return self.reader
def get_stats_reader(self):
if self.stats_reader is None:
self.stats_reader = _FileStatsReader(self.__stats_path) if self.__stats_path else _DummyStatsReader()
return self.stats_reader
def prepare_test(self):
stderr_path = self.core.mkstemp(".log", "shootexec_stdout_stderr_")
self.__stderr_file = open(stderr_path, 'w')
_LOGGER.debug("Linking sample reader to aggregator. Reading samples from %s", self.__output_path)
self.__start_time = time.time()
self.core.job.aggregator.add_result_listener(self)
try:
console = self.core.get_plugin_of_type(ConsolePlugin)
except Exception as ex:
_LOGGER.debug("Console not found: %s", ex)
console = None
if console:
widget = _InfoWidget(self)
console.add_info_widget(widget)
self.core.job.aggregator.add_result_listener(widget)
def start_test(self):
_LOGGER.info("Starting shooting process: '%s'", self.__cmd)
self.__process = subprocess.Popen(
self.__cmd,
shell=True,
stderr=self.__stderr_file,
stdout=self.__stderr_file,
close_fds=True
)
# Ensure that all expected output files are ready to use
_LOGGER.info("Waiting until output files are ready")
waitfor = time.time() + _OUTPUT_WAIT_TIMEOUT
while time.time() < waitfor:
output_path_is_ready = os.path.isfile(self.__output_path)
stats_path_is_ready = (not self.__stats_path or os.path.isfile(self.__stats_path))
if output_path_is_ready and stats_path_is_ready:
break
time.sleep(0.1)
else:
raise Exception("Failed to wait until output resources are ready: output={}, stats={}".format(
output_path_is_ready,
stats_path_is_ready
))
_LOGGER.info("Shooting proces is ready to use")
def is_test_finished(self):
retcode = self.__process.poll()
if retcode is not None:
_LOGGER.info("Shooting process done its work with exit code: %s", retcode)
return abs(retcode)
else:
return -1
def end_test(self, retcode):
if self.__process and self.__process.poll() is None:
_LOGGER.warning("Terminating shooting process with PID %s", self.__process.pid)
self.__terminate()
else:
_LOGGER.debug("Seems shooting process finished OK")
return retcode
def post_process(self, retcode):
return retcode
def on_aggregated_data(self, data, stats):
self.__processed_ammo_count += data["overall"]["interval_real"]["len"]
_LOGGER.debug("Processed ammo count: %s", self.__processed_ammo_count)
def get_info(self):
""" returns info object """
return _INFO(
"",
"",
"0",
"0",
"0",
time.time() - self.__start_time,
None,
"",
"",
""
)
def __terminate(self):
"""Gracefull termination of running process"""
if self.__stderr_file:
self.__stderr_file.close()
if not self.__process:
return
waitfor = time.time() + _PROCESS_KILL_TIMEOUT
while time.time() < waitfor:
try:
self.__process.terminate()
except EnvironmentError as e:
if e.errno != errno.ESRCH:
_LOGGER.warning("Failed to terminate process '{}': {}".format(self.__cmd, e))
return
time.sleep(0.1)
try:
self.__process.kill()
except EnvironmentError as e:
if e.errno != errno.ESRCH:
_LOGGER.warning("Failed to kill process '{}': {}".format(self.__cmd, e))
return
class _InfoWidget(AbstractInfoWidget, AggregateResultListener):
"""
Widget with information about current run state
"""
def get_index(self):
return 2
def __init__(self, sender):
AbstractInfoWidget.__init__(self)
self.owner = sender
def render(self, screen):
return ""
def on_aggregated_data(self, data, stats):
pass
class _FileStatsReader(FileScanner, StatsReader):
"""
Read shooting stats line by line
Line format is 'timestamp\trps\tinstances'
"""
def __init__(self, *args, **kwargs):
super(_FileStatsReader, self).__init__(*args, **kwargs)
self.__last_ts = 0
def _read_data(self, lines):
"""
Parse lines and return stats
"""
results = []
for line in lines:
timestamp, rps, instances = line.split("\t")
curr_ts = int(float(timestamp)) # We allow floats here, but tank expects only seconds
if self.__last_ts < curr_ts:
self.__last_ts = curr_ts
results.append(self.stats_item(self.__last_ts, float(rps), float(instances)))
return results
class _DummyStatsReader(StatsReader):
"""
Dummy stats reader for shooters without stats file
"""
def __init__(self):
self.__closed = False
self.__last_ts = 0
def __iter__(self):
while not self.__closed:
cur_ts = int(time.time())
if cur_ts > self.__last_ts:
yield [self.stats_item(cur_ts, 0, 0)]
self.__last_ts = cur_ts
else:
yield []
def close(self):
self.__closed = True
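# Expected stats file format consumed by _FileStatsReader (from its docstring above),
# illustrative line only:
#     1598887200<TAB>120.0<TAB>10
# i.e. tab-separated "timestamp rps instances"; fractional timestamps are truncated to
# whole seconds and only strictly increasing timestamps yield new stats items.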
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
from numpy import dot
from filterpy.common import pretty_str
class FadingMemoryFilter(object):
""" Creates a fading memory filter of order 0, 1, or 2.
The KalmanFilter class also implements a more general fading memory
filter and should be preferred in most cases. This is probably faster
for low order systems.
This algorithm is based on the fading filter algorithm developed in
Zarchan's "Fundamentals of Kalman Filtering" [1].
Parameters
----------
x0 : 1D np.array or scalar
Initial value for the filter state. Each value can be a scalar
or a np.array.
You can use a scalar for x0. If order > 0, then 0.0 is assumed
for the higher order terms.
x[0] is the value being tracked
x[1] is the first derivative (for order 1 and 2 filters)
x[2] is the second derivative (for order 2 filters)
dt : scalar
timestep
order : int
order of the filter. Defines the order of the system
0 - assumes system of form x = a_0 + a_1*t
1 - assumes system of form x = a_0 +a_1*t + a_2*t^2
2 - assumes system of form x = a_0 +a_1*t + a_2*t^2 + a_3*t^3
beta : float
filter gain parameter.
Attributes
----------
x : np.array
State of the filter.
x[0] is the value being tracked
x[1] is the derivative of x[0] (order 1 and 2 only)
x[2] is the 2nd derivative of x[0] (order 2 only)
This is always an np.array, even for order 0 where you can
initialize x0 with a scalar.
P : np.array
The diagonal of the covariance matrix. Assumes that variance
is one; multiply by sigma^2 to get the actual variances.
This is a constant and will not vary as the filter runs.
e : np.array
The truncation error of the filter. Each term must be multiplied
by the a_1, a_2, or a_3 of the polynomial for the system.
For example, if the filter is order 2, then multiply all terms
of self.e by a_3 to get the actual error. Multiply by a_2 for order
1, and a_1 for order 0.
References
----------
Paul Zarchan and Howard Musoff. "Fundamentals of Kalman Filtering:
A Practical Approach" American Institute of Aeronautics and Astronautics,
Inc. Fourth Edition. p. 521-536. (2015)
"""
def __init__(self, x0, dt, order, beta):
if order < 0 or order > 2:
raise ValueError('order must be between 0 and 2')
if np.isscalar(x0):
self.x = np.zeros(order+1)
self.x[0] = x0
else:
self.x = np.copy(x0)
self.dt = dt
self.order = order
self.beta = beta
if order == 0:
self.P = np.array([(1-beta)/(1+beta)], dtype=float)
self.e = np.array([dt * beta / (1-beta)], dtype=float)
elif order == 1:
p11 = (1-beta) * (1+4*beta+5*beta**2) / (1+beta)**3
p22 = 2*(1-beta)**3 / (1+beta)**3
self.P = np.array([p11, p22], dtype=float)
e = 2*dt*2 * (beta / (1-beta))**2
de = dt*((1+3*beta)/(1-beta))
self.e = np.array([e, de], dtype=float)
else:
p11 = (1-beta)*((1+6*beta + 16*beta**2 + 24*beta**3 + 19*beta**4) /
(1+beta)**5)
p22 = (1-beta)**3 * ((13+50*beta + 49*beta**2) /
(2*(1+beta)**5 * dt**2))
p33 = 6*(1-beta)**5 / ((1+beta)**5 * dt**4)
self.P = np.array([p11, p22, p33], dtype=float)
e = 6*dt**3*(beta/(1-beta))**3
de = dt**2 * (2 + 5*beta + 11*beta**2) / (1-beta)**2
dde = 6*dt*(1+2*beta) / (1-beta)
self.e = np.array([e, de, dde], dtype=float)
def __repr__(self):
return '\n'.join([
'FadingMemoryFilter object',
pretty_str('dt', self.dt),
pretty_str('order', self.order),
pretty_str('beta', self.beta),
pretty_str('x', self.x),
pretty_str('P', self.P),
pretty_str('e', self.e),
])
def update(self, z):
""" update the filter with measurement z. z must be the same type
(or treatable as the same type) as self.x[0].
"""
if self.order == 0:
G = 1 - self.beta
self.x = self.x + dot(G, (z - self.x))
elif self.order == 1:
G = 1 - self.beta**2
H = (1-self.beta)**2
x = self.x[0]
dx = self.x[1]
dxdt = dot(dx, self.dt)
residual = z - (x+dxdt)
self.x[0] = x + dxdt + G*residual
self.x[1] = dx + (H / self.dt)*residual
else: # order == 2
G = 1-self.beta**3
H = 1.5*(1+self.beta)*(1-self.beta)**2
K = 0.5*(1-self.beta)**3
x = self.x[0]
dx = self.x[1]
ddx = self.x[2]
dxdt = dot(dx, self.dt)
T2 = self.dt**2.
residual = z - (x + dxdt + 0.5*ddx*T2)
self.x[0] = x + dxdt + 0.5*ddx*T2 + G*residual
self.x[1] = dx + ddx*self.dt + (H/self.dt)*residual
self.x[2] = ddx + (2*K/(self.dt**2))*residual
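# A minimal usage sketch (not part of the original class): track a noisy scalar
# and its rate with a first-order filter. The measurement values are illustrative.
def _fading_memory_example():
    fm = FadingMemoryFilter(x0=0., dt=1., order=1, beta=0.6)
    for z in [1.0, 2.1, 2.9, 4.2]:
        fm.update(z)
    # fm.x[0] is the filtered value, fm.x[1] the estimated rate of change
    return fm.x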
|
from threading import Event
import os
import pandas as pd
from yandextank.common.util import get_test_path
from yandextank.common.util import FileMultiReader
from yandextank.plugins.Phantom.reader import PhantomReader, PhantomStatsReader, string_to_df_microsec
from functools import reduce
class TestPhantomReader(object):
def setup_class(self):
stop = Event()
self.multireader = FileMultiReader(os.path.join(get_test_path(), 'yandextank/plugins/Phantom/tests/phout.dat'), stop)
stop.set()
def teardown_class(self):
self.multireader.close()
def test_read_all(self):
reader = PhantomReader(
self.multireader.get_file(), cache_size=1024)
df = pd.DataFrame()
for chunk in reader:
df = df.append(chunk)
assert (len(df) == 200)
assert (df['interval_real'].mean() == 11000714.0)
def test_reader_closed(self):
reader = PhantomReader(self.multireader.get_file(), cache_size=64)
frames = [i for i in reader]
result = pd.concat(frames)
assert len(result) == 200
assert (result['interval_real'].mean() == 11000714.0)
def test_reader_us(self):
with open(os.path.join(get_test_path(), 'yandextank/plugins/Phantom/tests/phout.dat')) as f:
chunk = f.read()
result = string_to_df_microsec(chunk)
expected = pd.read_pickle(os.path.join(get_test_path(), 'yandextank/plugins/Phantom/tests/expected_df.dat'))
result['ts'] -= result['ts'][0]
assert result.equals(expected)
class MockInfo(object):
def __init__(self, steps):
self.steps = steps
class TestStatsReader(object):
def test_closed(self):
STEPS = [[1.0, 1], [1.0, 1], [1.0, 1], [2.0, 1], [2.0, 1], [2.0, 1], [2.0, 1], [2.0, 1], [3.0, 1], [3.0, 1],
[3.0, 1], [3.0, 1], [3.0, 1], [4.0, 1], [4.0, 1], [4.0, 1], [4.0, 1], [4.0, 1], [5.0, 1], [5.0, 1],
[5.0, 1]]
reader = PhantomStatsReader(os.path.join(get_test_path(), 'yandextank/plugins/Phantom/tests/phantom_stat.dat'),
MockInfo(STEPS), cache_size=1024 * 10)
reader.close()
stats = reduce(lambda l1, l2: l1 + l2, [i for i in reader])
assert len(stats) == 19
|
import logging
import queue
import json
from ..Telegraf.decoder import decoder
logger = logging.getLogger(__name__)
class MonitoringReader(object):
def __init__(self, source):
self.buffer = []
self.source = source
self.finished = False
self.prev_check = None
def __iter__(self):
while not self.finished:
try:
yield self._decode_agents_data(self.source.get_nowait())
except queue.Empty:
return
def _decode_agents_data(self, block):
"""
decode agents jsons, count diffs
"""
collect = []
if block:
for chunk in block.split('\n'):
try:
if chunk:
prepared_results = {}
jsn = json.loads(chunk)
for ts, values in jsn.items():
for key, value in values.items():
# key sample: diskio-sda1_io_time
# key_group sample: diskio
# key_name sample: io_time
try:
key_group, key_name = key.split('_')[0].split('-')[0], '_'.join(key.split('_')[1:])
except: # noqa: E722
key_group, key_name = key.split('_')[0], '_'.join(key.split('_')[1:])
if key_group in decoder.diff_metrics:
if key_name in decoder.diff_metrics[key_group]:
decoded_key = decoder.find_common_names(
key)
if self.prev_check:
try:
value = jsn[ts][key] - \
self.prev_check[key]
except KeyError:
logger.debug(
'There is no diff value for metric %s.\n'
'Timestamp: %s. Is it initial data?', key, ts, exc_info=True)
value = 0
prepared_results[decoded_key] = value
else:
decoded_key = decoder.find_common_names(
key)
prepared_results[decoded_key] = value
else:
decoded_key = decoder.find_common_names(
key)
prepared_results[decoded_key] = value
self.prev_check = jsn[ts]
collect.append((ts, prepared_results))
except ValueError:
                    logger.error(
                        'Telegraf agent sent trash to output: %s', chunk)
                    logger.debug(
                        'Telegraf agent data block w/ trash: %s',
                        block, exc_info=True)
return []
except BaseException:
logger.error(
'Exception trying to parse agent data: %s',
chunk,
exc_info=True)
return []
if collect:
return collect
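# Illustrative sketch (not used by MonitoringReader): how a metric key such as
# "diskio-sda1_io_time" is split into a group ("diskio") and a name ("io_time"),
# mirroring the key parsing in _decode_agents_data above.
def _split_metric_key(key):
    parts = key.split('_')
    key_group = parts[0].split('-')[0]
    key_name = '_'.join(parts[1:])
    return key_group, key_name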
|
import os
import sys
def newexit():
os._exit(1)
def setup():
# We want to monkey patch sys.exit so that we can get some
# information about where exit is being called.
newexit._old = sys.exit
sys.exit = newexit
def teardown():
try:
sys.exit = sys.exit._old
    except AttributeError:
        # the original exit was not saved; fall back to a hard exit
        sys.exit = os._exit
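# Hedged usage sketch: bracket an arbitrary run with the monkey patch installed
# and then restored. `run_callable` is a hypothetical callable, not defined here.
def _run_with_patched_exit(run_callable):
    setup()
    try:
        run_callable()
    finally:
        teardown()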
|
import gc
import weakref
import asyncio
from pscript import this_is_js
from flexx import app, event
from flexx.util.testing import run_tests_if_main, raises, skip
from flexx.app.live_tester import run_live, roundtrip, launch
from flexx.event import loop
def setup_module():
app.manager._clear_old_pending_sessions(1)
class PyComponentA(app.PyComponent):
foo = event.IntProp(settable=True)
sub = event.ComponentProp(settable=True)
@event.action
def greet(self, msg):
print('hi', msg)
@event.emitter
def bar_event(self, v):
return dict(value=v)
@event.reaction
def _on_foo(self):
if self.sub is not None:
print('sub foo changed', self.sub.foo)
@event.reaction('bar_event')
def _on_bar(self, *events):
print('got bar event', [ev.value for ev in events])
class JsComponentA(app.JsComponent):
foo = event.IntProp(settable=True)
sub = event.ComponentProp(settable=True)
@event.action
def greet(self, msg):
print('hi', msg)
@event.emitter
def bar_event(self, v):
return dict(value=v)
@event.reaction
def _on_foo(self):
if self.sub is not None:
print('sub foo changed', self.sub.foo)
@event.reaction('bar_event')
def _on_bar(self, *events):
for ev in events:
print('got bar event', ev.value)
# Hard to guarantee that events from Py get handled in same iter
#print('got bar event', [ev.value for ev in events])
class PyComponentC(PyComponentA):
def init(self, foo):
print('init')
self.set_foo(foo)
class JsComponentC(JsComponentA):
def init(self, foo):
print('init')
self.set_foo(foo)
## PyComponent basics
@run_live
async def test_pycomponent_action1():
"""
hi foo
hi bar
hi spam
----------
"""
c, s = launch(PyComponentA)
c.greet('foo')
c.greet('bar')
s.send_command('INVOKE', c.id, 'greet', ["spam"])
await roundtrip(s)
@run_live
async def test_pycomponent_action_chained():
"""
hi foo
hi bar
hi xx
----------
"""
c, s = launch(PyComponentA)
c.greet('foo').greet('bar').greet('xx')
await roundtrip(s)
@run_live
async def test_pycomponent_action2():
"""
hi foo
hi bar
hi spam
----------
"""
c1, s = launch(PyComponentA)
with c1:
c = PyComponentA()
assert c.session is s
c.greet('foo')
c.greet('bar')
s.send_command('INVOKE', c.id, 'greet', ["spam"])
await roundtrip(s)
@run_live
async def test_pycomponent_prop1():
"""
0
3
3
----------
0
3
"""
c, s = launch(PyComponentA)
c.set_foo(3)
print(c.foo)
s.send_command('EVAL', c.id, 'foo')
loop.iter()
print(c.foo) # this will mutate foo
await roundtrip(s)
print(c.foo)
s.send_command('EVAL', c.id, 'foo')
await roundtrip(s)
@run_live
async def test_pycomponent_reaction1():
"""
0
sub foo changed 0
0
sub foo changed 3
3
----------
"""
c1, s = launch(PyComponentA)
with c1:
c2 = PyComponentA() # PyComponent sub
c1.set_sub(c2)
print(c2.foo)
loop.iter()
c2.set_foo(3)
print(c2.foo)
loop.iter()
print(c2.foo)
await roundtrip(s)
@run_live
async def test_pycomponent_reaction2():
"""
0
sub foo changed 0
0
sub foo changed 3
3
----------
"""
c1, s = launch(PyComponentA)
with c1:
c2 = JsComponentA() # JsComponent sub
c1.set_sub(c2)
print(c2.foo)
await roundtrip(s)
c2.set_foo(3)
print(c2.foo)
await roundtrip(s)
print(c2.foo)
await roundtrip(s)
@run_live
async def test_pycomponent_emitter1():
"""
got bar event [6, 7]
got bar event [8, 9]
----------
? Cannot use emitter
? Cannot use emitter
? Cannot use emitter
? Cannot use emitter
"""
c, s = launch(PyComponentA)
c.bar_event(6)
c.bar_event(7)
await roundtrip(s)
c.bar_event(8)
c.bar_event(9)
await roundtrip(s)
s.send_command('INVOKE', c.id, 'bar_event', [16])
s.send_command('INVOKE', c.id, 'bar_event', [17])
await roundtrip(s)
s.send_command('INVOKE', c.id, 'bar_event', [18])
s.send_command('INVOKE', c.id, 'bar_event', [19])
await roundtrip(s)
@run_live
async def test_pycomponent_init1():
"""
init
init
10
20
20
----------
"""
c1, s = launch(app.PyComponent)
with c1:
c2 = PyComponentA(foo=10)
c3 = PyComponentC(20)
c4 = PyComponentC(20, foo=10) # What happens in init takes preference
await roundtrip(s)
print(c2.foo)
print(c3.foo)
print(c4.foo)
## JsComponent basics
@run_live
async def test_jscomponent_action1():
"""
----------
hi foo
hi bar
hi spam
"""
c, s = launch(JsComponentA)
c.greet('foo')
c.greet('bar')
s.send_command('INVOKE', c.id, 'greet', ["spam"])
await roundtrip(s)
await roundtrip(s)
@run_live
async def test_jscomponent_action2():
"""
----------
hi foo
hi bar
hi spam
"""
c1, s = launch(JsComponentA)
with c1:
c = JsComponentA()
assert c.session is s
c.greet('foo')
c.greet('bar')
s.send_command('INVOKE', c.id, 'greet', ["spam"])
await roundtrip(s)
await roundtrip(s)
@run_live
async def test_jscomponent_prop1():
"""
0
0
3
----------
0
3
"""
c, s = launch(JsComponentA)
# Note: set_foo() immediately sends an INVOKE command. If the
# subsequent (now commented) EVAL command is not handled in the same
# event loop iter, the value will already have been updated.
s.send_command('EVAL', c.id, 'foo')
c.set_foo(3)
print(c.foo)
# s.send_command('EVAL', c.id, 'foo')
loop.iter()
print(c.foo) # still not set
await roundtrip(s)
print(c.foo)
s.send_command('EVAL', c.id, 'foo')
await roundtrip(s)
@run_live
async def test_jscomponent_reaction1():
"""
0
0
3
----------
sub foo changed 0
sub foo changed 3
"""
c1, s = launch(JsComponentA)
with c1:
c2 = PyComponentA() # PyComponent sub
c1.set_sub(c2)
print(c2.foo)
await roundtrip(s)
c2.set_foo(3)
print(c2.foo)
await roundtrip(s)
print(c2.foo)
await roundtrip(s)
@run_live
async def test_jscomponent_reaction2():
"""
0
0
3
----------
sub foo changed 0
sub foo changed 3
"""
c1, s = launch(JsComponentA)
with c1:
c2 = JsComponentA() # JsComponent sub
c1.set_sub(c2)
print(c2.foo)
await roundtrip(s)
c2.set_foo(3)
print(c2.foo)
await roundtrip(s)
print(c2.foo)
await roundtrip(s)
@run_live
async def test_jscomponent_emitter1():
"""
? Cannot use emitter
? Cannot use emitter
? Cannot use emitter
? Cannot use emitter
----------
got bar event 16
got bar event 17
got bar event 18
got bar event 19
"""
c, s = launch(JsComponentA)
c.bar_event(6)
c.bar_event(7)
await roundtrip(s)
c.bar_event(8)
c.bar_event(9)
await roundtrip(s)
s.send_command('INVOKE', c.id, 'bar_event', [16])
s.send_command('INVOKE', c.id, 'bar_event', [17])
await roundtrip(s)
s.send_command('INVOKE', c.id, 'bar_event', [18])
s.send_command('INVOKE', c.id, 'bar_event', [19])
await roundtrip(s)
@run_live
async def test_jscomponent_init1():
"""
0
0
0
10
20
20
----------
init
init
"""
# This test is important. We have plenty of tests that ensure that the init
# args and kwargs work in both Python and JS variants of Component, but
# instantiating a JsComponent in Python will have to communicate these!
c1, s = launch(app.PyComponent)
with c1:
c2 = JsComponentA(foo=10)
c3 = JsComponentC(20)
c4 = JsComponentC(20, foo=10) # What happens in init takes preference
# Data is not yet synced
print(c2.foo)
print(c3.foo)
print(c4.foo)
await roundtrip(s)
print(c2.foo)
print(c3.foo)
print(c4.foo)
## With sub components
class CreatingPyComponent(PyComponentA):
def init(self):
self._x = JsComponentA(foo=7)
@event.action
def apply_sub(self):
self.set_sub(self._x)
class CreatingJsComponent(JsComponentA):
def init(self):
self._x = JsComponentA(foo=7)
@event.action
def apply_sub(self):
self.set_sub(self._x)
@run_live
async def test_proxy_binding1():
"""
sub foo changed 7
7
sub foo changed 7
7
----------
"""
# Get ref to JsComponent instantiated by a PyComponent
c1, s = launch(app.PyComponent)
with c1:
c2 = CreatingPyComponent() # PyComponent that has local JsComponent
await roundtrip(s)
assert c2.sub is None
# Get access to the sub component
c2.apply_sub()
await roundtrip(s)
c3 = c2.sub
assert isinstance(c3, JsComponentA)
print(c3.foo)
# Get id of c3 and get rid of any references
c3_id = c3.id
c3_ref = weakref.ref(c3)
c2.set_sub(None)
for i in range(5):
await roundtrip(s)
del c3
for i in range(5):
await roundtrip(s)
assert c3_ref() is not None # because PyComponent has it
# Get access to the sub component again (proxy thereof, really)
c2.apply_sub()
await roundtrip(s)
c3 = c2.sub
assert isinstance(c3, JsComponentA)
assert c3.id == c3_id
print(c3.foo)
@run_live
async def test_proxy_binding2():
"""
7
7
----------
sub foo changed 7
sub foo changed 7
"""
# Get ref to JsComponent instantiated by a JsComponent,
    # drop that ref, re-get the proxy instance, and verify that it's
# a different instance representing the same object in JS
c1, s = launch(app.PyComponent)
with c1:
c2 = CreatingJsComponent() # JsComponent that has local JsComponent
await roundtrip(s)
assert c2.sub is None
# Get access to the sub component
c2.apply_sub()
await roundtrip(s)
await roundtrip(s)
c3 = c2.sub
assert isinstance(c3, JsComponentA)
print(c3.foo)
# Get id of c3 and get rid of any references
id3 = id(c3)
c3_ref = weakref.ref(c3)
c3_id = c3.id
c2.set_sub(None)
for i in range(5): # need a few roundtrips for session to drop c3
await roundtrip(s)
del c3
for i in range(5):
await roundtrip(s)
gc.collect()
assert c3_ref() is None # Python dropped it, but JS still has the object!
# Get access to the sub component again (proxy thereof, really)
c2.apply_sub()
await roundtrip(s)
c3 = c2.sub
assert isinstance(c3, JsComponentA)
assert c3.id == c3_id
print(c3.foo)
@run_live
async def test_proxy_binding3():
"""
sub foo changed 0
sub foo changed 3
sub foo changed 6
sub foo changed 7
? Using stub component
? session does not know it
----------
"""
    # Test that local components only send events when there is a proxy,
    # and that when events are sent anyway, warnings are shown
c1, s = launch(PyComponentA)
with c1:
c2 = JsComponentA() # JsComponent that has local JsComponent
c1.set_sub(c2)
id2 = c2.id
# Change foo of c2
c2.set_foo(3)
await roundtrip(s)
    # Now, pretend to drop the instance
s.send_command('INVOKE', c2.id, '_flx_set_has_proxy', [False])
await roundtrip(s)
# We don't get the events anymore
c2.set_foo(4)
c2.set_foo(5)
await roundtrip(s)
# Re-establish
s.send_command('INVOKE', c2.id, '_flx_set_has_proxy', [True])
await roundtrip(s)
# We get these
c2.set_foo(6)
s.send_command('INVOKE', id2, 'set_foo', [7]) # same thing, really
await roundtrip(s)
# Now, we simulate destroying the proxy without JS knowing
s._component_instances.pop(id2)
# And then ... invoking an event will raise one error for not being able
# to invoke in Python, and one for not being able to decode the "source"
# of the event.
s.send_command('INVOKE', id2, 'set_foo', [9])
await roundtrip(s)
## Multi-session
class JsComponentB(app.JsComponent):
sub1 = event.ComponentProp(settable=True)
sub2 = event.ComponentProp(settable=True)
@event.action
def sub1_to_sub2(self):
self.set_sub2(self.sub1)
@run_live
async def test_proxy_binding21():
"""
14 None
24 None
24 24
----------
14
? JsComponentA
undefined
? JsComponentA
undefined
"""
# Test multiple sessions, and sharing objects
c1, s1 = launch(JsComponentB)
c2, s2 = launch(JsComponentB)
with c1:
c11 = JsComponentA() # JsComponent that has local JsComponent
c1.set_sub1(c11)
with c2:
c22 = JsComponentA() # JsComponent that has local JsComponent
c2.set_sub1(c22)
await roundtrip(s1, s2)
c11.set_foo(14)
c22.set_foo(24)
await roundtrip(s1, s2)
print(c1.sub1 and c1.sub1.foo, c1.sub2 and c1.sub2.foo)
s1.send_command('EVAL', c1.id, 'sub1.foo')
await roundtrip(s1, s2)
# So far, not much news, now break the universe ...
c1.set_sub1(c2.sub1)
await roundtrip(s1, s2)
print(c1.sub1 and c1.sub1.foo, c1.sub2 and c1.sub2.foo)
# In JS, c1.sub1 will be a stub
s1.send_command('EVAL', c1.id, 'sub1.id')
s1.send_command('EVAL', c1.id, 'sub1.foo')
await roundtrip(s1, s2)
# But we can still "handle" it
c1.sub1_to_sub2()
await roundtrip(s1, s2)
# And now c1.sub2.foo has the value of c2.sub1.foo
print(c1.sub1 and c1.sub1.foo, c1.sub2 and c1.sub2.foo)
s1.send_command('EVAL', c1.id, 'sub1.id')
s1.send_command('EVAL', c1.id, 'sub1.foo')
await roundtrip(s1, s2)
@run_live
async def test_sharing_state_between_sessions():
"""
7
7
42
42
----------
7
7
42
42
"""
# Test sharing state between multiple sessions
class SharedComponent(event.Component):
foo = event.IntProp(0, settable=True)
shared = SharedComponent()
    # Passing a lambda for a PyComponent property is what shares the state
    # (see the sketch after this test). Note that this needs to be set up for
    # each property. It would be nice to really share a component (proxy), but
    # this would mean that a PyComponent could have multiple sessions, which
    # would complicate things too much to be worthwhile.
c1 = app.App(PyComponentA, foo=lambda:shared.foo).launch()
c2 = app.App(PyComponentA, foo=lambda:shared.foo).launch()
s1, s2 = c1.session, c2.session
with c1:
c11 = JsComponentA()
with c2:
c22 = JsComponentA()
await roundtrip(s1, s2)
shared.set_foo(7)
await roundtrip(s1, s2)
print(c1.foo)
s1.send_command('EVAL', c1.id, 'foo')
await roundtrip(s1, s2)
print(c2.foo)
s2.send_command('EVAL', c2.id, 'foo')
shared.set_foo(42)
await roundtrip(s1, s2)
print(c1.foo)
s1.send_command('EVAL', c1.id, 'foo')
await roundtrip(s1, s2)
print(c2.foo)
s2.send_command('EVAL', c2.id, 'foo')
await roundtrip(s1, s2)
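# Hedged sketch of the state-sharing pattern used in the test above, outside a
# test: one plain event.Component holds the value and each App pulls its initial
# property value from it via a lambda. The class and function names here are
# illustrative.
class SharedFoo(event.Component):
    foo = event.IntProp(0, settable=True)
def _make_apps_sharing_foo():
    shared = SharedFoo()
    app_a = app.App(PyComponentA, foo=lambda: shared.foo)
    app_b = app.App(PyComponentA, foo=lambda: shared.foo)
    return shared, app_a, app_b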
class CreatingJsComponent2(app.JsComponent):
sub = event.ComponentProp(settable=True)
@event.action
def create_sub(self):
with self:
c = CreatingJsComponent2()
self.set_sub(c)
@run_live
async def test_component_id_uniqueness():
"""
JsComponentB_1
CreatingJsComponent2_2
CreatingJsComponent2_2js
JsComponentB_1
CreatingJsComponent2_2
CreatingJsComponent2_2js
3
6
3
----------
JsComponentB_1
CreatingJsComponent2_2
CreatingJsComponent2_2js
JsComponentB_1
CreatingJsComponent2_2
CreatingJsComponent2_2js
"""
# Test uniqueness of component id's
c1, s1 = launch(JsComponentB)
c2, s2 = launch(JsComponentB)
with c1:
c11 = CreatingJsComponent2() # JsComponent that has local JsComponent
c11.create_sub()
c11.create_sub()
with c2:
c22 = CreatingJsComponent2() # JsComponent that has local JsComponent
c22.create_sub()
c22.create_sub()
await roundtrip(s1, s2)
cc = [c1, c11, c11.sub, c2, c22, c22.sub]
for c in cc:
print(c.id)
c.session.send_command('EVAL', c.id, 'id')
await roundtrip(s1, s2)
# That was not very unique though
s = set()
for c in cc:
s.add(c.id)
print(len(s))
# But this is
s = set()
for c in cc:
s.add(c.uid)
print(len(s))
# And this should be too
s = set()
for c in [c1, c11, c11.sub]:
s.add(c.id.split('_')[-1])
print(len(s))
##
run_tests_if_main()
|
import logging
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.typing import HomeAssistantType
from .const import (
CONF_RELAY_ADDR,
CONF_RELAY_CHAN,
CONF_ZONE_LOOP,
CONF_ZONE_NAME,
CONF_ZONE_NUMBER,
CONF_ZONE_RFID,
CONF_ZONE_TYPE,
DEFAULT_ZONE_OPTIONS,
OPTIONS_ZONES,
SIGNAL_REL_MESSAGE,
SIGNAL_RFX_MESSAGE,
SIGNAL_ZONE_FAULT,
SIGNAL_ZONE_RESTORE,
)
_LOGGER = logging.getLogger(__name__)
ATTR_RF_BIT0 = "rf_bit0"
ATTR_RF_LOW_BAT = "rf_low_battery"
ATTR_RF_SUPERVISED = "rf_supervised"
ATTR_RF_BIT3 = "rf_bit3"
ATTR_RF_LOOP3 = "rf_loop3"
ATTR_RF_LOOP2 = "rf_loop2"
ATTR_RF_LOOP4 = "rf_loop4"
ATTR_RF_LOOP1 = "rf_loop1"
async def async_setup_entry(
hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
):
"""Set up for AlarmDecoder sensor."""
zones = entry.options.get(OPTIONS_ZONES, DEFAULT_ZONE_OPTIONS)
entities = []
for zone_num in zones:
zone_info = zones[zone_num]
zone_type = zone_info[CONF_ZONE_TYPE]
zone_name = zone_info[CONF_ZONE_NAME]
zone_rfid = zone_info.get(CONF_ZONE_RFID)
zone_loop = zone_info.get(CONF_ZONE_LOOP)
relay_addr = zone_info.get(CONF_RELAY_ADDR)
relay_chan = zone_info.get(CONF_RELAY_CHAN)
entity = AlarmDecoderBinarySensor(
zone_num, zone_name, zone_type, zone_rfid, zone_loop, relay_addr, relay_chan
)
entities.append(entity)
async_add_entities(entities)
class AlarmDecoderBinarySensor(BinarySensorEntity):
"""Representation of an AlarmDecoder binary sensor."""
def __init__(
self,
zone_number,
zone_name,
zone_type,
zone_rfid,
zone_loop,
relay_addr,
relay_chan,
):
"""Initialize the binary_sensor."""
self._zone_number = int(zone_number)
self._zone_type = zone_type
self._state = None
self._name = zone_name
self._rfid = zone_rfid
self._loop = zone_loop
self._rfstate = None
self._relay_addr = relay_addr
self._relay_chan = relay_chan
async def async_added_to_hass(self):
"""Register callbacks."""
self.async_on_remove(
self.hass.helpers.dispatcher.async_dispatcher_connect(
SIGNAL_ZONE_FAULT, self._fault_callback
)
)
self.async_on_remove(
self.hass.helpers.dispatcher.async_dispatcher_connect(
SIGNAL_ZONE_RESTORE, self._restore_callback
)
)
self.async_on_remove(
self.hass.helpers.dispatcher.async_dispatcher_connect(
SIGNAL_RFX_MESSAGE, self._rfx_message_callback
)
)
self.async_on_remove(
self.hass.helpers.dispatcher.async_dispatcher_connect(
SIGNAL_REL_MESSAGE, self._rel_message_callback
)
)
@property
def name(self):
"""Return the name of the entity."""
return self._name
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def device_state_attributes(self):
"""Return the state attributes."""
attr = {CONF_ZONE_NUMBER: self._zone_number}
if self._rfid and self._rfstate is not None:
attr[ATTR_RF_BIT0] = bool(self._rfstate & 0x01)
attr[ATTR_RF_LOW_BAT] = bool(self._rfstate & 0x02)
attr[ATTR_RF_SUPERVISED] = bool(self._rfstate & 0x04)
attr[ATTR_RF_BIT3] = bool(self._rfstate & 0x08)
attr[ATTR_RF_LOOP3] = bool(self._rfstate & 0x10)
attr[ATTR_RF_LOOP2] = bool(self._rfstate & 0x20)
attr[ATTR_RF_LOOP4] = bool(self._rfstate & 0x40)
attr[ATTR_RF_LOOP1] = bool(self._rfstate & 0x80)
return attr
@property
def is_on(self):
"""Return true if sensor is on."""
return self._state == 1
@property
def device_class(self):
"""Return the class of this sensor, from DEVICE_CLASSES."""
return self._zone_type
def _fault_callback(self, zone):
"""Update the zone's state, if needed."""
if zone is None or int(zone) == self._zone_number:
self._state = 1
self.schedule_update_ha_state()
def _restore_callback(self, zone):
"""Update the zone's state, if needed."""
if zone is None or (int(zone) == self._zone_number and not self._loop):
self._state = 0
self.schedule_update_ha_state()
def _rfx_message_callback(self, message):
"""Update RF state."""
if self._rfid and message and message.serial_number == self._rfid:
self._rfstate = message.value
if self._loop:
self._state = 1 if message.loop[self._loop - 1] else 0
self.schedule_update_ha_state()
def _rel_message_callback(self, message):
"""Update relay / expander state."""
if self._relay_addr == message.address and self._relay_chan == message.channel:
_LOGGER.debug(
"%s %d:%d value:%d",
"Relay" if message.type == message.RELAY else "ZoneExpander",
message.address,
message.channel,
message.value,
)
self._state = message.value
self.schedule_update_ha_state()
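# Illustrative sketch (not used by the entity above): how a raw AlarmDecoder RF
# state byte maps onto the attributes exposed by device_state_attributes. The
# sample value 0x86 is chosen for demonstration only.
def _decode_rf_state(rfstate):
    return {
        ATTR_RF_BIT0: bool(rfstate & 0x01),
        ATTR_RF_LOW_BAT: bool(rfstate & 0x02),
        ATTR_RF_SUPERVISED: bool(rfstate & 0x04),
        ATTR_RF_BIT3: bool(rfstate & 0x08),
        ATTR_RF_LOOP3: bool(rfstate & 0x10),
        ATTR_RF_LOOP2: bool(rfstate & 0x20),
        ATTR_RF_LOOP4: bool(rfstate & 0x40),
        ATTR_RF_LOOP1: bool(rfstate & 0x80),
    }
# Example: _decode_rf_state(0x86) reports rf_low_battery, rf_supervised and rf_loop1.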
|
revision = '434c29e40511'
down_revision = '8323a5ea723a'
from alembic import op
import sqlalchemy as sa
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('certificates', sa.Column('key_type', sa.String(length=128), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('certificates', 'key_type')
# ### end Alembic commands ###
|
import typing
import pandas as pd
import collections.abc
from matchzoo.engine.param import Param
from matchzoo.engine import hyper_spaces
class ParamTable(object):
"""
Parameter table class.
Example:
>>> params = ParamTable()
>>> params.add(Param('ham', 'Parma Ham'))
>>> params.add(Param('egg', 'Over Easy'))
>>> params['ham']
'Parma Ham'
>>> params['egg']
'Over Easy'
>>> print(params)
ham Parma Ham
egg Over Easy
>>> params.add(Param('egg', 'Sunny side Up'))
Traceback (most recent call last):
...
ValueError: Parameter named egg already exists.
To re-assign parameter egg value, use `params["egg"] = value` instead.
"""
def __init__(self):
"""Parameter table constrctor."""
self._params = {}
def add(self, param: Param):
""":param param: parameter to add."""
if not isinstance(param, Param):
raise TypeError("Only accepts a Param instance.")
if param.name in self._params:
msg = f"Parameter named {param.name} already exists.\n" \
f"To re-assign parameter {param.name} value, " \
f"use `params[\"{param.name}\"] = value` instead."
raise ValueError(msg)
self._params[param.name] = param
def get(self, key) -> Param:
""":return: The parameter in the table named `key`."""
return self._params[key]
def set(self, key, param: Param):
"""Set `key` to parameter `param`."""
if not isinstance(param, Param):
raise ValueError("Only accepts a Param instance.")
self._params[key] = param
@property
def hyper_space(self) -> dict:
""":return: Hyper space of the table, a valid `hyperopt` graph."""
full_space = {}
for param in self:
if param.hyper_space is not None:
param_space = param.hyper_space
if isinstance(param_space, hyper_spaces.HyperoptProxy):
param_space = param_space.convert(param.name)
full_space[param.name] = param_space
return full_space
def to_frame(self) -> pd.DataFrame:
"""
Convert the parameter table into a pandas data frame.
:return: A `pandas.DataFrame`.
Example:
>>> import matchzoo as mz
>>> table = mz.ParamTable()
>>> table.add(mz.Param(name='x', value=10, desc='my x'))
>>> table.add(mz.Param(name='y', value=20, desc='my y'))
>>> table.to_frame()
Name Description Value Hyper-Space
0 x my x 10 None
1 y my y 20 None
"""
df = pd.DataFrame(data={
'Name': [p.name for p in self],
'Description': [p.desc for p in self],
'Value': [p.value for p in self],
'Hyper-Space': [p.hyper_space for p in self]
}, columns=['Name', 'Description', 'Value', 'Hyper-Space'])
return df
def __getitem__(self, key: str) -> typing.Any:
""":return: The value of the parameter in the table named `key`."""
return self._params[key].value
def __setitem__(self, key: str, value: typing.Any):
"""
Set the value of the parameter named `key`.
:param key: Name of the parameter.
:param value: New value of the parameter to set.
"""
self._params[key].value = value
def __str__(self):
""":return: Pretty formatted parameter table."""
return '\n'.join(param.name.ljust(30) + str(param.value)
for param in self._params.values())
def __iter__(self) -> typing.Iterator:
""":return: A iterator that iterates over all parameter instances."""
yield from self._params.values()
def completed(self) -> bool:
"""
:return: `True` if all params are filled, `False` otherwise.
Example:
>>> import matchzoo
>>> model = matchzoo.models.Naive()
>>> model.params.completed()
False
>>> model.guess_and_fill_missing_params(verbose=0)
>>> model.params.completed()
True
"""
return all(param for param in self)
def keys(self) -> collections.abc.KeysView:
""":return: Parameter table keys."""
return self._params.keys()
def __contains__(self, item):
""":return: `True` if parameter in parameters."""
return item in self._params
def update(self, other: dict):
"""
Update `self`.
Update `self` with the key/value pairs from other, overwriting
existing keys. Notice that this does not add new keys to `self`.
This method is usually used by models to obtain useful information
from a preprocessor's context.
        :param other: The dictionary used to update `self`.
Example:
>>> import matchzoo as mz
>>> model = mz.models.DenseBaseline()
>>> model.params['input_shapes'] is None
True
>>> prpr = model.get_default_preprocessor()
>>> _ = prpr.fit(mz.datasets.toy.load_data(), verbose=0)
>>> model.params.update(prpr.context)
>>> model.params['input_shapes']
[(30,), (30,)]
"""
for key in other:
if key in self:
self[key] = other[key]
|
import pytest
import voluptuous as vol
from homeassistant.auth.permissions.entities import (
ENTITY_POLICY_SCHEMA,
compile_entities,
)
from homeassistant.auth.permissions.models import PermissionLookup
from homeassistant.helpers.device_registry import DeviceEntry
from homeassistant.helpers.entity_registry import RegistryEntry
from tests.common import mock_device_registry, mock_registry
def test_entities_none():
"""Test entity ID policy."""
policy = None
compiled = compile_entities(policy, None)
assert compiled("light.kitchen", "read") is False
def test_entities_empty():
"""Test entity ID policy."""
policy = {}
ENTITY_POLICY_SCHEMA(policy)
compiled = compile_entities(policy, None)
assert compiled("light.kitchen", "read") is False
def test_entities_false():
"""Test entity ID policy."""
policy = False
with pytest.raises(vol.Invalid):
ENTITY_POLICY_SCHEMA(policy)
def test_entities_true():
"""Test entity ID policy."""
policy = True
ENTITY_POLICY_SCHEMA(policy)
compiled = compile_entities(policy, None)
assert compiled("light.kitchen", "read") is True
def test_entities_domains_true():
"""Test entity ID policy."""
policy = {"domains": True}
ENTITY_POLICY_SCHEMA(policy)
compiled = compile_entities(policy, None)
assert compiled("light.kitchen", "read") is True
def test_entities_domains_domain_true():
"""Test entity ID policy."""
policy = {"domains": {"light": True}}
ENTITY_POLICY_SCHEMA(policy)
compiled = compile_entities(policy, None)
assert compiled("light.kitchen", "read") is True
assert compiled("switch.kitchen", "read") is False
def test_entities_domains_domain_false():
"""Test entity ID policy."""
policy = {"domains": {"light": False}}
with pytest.raises(vol.Invalid):
ENTITY_POLICY_SCHEMA(policy)
def test_entities_entity_ids_true():
"""Test entity ID policy."""
policy = {"entity_ids": True}
ENTITY_POLICY_SCHEMA(policy)
compiled = compile_entities(policy, None)
assert compiled("light.kitchen", "read") is True
def test_entities_entity_ids_false():
"""Test entity ID policy."""
policy = {"entity_ids": False}
with pytest.raises(vol.Invalid):
ENTITY_POLICY_SCHEMA(policy)
def test_entities_entity_ids_entity_id_true():
"""Test entity ID policy."""
policy = {"entity_ids": {"light.kitchen": True}}
ENTITY_POLICY_SCHEMA(policy)
compiled = compile_entities(policy, None)
assert compiled("light.kitchen", "read") is True
assert compiled("switch.kitchen", "read") is False
def test_entities_entity_ids_entity_id_false():
"""Test entity ID policy."""
policy = {"entity_ids": {"light.kitchen": False}}
with pytest.raises(vol.Invalid):
ENTITY_POLICY_SCHEMA(policy)
def test_entities_control_only():
"""Test policy granting control only."""
policy = {"entity_ids": {"light.kitchen": {"read": True}}}
ENTITY_POLICY_SCHEMA(policy)
compiled = compile_entities(policy, None)
assert compiled("light.kitchen", "read") is True
assert compiled("light.kitchen", "control") is False
assert compiled("light.kitchen", "edit") is False
def test_entities_read_control():
"""Test policy granting control only."""
policy = {"domains": {"light": {"read": True, "control": True}}}
ENTITY_POLICY_SCHEMA(policy)
compiled = compile_entities(policy, None)
assert compiled("light.kitchen", "read") is True
assert compiled("light.kitchen", "control") is True
assert compiled("light.kitchen", "edit") is False
def test_entities_all_allow():
"""Test policy allowing all entities."""
policy = {"all": True}
ENTITY_POLICY_SCHEMA(policy)
compiled = compile_entities(policy, None)
assert compiled("light.kitchen", "read") is True
assert compiled("light.kitchen", "control") is True
assert compiled("switch.kitchen", "read") is True
def test_entities_all_read():
"""Test policy applying read to all entities."""
policy = {"all": {"read": True}}
ENTITY_POLICY_SCHEMA(policy)
compiled = compile_entities(policy, None)
assert compiled("light.kitchen", "read") is True
assert compiled("light.kitchen", "control") is False
assert compiled("switch.kitchen", "read") is True
def test_entities_all_control():
"""Test entity ID policy applying control to all."""
policy = {"all": {"control": True}}
ENTITY_POLICY_SCHEMA(policy)
compiled = compile_entities(policy, None)
assert compiled("light.kitchen", "read") is False
assert compiled("light.kitchen", "control") is True
assert compiled("switch.kitchen", "read") is False
assert compiled("switch.kitchen", "control") is True
def test_entities_device_id_boolean(hass):
"""Test entity ID policy applying control on device id."""
entity_registry = mock_registry(
hass,
{
"test_domain.allowed": RegistryEntry(
entity_id="test_domain.allowed",
unique_id="1234",
platform="test_platform",
device_id="mock-allowed-dev-id",
),
"test_domain.not_allowed": RegistryEntry(
entity_id="test_domain.not_allowed",
unique_id="5678",
platform="test_platform",
device_id="mock-not-allowed-dev-id",
),
},
)
device_registry = mock_device_registry(hass)
policy = {"device_ids": {"mock-allowed-dev-id": {"read": True}}}
ENTITY_POLICY_SCHEMA(policy)
compiled = compile_entities(
policy, PermissionLookup(entity_registry, device_registry)
)
assert compiled("test_domain.allowed", "read") is True
assert compiled("test_domain.allowed", "control") is False
assert compiled("test_domain.not_allowed", "read") is False
assert compiled("test_domain.not_allowed", "control") is False
def test_entities_areas_true():
"""Test entity ID policy for areas."""
policy = {"area_ids": True}
ENTITY_POLICY_SCHEMA(policy)
compiled = compile_entities(policy, None)
assert compiled("light.kitchen", "read") is True
def test_entities_areas_area_true(hass):
"""Test entity ID policy for areas with specific area."""
entity_registry = mock_registry(
hass,
{
"light.kitchen": RegistryEntry(
entity_id="light.kitchen",
unique_id="1234",
platform="test_platform",
device_id="mock-dev-id",
)
},
)
device_registry = mock_device_registry(
hass, {"mock-dev-id": DeviceEntry(id="mock-dev-id", area_id="mock-area-id")}
)
policy = {"area_ids": {"mock-area-id": {"read": True, "control": True}}}
ENTITY_POLICY_SCHEMA(policy)
compiled = compile_entities(
policy, PermissionLookup(entity_registry, device_registry)
)
assert compiled("light.kitchen", "read") is True
assert compiled("light.kitchen", "control") is True
assert compiled("light.kitchen", "edit") is False
assert compiled("switch.kitchen", "read") is False
|
import os
import time
import unittest
import mock
from kalliope.core.NeuronModule import NeuronModule, MissingParameterException, InvalidParameterException
from kalliope.neurons.script.script import Script
class TestScript(unittest.TestCase):
def setUp(self):
self.path = "path"
self.random = "random"
self.test_file = "/tmp/kalliope_text_shell.txt"
def testParameters(self):
def run_test_missing_param(parameters_to_test):
with self.assertRaises(MissingParameterException):
Script(**parameters_to_test)
def run_test_invalid_param(parameters_to_test):
with self.assertRaises(InvalidParameterException):
Script(**parameters_to_test)
# empty
parameters = dict()
run_test_missing_param(parameters)
# missing path
parameters = {
"random": self.random
}
run_test_missing_param(parameters)
# random path
self.path = "/tmp/iamarandompath/anotherrandompath/kalliope"
parameters = {
"path": self.path
}
run_test_invalid_param(parameters)
# Test Non executable file
# Create the file and remove permissions to the user
tmp_path = "/tmp/kalliope/tests/"
tmp_file_path = tmp_path+"neuronScript"
if not os.path.exists(tmp_path):
os.makedirs(tmp_path)
text_to_write = "[kalliope-test] TestScript - testParameters"
with open(tmp_file_path, 'w') as myFile:
myFile.write(text_to_write)
os.chmod(tmp_file_path, 0o600)
# test the user does not have access
self.path = tmp_file_path
parameters = {
"path": self.path
}
run_test_invalid_param(parameters)
# Remove the tmp file
os.chmod(tmp_file_path, 0o700)
os.remove(tmp_file_path)
    def test_script_execution(self):
"""
Test we can run a script
"""
param = {
"path": "kalliope/neurons/script/tests/test_script.sh"
}
with mock.patch.object(NeuronModule, 'say', return_value=None) as mock_method:
Script(**param)
self.assertTrue(os.path.isfile(self.test_file))
        # remove the test file
os.remove(self.test_file)
def test_script_execution_async(self):
"""
Test we can run a script asynchronously
"""
param = {
"path": "kalliope/neurons/script/tests/test_script.sh",
"async": True
}
with mock.patch.object(NeuronModule, 'say', return_value=None) as mock_method:
Script(**param)
        # give the thread time to do its job
time.sleep(0.5)
self.assertTrue(os.path.isfile(self.test_file))
# remove the test file
os.remove(self.test_file)
def test_script_content(self):
"""
Test we can get a content from the launched script
"""
text_to_write = 'kalliope'
        # write some content into a file
with open(self.test_file, 'w') as myFile:
myFile.write(text_to_write)
# get the output with the neuron
parameters = {
"path": "kalliope/neurons/script/tests/test_script_cat.sh",
}
with mock.patch.object(NeuronModule, 'say', return_value=None) as mock_method:
script = Script(**parameters)
self.assertEqual(script.output, text_to_write)
self.assertEqual(script.returncode, 0)
        # remove the test file
os.remove(self.test_file)
if __name__ == '__main__':
unittest.main()
|
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.linux_packages import redis_enterprise
FLAGS = flags.FLAGS
BENCHMARK_NAME = 'redis_enterprise'
REDIS_PORT = 12006
REDIS_UI_PORT = 8443
BENCHMARK_CONFIG = """
redis_enterprise:
description: Run memtier_benchmark against Redis Enterprise.
vm_groups:
servers:
vm_spec:
GCP:
machine_type: c2-standard-30
zone: us-east1-a
AWS:
machine_type: c5.9xlarge
zone: us-east-1d
vm_count: 1
clients:
vm_spec:
GCP:
machine_type: c2-standard-30
zone: us-east1-a
AWS:
machine_type: c5.9xlarge
zone: us-east-1d
vm_count: 2
"""
def GetConfig(user_config):
config = configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
return config
def _InstallRedisEnterprise(vm):
"""Download and install enterprise redis on a vm."""
vm.Install('redis_enterprise')
def Prepare(benchmark_spec):
"""Install Redis on one VM and memtier_benchmark on another.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
client_vms = benchmark_spec.vm_groups['clients']
server_vm = benchmark_spec.vm_groups['servers']
args = [((vm,), {}) for vm in client_vms + server_vm]
vm_util.RunThreaded(_InstallRedisEnterprise, args)
server_vm = server_vm[0]
server_vm.AllowPort(REDIS_PORT)
server_vm.AllowPort(REDIS_UI_PORT)
redis_enterprise.OfflineCores(server_vm)
redis_enterprise.CreateCluster(server_vm)
redis_enterprise.TuneProxy(server_vm)
redis_enterprise.SetUpCluster(server_vm, REDIS_PORT)
redis_enterprise.PinWorkers(server_vm)
redis_enterprise.WaitForClusterUp(server_vm, REDIS_PORT)
redis_enterprise.LoadCluster(server_vm, REDIS_PORT)
def Run(benchmark_spec):
"""Run memtier against enterprise redis and measure latency and throughput.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample objects.
"""
load_vms = benchmark_spec.vm_groups['clients']
redis_vm = benchmark_spec.vm_groups['servers'][0]
numa_pages_migrated, _ = redis_vm.RemoteCommand(
'cat /proc/vmstat | grep numa_pages_migrated')
numa_pages_migrated = numa_pages_migrated.split(' ')[1]
numa_balancing, _ = redis_vm.RemoteCommand(
'cat /proc/sys/kernel/numa_balancing')
setup_metadata = {
'numa_pages_migrated': numa_pages_migrated.rstrip(),
'numa_balancing': numa_balancing.rstrip(),
}
results = redis_enterprise.Run(redis_vm, load_vms, REDIS_PORT)
for result in results:
result.metadata.update(setup_metadata)
return results
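# Illustrative sketch of the parsing done in Run() above: the vmstat line read
# over SSH looks like 'numa_pages_migrated 12345' (the number is an example);
# the second whitespace-separated field is the counter stored in the metadata.
def _ParseNumaPagesMigrated(vmstat_line):
  return vmstat_line.split(' ')[1].rstrip()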
def Cleanup(benchmark_spec):
  """Cleanup Redis Enterprise (no explicit teardown needed)."""
  del benchmark_spec
|
import inspect
import os
import pkg_resources
import sys
__version__ = pkg_resources.get_distribution('chainercv').version
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
rtd_version = os.environ.get('READTHEDOCS_VERSION')
if rtd_version == 'latest':
tag = 'master'
else:
tag = 'v{}'.format(__version__)
extlinks = {
'blob':
('https://github.com/chainer/chainercv/blob/{}/%s'.format(tag), ''),
'tree':
('https://github.com/chainer/chainercv/tree/{}/%s'.format(tag), ''),
}
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.autosummary',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.mathjax',
'sphinx.ext.napoleon',
'sphinx.ext.linkcode']
try:
import sphinxcontrib.spelling # noqa
extensions.append('sphinxcontrib.spelling')
except ImportError:
pass
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'ChainerCV'
copyright = u'2017, Preferred Networks, inc.'
author = u'Preferred Networks, inc.'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0.13.1'
# The full version, including alpha/beta/rc tags.
release = u'0.13.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path.
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# Napoleon settings
napoleon_use_ivar = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
if not on_rtd:
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_style = 'css/modified_theme.css'
if on_rtd:
html_context = {
'css_files': [
'https://media.readthedocs.org/css/sphinx_rtd_theme.css',
'https://media.readthedocs.org/css/readthedocs-doc-embed.css',
'_static/css/modified_theme.css',
],
}
# -- Options for HTMLHelp output ------------------------------------------
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
# Output file base name for HTML help builder.
htmlhelp_basename = 'ChainerCVdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {}
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'ChainerCV.tex', u'ChainerCV Documentation',
u'Preferred Networks, inc.', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'chainercv', u'ChainerCV Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'ChainerCV', u'ChainerCV Documentation',
author, 'ChainerCV', 'One line description of project.',
'Miscellaneous'),
]
autosummary_generate = True
intersphinx_mapping = {
'python': ('https://docs.python.org/3/', None),
'numpy': ('http://docs.scipy.org/doc/numpy/', None),
}
source_root = None
def _is_egg_directory(path):
return (path.endswith('.egg') and
os.path.isdir(os.path.join(path, 'EGG-INFO')))
def _is_git_root(path):
return os.path.isdir(os.path.join(path, '.git'))
def _import_object_from_name(module_name, fullname):
obj = sys.modules.get(module_name)
if obj is None:
return None
for comp in fullname.split('.'):
obj = getattr(obj, comp)
return obj
_source_root = None
def _find_source_root(source_abs_path):
    # Note that the READTHEDOCS* environment variables cannot be used, because
    # they are not set in the Docker environment.
global _source_root
if _source_root is None:
dir = os.path.dirname(source_abs_path)
while True:
if _is_egg_directory(dir) or _is_git_root(dir):
# Reached the root directory
_source_root = dir
break
dir_ = os.path.dirname(dir)
if len(dir_) == len(dir):
raise RuntimeError('Couldn\'t parse root directory from '
'source file: {}'.format(source_abs_path))
dir = dir_
return _source_root
def _get_source_relative_path(source_abs_path):
return os.path.relpath(source_abs_path, _find_source_root(source_abs_path))
def _get_sourcefile_and_linenumber(obj):
# Retrieve the original function wrapped by contextlib.contextmanager
if callable(obj):
closure = getattr(obj, '__closure__', None)
if closure is not None:
obj = closure[0].cell_contents
# Get the source file name and line number at which obj is defined.
try:
filename = inspect.getsourcefile(obj)
except TypeError:
# obj is not a module, class, function, ..etc.
return None, None
# inspect can return None for cython objects
if filename is None:
return None, None
# Get the source line number
_, linenum = inspect.getsourcelines(obj)
return filename, linenum
def linkcode_resolve(domain, info):
if domain != 'py' or not info['module']:
return None
# Import the object from module path
obj = _import_object_from_name(info['module'], info['fullname'])
# If it's not defined in the internal module, return None.
mod = inspect.getmodule(obj)
if mod is None:
return None
if not (mod.__name__ == 'chainercv'
or mod.__name__.startswith('chainercv.')):
return None
# Retrieve source file name and line number
filename, linenum = _get_sourcefile_and_linenumber(obj)
if filename is None or linenum is None:
return None
filename = os.path.realpath(filename)
relpath = _get_source_relative_path(filename)
return 'https://github.com/chainer/chainercv/blob/{}/{}#L{}'.format(
tag, relpath, linenum)
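# Hedged sketch of the resulting link: for tag 'v0.13.1' and an object resolved
# to relative path 'chainercv/utils/foo.py' at line 10 (path and line number are
# illustrative), linkcode_resolve returns
# 'https://github.com/chainer/chainercv/blob/v0.13.1/chainercv/utils/foo.py#L10'.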
|
import logging
import telnetlib
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SELECT_SOURCE,
SUPPORT_STOP,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
)
from homeassistant.const import CONF_HOST, CONF_NAME, STATE_OFF, STATE_ON
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "Music station"
SUPPORT_DENON = (
SUPPORT_VOLUME_SET
| SUPPORT_VOLUME_MUTE
| SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_SELECT_SOURCE
)
SUPPORT_MEDIA_MODES = (
SUPPORT_PAUSE
| SUPPORT_STOP
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_NEXT_TRACK
| SUPPORT_PLAY
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
NORMAL_INPUTS = {
"Cd": "CD",
"Dvd": "DVD",
"Blue ray": "BD",
"TV": "TV",
"Satellite / Cable": "SAT/CBL",
"Game": "GAME",
"Game2": "GAME2",
"Video Aux": "V.AUX",
"Dock": "DOCK",
}
MEDIA_MODES = {
"Tuner": "TUNER",
"Media server": "SERVER",
"Ipod dock": "IPOD",
"Net/USB": "NET/USB",
"Rapsody": "RHAPSODY",
"Napster": "NAPSTER",
"Pandora": "PANDORA",
"LastFM": "LASTFM",
"Flickr": "FLICKR",
"Favorites": "FAVORITES",
"Internet Radio": "IRADIO",
"USB/IPOD": "USB/IPOD",
}
# Sub-modes of 'NET/USB'
# {'USB': 'USB', 'iPod Direct': 'IPD', 'Internet Radio': 'IRP',
# 'Favorites': 'FVP'}
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Denon platform."""
denon = DenonDevice(config[CONF_NAME], config[CONF_HOST])
if denon.update():
add_entities([denon])
class DenonDevice(MediaPlayerEntity):
"""Representation of a Denon device."""
def __init__(self, name, host):
"""Initialize the Denon device."""
self._name = name
self._host = host
self._pwstate = "PWSTANDBY"
self._volume = 0
# Initial value 60dB, changed if we get a MVMAX
self._volume_max = 60
self._source_list = NORMAL_INPUTS.copy()
self._source_list.update(MEDIA_MODES)
self._muted = False
self._mediasource = ""
self._mediainfo = ""
self._should_setup_sources = True
def _setup_sources(self, telnet):
# NSFRN - Network name
nsfrn = self.telnet_request(telnet, "NSFRN ?")[len("NSFRN ") :]
if nsfrn:
self._name = nsfrn
# SSFUN - Configured sources with (optional) names
self._source_list = {}
for line in self.telnet_request(telnet, "SSFUN ?", all_lines=True):
ssfun = line[len("SSFUN") :].split(" ", 1)
source = ssfun[0]
if len(ssfun) == 2 and ssfun[1]:
configured_name = ssfun[1]
else:
# No name configured, reusing the source name
configured_name = source
self._source_list[configured_name] = source
# SSSOD - Deleted sources
for line in self.telnet_request(telnet, "SSSOD ?", all_lines=True):
source, status = line[len("SSSOD") :].split(" ", 1)
if status == "DEL":
for pretty_name, name in self._source_list.items():
if source == name:
del self._source_list[pretty_name]
break
@classmethod
def telnet_request(cls, telnet, command, all_lines=False):
"""Execute `command` and return the response."""
_LOGGER.debug("Sending: %s", command)
telnet.write(command.encode("ASCII") + b"\r")
lines = []
while True:
line = telnet.read_until(b"\r", timeout=0.2)
if not line:
break
lines.append(line.decode("ASCII").strip())
_LOGGER.debug("Received: %s", line)
if all_lines:
return lines
return lines[0] if lines else ""
def telnet_command(self, command):
"""Establish a telnet connection and sends `command`."""
telnet = telnetlib.Telnet(self._host)
_LOGGER.debug("Sending: %s", command)
telnet.write(command.encode("ASCII") + b"\r")
telnet.read_very_eager() # skip response
telnet.close()
def update(self):
"""Get the latest details from the device."""
try:
telnet = telnetlib.Telnet(self._host)
except OSError:
return False
if self._should_setup_sources:
self._setup_sources(telnet)
self._should_setup_sources = False
self._pwstate = self.telnet_request(telnet, "PW?")
for line in self.telnet_request(telnet, "MV?", all_lines=True):
if line.startswith("MVMAX "):
# only grab two digit max, don't care about any half digit
self._volume_max = int(line[len("MVMAX ") : len("MVMAX XX")])
continue
if line.startswith("MV"):
self._volume = int(line[len("MV") :])
self._muted = self.telnet_request(telnet, "MU?") == "MUON"
self._mediasource = self.telnet_request(telnet, "SI?")[len("SI") :]
if self._mediasource in MEDIA_MODES.values():
self._mediainfo = ""
answer_codes = [
"NSE0",
"NSE1X",
"NSE2X",
"NSE3X",
"NSE4",
"NSE5",
"NSE6",
"NSE7",
"NSE8",
]
for line in self.telnet_request(telnet, "NSE", all_lines=True):
self._mediainfo += f"{line[len(answer_codes.pop(0)) :]}\n"
else:
self._mediainfo = self.source
telnet.close()
return True
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
if self._pwstate == "PWSTANDBY":
return STATE_OFF
if self._pwstate == "PWON":
return STATE_ON
return None
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._volume / self._volume_max
@property
def is_volume_muted(self):
"""Return boolean if volume is currently muted."""
return self._muted
@property
def source_list(self):
"""Return the list of available input sources."""
return sorted(list(self._source_list))
@property
def media_title(self):
"""Return the current media info."""
return self._mediainfo
@property
def supported_features(self):
"""Flag media player features that are supported."""
if self._mediasource in MEDIA_MODES.values():
return SUPPORT_DENON | SUPPORT_MEDIA_MODES
return SUPPORT_DENON
@property
def source(self):
"""Return the current input source."""
for pretty_name, name in self._source_list.items():
if self._mediasource == name:
return pretty_name
def turn_off(self):
"""Turn off media player."""
self.telnet_command("PWSTANDBY")
def volume_up(self):
"""Volume up media player."""
self.telnet_command("MVUP")
def volume_down(self):
"""Volume down media player."""
self.telnet_command("MVDOWN")
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
self.telnet_command(f"MV{round(volume * self._volume_max):02}")
def mute_volume(self, mute):
"""Mute (true) or unmute (false) media player."""
mute_status = "ON" if mute else "OFF"
self.telnet_command(f"MU{mute_status})")
def media_play(self):
"""Play media player."""
self.telnet_command("NS9A")
def media_pause(self):
"""Pause media player."""
self.telnet_command("NS9B")
def media_stop(self):
"""Pause media player."""
self.telnet_command("NS9C")
def media_next_track(self):
"""Send the next track command."""
self.telnet_command("NS9D")
def media_previous_track(self):
"""Send the previous track command."""
self.telnet_command("NS9E")
def turn_on(self):
"""Turn the media player on."""
self.telnet_command("PWON")
def select_source(self, source):
"""Select input source."""
self.telnet_command(f"SI{self._source_list.get(source)}")
|
from typing import List
from aiohomekit.model.characteristics import CharacteristicsTypes
from aiohomekit.model.characteristics.const import InputEventValues
from aiohomekit.model.services import ServicesTypes
from aiohomekit.utils import clamp_enum_to_char
import voluptuous as vol
from homeassistant.components.automation import AutomationActionType
from homeassistant.components.device_automation import TRIGGER_BASE_SCHEMA
from homeassistant.const import CONF_DEVICE_ID, CONF_DOMAIN, CONF_PLATFORM, CONF_TYPE
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.helpers.typing import ConfigType
from .const import DOMAIN, KNOWN_DEVICES, TRIGGERS
TRIGGER_TYPES = {
"button1",
"button2",
"button3",
"button4",
"button5",
"button6",
"button7",
"button8",
"button9",
"button10",
}
TRIGGER_SUBTYPES = {"single_press", "double_press", "long_press"}
CONF_IID = "iid"
CONF_SUBTYPE = "subtype"
TRIGGER_SCHEMA = TRIGGER_BASE_SCHEMA.extend(
{
vol.Required(CONF_TYPE): vol.In(TRIGGER_TYPES),
vol.Required(CONF_SUBTYPE): vol.In(TRIGGER_SUBTYPES),
}
)
HK_TO_HA_INPUT_EVENT_VALUES = {
InputEventValues.SINGLE_PRESS: "single_press",
InputEventValues.DOUBLE_PRESS: "double_press",
InputEventValues.LONG_PRESS: "long_press",
}
class TriggerSource:
"""Represents a stateless source of event data from HomeKit."""
def __init__(self, connection, aid, triggers):
"""Initialize a set of triggers for a device."""
self._hass = connection.hass
self._connection = connection
self._aid = aid
self._triggers = {}
for trigger in triggers:
self._triggers[(trigger["type"], trigger["subtype"])] = trigger
self._callbacks = {}
def fire(self, iid, value):
"""Process events that have been received from a HomeKit accessory."""
for event_handler in self._callbacks.get(iid, []):
event_handler(value)
def async_get_triggers(self):
"""List device triggers for homekit devices."""
yield from self._triggers
async def async_attach_trigger(
self,
config: TRIGGER_SCHEMA,
action: AutomationActionType,
automation_info: dict,
) -> CALLBACK_TYPE:
"""Attach a trigger."""
def event_handler(char):
if config[CONF_SUBTYPE] != HK_TO_HA_INPUT_EVENT_VALUES[char["value"]]:
return
self._hass.async_create_task(action({"trigger": config}))
trigger = self._triggers[config[CONF_TYPE], config[CONF_SUBTYPE]]
iid = trigger["characteristic"]
self._connection.add_watchable_characteristics([(self._aid, iid)])
self._callbacks.setdefault(iid, []).append(event_handler)
def async_remove_handler():
if iid in self._callbacks:
self._callbacks[iid].remove(event_handler)
return async_remove_handler
def enumerate_stateless_switch(service):
"""Enumerate a stateless switch, like a single button."""
# A stateless switch that has a SERVICE_LABEL_INDEX is part of a group
# And is handled separately
if service.has(CharacteristicsTypes.SERVICE_LABEL_INDEX):
if len(service.linked) > 0:
return []
char = service[CharacteristicsTypes.INPUT_EVENT]
# HomeKit itself supports single, double and long presses. But the
# manufacturer might not - clamp options to what they say.
all_values = clamp_enum_to_char(InputEventValues, char)
results = []
for event_type in all_values:
results.append(
{
"characteristic": char.iid,
"value": event_type,
"type": "button1",
"subtype": HK_TO_HA_INPUT_EVENT_VALUES[event_type],
}
)
return results
def enumerate_stateless_switch_group(service):
"""Enumerate a group of stateless switches, like a remote control."""
switches = list(
service.accessory.services.filter(
service_type=ServicesTypes.STATELESS_PROGRAMMABLE_SWITCH,
child_service=service,
order_by=[CharacteristicsTypes.SERVICE_LABEL_INDEX],
)
)
results = []
for idx, switch in enumerate(switches):
char = switch[CharacteristicsTypes.INPUT_EVENT]
# HomeKit itself supports single, double and long presses. But the
# manufacturer might not - clamp options to what they say.
all_values = clamp_enum_to_char(InputEventValues, char)
for event_type in all_values:
results.append(
{
"characteristic": char.iid,
"value": event_type,
"type": f"button{idx + 1}",
"subtype": HK_TO_HA_INPUT_EVENT_VALUES[event_type],
}
)
return results
def enumerate_doorbell(service):
"""Enumerate doorbell buttons."""
input_event = service[CharacteristicsTypes.INPUT_EVENT]
# HomeKit itself supports single, double and long presses. But the
# manufacturer might not - clamp options to what they say.
all_values = clamp_enum_to_char(InputEventValues, input_event)
results = []
for event_type in all_values:
results.append(
{
"characteristic": input_event.iid,
"value": event_type,
"type": "doorbell",
"subtype": HK_TO_HA_INPUT_EVENT_VALUES[event_type],
}
)
return results
TRIGGER_FINDERS = {
"service-label": enumerate_stateless_switch_group,
"stateless-programmable-switch": enumerate_stateless_switch,
"doorbell": enumerate_doorbell,
}
async def async_setup_triggers_for_entry(hass: HomeAssistant, config_entry):
"""Triggers aren't entities as they have no state, but we still need to set them up for a config entry."""
hkid = config_entry.data["AccessoryPairingID"]
conn = hass.data[KNOWN_DEVICES][hkid]
@callback
def async_add_service(aid, service_dict):
service_type = service_dict["stype"]
# If not a known service type then we can't handle any stateless events for it
if service_type not in TRIGGER_FINDERS:
return False
# We can't have multiple trigger sources for the same device id
# Can't have a doorbell and a remote control in the same accessory
# They have to be different accessories (they can be on the same bridge)
        # In practice, this is in line with what iOS actually supports, as far as we can tell.
device_id = conn.devices[aid]
if device_id in hass.data[TRIGGERS]:
return False
# At the moment add_listener calls us with the raw service dict, rather than
# a service model. So turn it into a service ourselves.
accessory = conn.entity_map.aid(aid)
service = accessory.services.iid(service_dict["iid"])
# Just because we recognise the service type doesn't mean we can actually
# extract any triggers - so only proceed if we can
triggers = TRIGGER_FINDERS[service_type](service)
if len(triggers) == 0:
return False
trigger = TriggerSource(conn, aid, triggers)
hass.data[TRIGGERS][device_id] = trigger
return True
conn.add_listener(async_add_service)
def async_fire_triggers(conn, events):
"""Process events generated by a HomeKit accessory into automation triggers."""
for (aid, iid), ev in events.items():
if aid in conn.devices:
device_id = conn.devices[aid]
if device_id in conn.hass.data[TRIGGERS]:
source = conn.hass.data[TRIGGERS][device_id]
source.fire(iid, ev)
async def async_get_triggers(hass: HomeAssistant, device_id: str) -> List[dict]:
"""List device triggers for homekit devices."""
if device_id not in hass.data.get(TRIGGERS, {}):
return []
device = hass.data[TRIGGERS][device_id]
triggers = []
for trigger, subtype in device.async_get_triggers():
triggers.append(
{
CONF_PLATFORM: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_TYPE: trigger,
CONF_SUBTYPE: subtype,
}
)
return triggers
async def async_attach_trigger(
hass: HomeAssistant,
config: ConfigType,
action: AutomationActionType,
automation_info: dict,
) -> CALLBACK_TYPE:
"""Attach a trigger."""
config = TRIGGER_SCHEMA(config)
device_id = config[CONF_DEVICE_ID]
device = hass.data[TRIGGERS][device_id]
return await device.async_attach_trigger(config, action, automation_info)
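# Illustrative sketch (added for clarity, not part of the original integration):
# shows the (type, subtype) pairs that TriggerSource.async_get_triggers() yields.
# The connection stub and trigger dicts below are hypothetical; only the
# attributes actually used by TriggerSource are provided.
if __name__ == "__main__":
    from types import SimpleNamespace
    _fake_connection = SimpleNamespace(hass=None)
    _example_triggers = [
        {
            "characteristic": 5,
            "value": InputEventValues.SINGLE_PRESS,
            "type": "button1",
            "subtype": "single_press",
        },
        {
            "characteristic": 5,
            "value": InputEventValues.LONG_PRESS,
            "type": "button1",
            "subtype": "long_press",
        },
    ]
    _source = TriggerSource(_fake_connection, aid=1, triggers=_example_triggers)
    print(list(_source.async_get_triggers()))  # -> [('button1', 'single_press'), ('button1', 'long_press')]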
|
from abc import abstractmethod
import logging
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_CONNECTIVITY,
DEVICE_CLASS_MOISTURE,
BinarySensorEntity,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import (
DOMAIN as DOMAIN_RACHIO,
KEY_DEVICE_ID,
KEY_RAIN_SENSOR_TRIPPED,
KEY_STATUS,
KEY_SUBTYPE,
SIGNAL_RACHIO_CONTROLLER_UPDATE,
SIGNAL_RACHIO_RAIN_SENSOR_UPDATE,
STATUS_ONLINE,
)
from .entity import RachioDevice
from .webhooks import (
SUBTYPE_COLD_REBOOT,
SUBTYPE_OFFLINE,
SUBTYPE_ONLINE,
SUBTYPE_RAIN_SENSOR_DETECTION_OFF,
SUBTYPE_RAIN_SENSOR_DETECTION_ON,
)
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Rachio binary sensors."""
entities = await hass.async_add_executor_job(_create_entities, hass, config_entry)
async_add_entities(entities)
_LOGGER.info("%d Rachio binary sensor(s) added", len(entities))
def _create_entities(hass, config_entry):
entities = []
for controller in hass.data[DOMAIN_RACHIO][config_entry.entry_id].controllers:
entities.append(RachioControllerOnlineBinarySensor(controller))
entities.append(RachioRainSensor(controller))
return entities
class RachioControllerBinarySensor(RachioDevice, BinarySensorEntity):
"""Represent a binary sensor that reflects a Rachio state."""
def __init__(self, controller):
"""Set up a new Rachio controller binary sensor."""
super().__init__(controller)
self._state = None
@property
def is_on(self) -> bool:
"""Return whether the sensor has a 'true' value."""
return self._state
@callback
def _async_handle_any_update(self, *args, **kwargs) -> None:
"""Determine whether an update event applies to this device."""
if args[0][KEY_DEVICE_ID] != self._controller.controller_id:
# For another device
return
        # For this device. The original positional args are re-wrapped here,
        # so subclasses read the webhook payload as args[0][0].
        self._async_handle_update(args, kwargs)
@abstractmethod
def _async_handle_update(self, *args, **kwargs) -> None:
"""Handle an update to the state of this sensor."""
class RachioControllerOnlineBinarySensor(RachioControllerBinarySensor):
"""Represent a binary sensor that reflects if the controller is online."""
@property
def name(self) -> str:
"""Return the name of this sensor including the controller name."""
return self._controller.name
@property
def unique_id(self) -> str:
"""Return a unique id for this entity."""
return f"{self._controller.controller_id}-online"
@property
def device_class(self) -> str:
"""Return the class of this device, from component DEVICE_CLASSES."""
return DEVICE_CLASS_CONNECTIVITY
@property
def icon(self) -> str:
"""Return the name of an icon for this sensor."""
return "mdi:wifi-strength-4" if self.is_on else "mdi:wifi-strength-off-outline"
@callback
def _async_handle_update(self, *args, **kwargs) -> None:
"""Handle an update to the state of this sensor."""
if (
args[0][0][KEY_SUBTYPE] == SUBTYPE_ONLINE
or args[0][0][KEY_SUBTYPE] == SUBTYPE_COLD_REBOOT
):
self._state = True
elif args[0][0][KEY_SUBTYPE] == SUBTYPE_OFFLINE:
self._state = False
self.async_write_ha_state()
async def async_added_to_hass(self):
"""Subscribe to updates."""
self._state = self._controller.init_data[KEY_STATUS] == STATUS_ONLINE
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_RACHIO_CONTROLLER_UPDATE,
self._async_handle_any_update,
)
)
class RachioRainSensor(RachioControllerBinarySensor):
"""Represent a binary sensor that reflects the status of the rain sensor."""
@property
def name(self) -> str:
"""Return the name of this sensor including the controller name."""
return f"{self._controller.name} rain sensor"
@property
def unique_id(self) -> str:
"""Return a unique id for this entity."""
return f"{self._controller.controller_id}-rain_sensor"
@property
def device_class(self) -> str:
"""Return the class of this device."""
return DEVICE_CLASS_MOISTURE
@property
def icon(self) -> str:
"""Return the icon for this sensor."""
return "mdi:water" if self.is_on else "mdi:water-off"
@callback
def _async_handle_update(self, *args, **kwargs) -> None:
"""Handle an update to the state of this sensor."""
if args[0][0][KEY_SUBTYPE] == SUBTYPE_RAIN_SENSOR_DETECTION_ON:
self._state = True
elif args[0][0][KEY_SUBTYPE] == SUBTYPE_RAIN_SENSOR_DETECTION_OFF:
self._state = False
self.async_write_ha_state()
async def async_added_to_hass(self):
"""Subscribe to updates."""
self._state = self._controller.init_data[KEY_RAIN_SENSOR_TRIPPED]
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_RACHIO_RAIN_SENSOR_UPDATE,
self._async_handle_any_update,
)
)
|
from django.urls import reverse
from weblate.trans.tests.test_views import ViewTestCase
class GitNoChangeProjectTest(ViewTestCase):
"""Testing of git manipulations with no change in repo."""
TEST_TYPE = "project"
def setUp(self):
super().setUp()
# We need extra privileges for overwriting
self.user.is_superuser = True
self.user.save()
def get_test_url(self, prefix):
return reverse(
f"{prefix}_{self.TEST_TYPE}",
kwargs=getattr(self, f"kw_{self.TEST_TYPE}"),
)
def get_expected_redirect(self):
return getattr(self, f"{self.TEST_TYPE}_url") + "#repository"
def test_commit(self):
response = self.client.post(self.get_test_url("commit"))
self.assertRedirects(response, self.get_expected_redirect())
def test_update(self):
response = self.client.post(self.get_test_url("update"))
self.assertRedirects(response, self.get_expected_redirect())
def test_push(self):
response = self.client.post(self.get_test_url("push"))
self.assertRedirects(response, self.get_expected_redirect())
def test_reset(self):
response = self.client.post(self.get_test_url("reset"))
self.assertRedirects(response, self.get_expected_redirect())
def test_cleanup(self):
response = self.client.post(self.get_test_url("cleanup"))
self.assertRedirects(response, self.get_expected_redirect())
def test_status(self):
response = self.client.get(self.get_test_url("git_status"))
self.assertContains(response, "Repository status")
class GitNoChangeComponentTest(GitNoChangeProjectTest):
"""Testing of component git manipulations."""
TEST_TYPE = "component"
class GitNoChangeTranslationTest(GitNoChangeProjectTest):
"""Testing of translation git manipulations."""
TEST_TYPE = "translation"
class GitChangeProjectTest(GitNoChangeProjectTest):
"""Testing of project git manipulations with not committed change."""
def setUp(self):
super().setUp()
self.change_unit("Ahoj světe!\n")
class GitChangeComponentTest(GitChangeProjectTest):
"""Testing of component git manipulations with not committed change."""
TEST_TYPE = "component"
class GitChangeTranslationTest(GitChangeProjectTest):
"""Testing of translation git manipulations with not committed change."""
TEST_TYPE = "translation"
class GitCommittedChangeProjectTest(GitNoChangeProjectTest):
"""Testing of project git manipulations with committed change in repo."""
def setUp(self):
super().setUp()
self.change_unit("Ahoj světe!\n")
self.project.commit_pending("test", self.user)
class GitCommittedChangeComponentTest(GitCommittedChangeProjectTest):
"""Testing of component git manipulations with committed change."""
TEST_TYPE = "component"
class GitCommittedChangeTranslationTest(GitCommittedChangeProjectTest):
"""Testing of translation git manipulations with committed change."""
TEST_TYPE = "translation"
class GitBrokenProjectTest(GitNoChangeProjectTest):
"""Testing of project git manipulations with disappeared remote."""
def setUp(self):
super().setUp()
repo = self.component.repository
with repo.lock:
repo.execute(["branch", "--delete", "--remotes", "origin/master"])
class GitBrokenComponentTest(GitBrokenProjectTest):
"""Testing of component git manipulations with disappeared remote."""
TEST_TYPE = "component"
class GitBrokenTranslationTest(GitBrokenProjectTest):
"""Testing of translation git manipulations with disappeared remote."""
TEST_TYPE = "translation"
|
from trashcli.put import TrashPutCmd
import os
from os.path import exists as file_exists
from datetime import datetime
from .files import make_empty_file, require_empty_dir
from .files import make_sticky_dir
from trashcli.fstab import FakeFstab
from trashcli.fs import remove_file
from trashcli.put import parent_path, RealFs
from .asserts import assert_line_in_text
import unittest
class TestPath(unittest.TestCase):
def setUp(self):
self.base = os.path.realpath(os.getcwd())
def test(self):
require_empty_dir('other_dir/dir')
remove_file('dir')
os.symlink('other_dir/dir', 'dir')
make_empty_file('dir/foo')
assert (os.path.join(self.base, 'other_dir/dir') ==
parent_path('dir/foo'))
remove_file('dir')
remove_file('other_dir')
def test2(self):
require_empty_dir('test-disk/dir')
remove_file('link-to-non-existent')
os.symlink('test-disk/non-existent', 'link-to-non-existent')
assert (self.base ==
parent_path('link-to-non-existent'))
remove_file('link-to-non-existent')
def test3(self):
remove_file('foo')
remove_file('bar')
require_empty_dir('foo')
require_empty_dir('bar')
os.symlink('../bar/zap', 'foo/zap')
assert os.path.join(self.base, 'foo') == parent_path('foo/zap')
remove_file('foo')
remove_file('bar')
def test4(self):
remove_file('foo')
remove_file('bar')
require_empty_dir('foo')
require_empty_dir('bar')
os.symlink('../bar/zap', 'foo/zap')
make_empty_file('bar/zap')
assert os.path.join(self.base,'foo') == parent_path('foo/zap')
remove_file('foo')
remove_file('bar')
class TrashPutTest(unittest.TestCase):
def setUp(self):
self.prepare_fixture()
self.setUp2()
def setUp2(self):
pass
def prepare_fixture(self):
require_empty_dir('sandbox')
self.environ = {'XDG_DATA_HOME': 'sandbox/XDG_DATA_HOME' }
from .output_collector import OutputCollector
self.out = OutputCollector()
self.err = OutputCollector()
self.fstab = FakeFstab()
self.stderr_should_be = self.err.should_be
self.output_should_be = self.out.should_be
def run_trashput(self, *argv):
cmd = TrashPutCmd(
stdout = self.out,
stderr = self.err,
environ = self.environ,
volume_of = self.fstab.volume_of,
parent_path = os.path.dirname,
realpath = lambda x:x,
fs = RealFs(),
getuid = lambda: None,
now = datetime.now
)
self.exit_code = cmd.run(list(argv))
self.stderr = self.err.getvalue()
class Test_when_deleting_an_existing_file(TrashPutTest):
def setUp2(self):
make_empty_file('sandbox/foo')
self.run_trashput('trash-put', 'sandbox/foo')
def test_it_should_remove_the_file(self):
assert not file_exists('sandbox/foo')
def test_it_should_remove_it_silently(self):
self.output_should_be('')
def test_a_trashinfo_file_should_have_been_created(self):
open('sandbox/XDG_DATA_HOME/Trash/info/foo.trashinfo').read()
class Test_when_deleting_an_existing_file_in_verbose_mode(TrashPutTest):
def setUp2(self):
make_empty_file('sandbox/foo')
self.run_trashput('trash-put', '-v', 'sandbox/foo')
def test_should_tell_where_a_file_is_trashed(self):
assert ("trash-put: 'sandbox/foo' trashed in sandbox/XDG_DATA_HOME/Trash" in
self.stderr.splitlines())
    def test_should_be_successful(self):
assert 0 == self.exit_code
class Test_when_deleting_a_non_existing_file(TrashPutTest):
def setUp2(self):
self.run_trashput('trash-put', '-v', 'non-existent')
    def test_should_fail(self):
        assert 0 != self.exit_code
class Test_when_fed_with_dot_arguments(TrashPutTest):
def setUp2(self):
require_empty_dir('sandbox/')
make_empty_file('other_argument')
def test_dot_argument_is_skipped(self):
self.run_trashput("trash-put", ".", "other_argument")
        # the '.' directory shouldn't be trashed, but a diagnostic message
        # should be written to stderr
self.stderr_should_be(
"trash-put: cannot trash directory '.'\n")
# the remaining arguments should be processed
assert not file_exists('other_argument')
def test_dot_dot_argument_is_skipped(self):
self.run_trashput("trash-put", "..", "other_argument")
        # the '..' directory shouldn't be trashed, but a diagnostic message
        # should be written to stderr
self.stderr_should_be(
"trash-put: cannot trash directory '..'\n")
# the remaining arguments should be processed
assert not file_exists('other_argument')
def test_dot_argument_is_skipped_even_in_subdirs(self):
self.run_trashput("trash-put", "sandbox/.", "other_argument")
        # the '.' directory shouldn't be trashed, but a diagnostic message
        # should be written to stderr
self.stderr_should_be(
"trash-put: cannot trash '.' directory 'sandbox/.'\n")
# the remaining arguments should be processed
assert not file_exists('other_argument')
assert file_exists('sandbox')
def test_dot_dot_argument_is_skipped_even_in_subdirs(self):
self.run_trashput("trash-put", "sandbox/..", "other_argument")
        # the '..' directory shouldn't be trashed, but a diagnostic message
        # should be written to stderr
self.stderr_should_be(
"trash-put: cannot trash '..' directory 'sandbox/..'\n")
# the remaining arguments should be processed
assert not file_exists('other_argument')
assert file_exists('sandbox')
class TestUnsecureTrashDirMessages(TrashPutTest):
def setUp(self):
TrashPutTest.setUp(self)
require_empty_dir('fake-vol')
self.fstab.add_mount('fake-vol')
make_empty_file('fake-vol/foo')
def test_when_is_unsticky(self):
require_empty_dir('fake-vol/.Trash')
self.run_trashput('trash-put', '-v', 'fake-vol/foo')
assert_line_in_text(
'trash-put: found unsecure .Trash dir (should be sticky): '
'fake-vol/.Trash', self.stderr)
def test_when_it_is_not_a_dir(self):
make_empty_file('fake-vol/.Trash')
self.run_trashput('trash-put', '-v', 'fake-vol/foo')
assert_line_in_text(
'trash-put: found unusable .Trash dir (should be a dir): '
'fake-vol/.Trash', self.stderr)
def test_when_is_a_symlink(self):
make_sticky_dir('fake-vol/link-destination')
os.symlink('link-destination', 'fake-vol/.Trash')
self.run_trashput('trash-put', '-v', 'fake-vol/foo')
assert_line_in_text(
'trash-put: found unsecure .Trash dir (should not be a symlink): '
'fake-vol/.Trash', self.stderr)
|
from homeassistant.components.sensor import DOMAIN
from homeassistant.exceptions import PlatformNotReady
from . import CONF_MONITORED_CONDITIONS, DATA_KEY, LTEEntity
from .sensor_types import SENSOR_SMS, SENSOR_SMS_TOTAL, SENSOR_UNITS, SENSOR_USAGE
async def async_setup_platform(hass, config, async_add_entities, discovery_info):
"""Set up Netgear LTE sensor devices."""
if discovery_info is None:
return
modem_data = hass.data[DATA_KEY].get_modem_data(discovery_info)
if not modem_data or not modem_data.data:
raise PlatformNotReady
sensor_conf = discovery_info[DOMAIN]
monitored_conditions = sensor_conf[CONF_MONITORED_CONDITIONS]
sensors = []
for sensor_type in monitored_conditions:
if sensor_type == SENSOR_SMS:
sensors.append(SMSUnreadSensor(modem_data, sensor_type))
elif sensor_type == SENSOR_SMS_TOTAL:
sensors.append(SMSTotalSensor(modem_data, sensor_type))
elif sensor_type == SENSOR_USAGE:
sensors.append(UsageSensor(modem_data, sensor_type))
else:
sensors.append(GenericSensor(modem_data, sensor_type))
async_add_entities(sensors)
class LTESensor(LTEEntity):
"""Base LTE sensor entity."""
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return SENSOR_UNITS[self.sensor_type]
class SMSUnreadSensor(LTESensor):
"""Unread SMS sensor entity."""
@property
def state(self):
"""Return the state of the sensor."""
return sum(1 for x in self.modem_data.data.sms if x.unread)
class SMSTotalSensor(LTESensor):
"""Total SMS sensor entity."""
@property
def state(self):
"""Return the state of the sensor."""
return len(self.modem_data.data.sms)
class UsageSensor(LTESensor):
"""Data usage sensor entity."""
@property
def state(self):
"""Return the state of the sensor."""
return round(self.modem_data.data.usage / 1024 ** 2, 1)
class GenericSensor(LTESensor):
"""Sensor entity with raw state."""
@property
def state(self):
"""Return the state of the sensor."""
return getattr(self.modem_data.data, self.sensor_type)
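# Illustrative note (added for clarity, not part of the original platform):
# UsageSensor.state converts the modem's raw byte counter into mebibytes,
# rounded to one decimal place. The byte count below is a hypothetical value.
if __name__ == "__main__":
    raw_usage_bytes = 5 * 1024 ** 2  # e.g. what modem_data.data.usage might report
    print(round(raw_usage_bytes / 1024 ** 2, 1))  # -> 5.0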
|
from subliminal.matches import guess_matches
def test_guess_matches_movie(movies):
video = movies['man_of_steel']
guess = {'title': video.title.upper(), 'year': video.year, 'release_group': video.release_group.upper(),
'screen_size': video.resolution, 'source': video.source, 'video_codec': video.video_codec,
'audio_codec': video.audio_codec}
expected = {'title', 'year', 'country', 'release_group', 'resolution', 'source', 'video_codec', 'audio_codec'}
assert guess_matches(video, guess) == expected
def test_guess_matches_episode(episodes):
video = episodes['bbt_s07e05']
guess = {'title': video.series, 'season': video.season, 'episode': video.episode, 'year': video.year,
'episode_title': video.title.upper(), 'release_group': video.release_group.upper(),
'screen_size': video.resolution, 'source': video.source, 'video_codec': video.video_codec,
'audio_codec': video.audio_codec}
expected = {'series', 'season', 'episode', 'title', 'year', 'country', 'release_group', 'resolution', 'source',
'video_codec', 'audio_codec'}
assert guess_matches(video, guess) == expected
def test_guess_matches_episode_equivalent_release_group(episodes):
video = episodes['bbt_s07e05']
guess = {'title': video.series, 'season': video.season, 'episode': video.episode, 'year': video.year,
'episode_title': video.title.upper(), 'release_group': 'LOL',
'screen_size': video.resolution, 'source': video.source, 'video_codec': video.video_codec,
'audio_codec': video.audio_codec}
expected = {'series', 'season', 'episode', 'title', 'year', 'country', 'release_group', 'resolution', 'source',
'video_codec', 'audio_codec'}
assert guess_matches(video, guess) == expected
def test_guess_matches_multiple_sources(episodes):
video = episodes['bbt_s07e05']
video.source = [video.source, 'Blu-ray']
guess = {'title': video.series, 'season': video.season, 'episode': video.episode, 'year': video.year,
'episode_title': video.title.upper(), 'release_group': 'LOL',
'screen_size': video.resolution, 'source': video.source, 'video_codec': video.video_codec,
'audio_codec': video.audio_codec}
expected = {'series', 'season', 'episode', 'title', 'year', 'country', 'release_group', 'resolution', 'source',
'video_codec', 'audio_codec'}
assert guess_matches(video, guess) == expected
def test_guess_matches_multiple_sources_no_match(episodes):
video = episodes['bbt_s07e05']
guess = {'title': video.series, 'season': video.season, 'episode': video.episode, 'year': video.year,
'episode_title': video.title.upper(), 'release_group': 'LOL',
'screen_size': video.resolution, 'source': [video.source, 'Blu-ray'], 'video_codec': video.video_codec,
'audio_codec': video.audio_codec}
expected = {'series', 'season', 'episode', 'title', 'year', 'country', 'release_group', 'resolution', 'video_codec',
'audio_codec'}
assert guess_matches(video, guess) == expected
def test_guess_matches_episode_no_year(episodes):
video = episodes['dallas_s01e03']
guess = {'title': video.series, 'season': video.season, 'episode': video.episode}
expected = {'series', 'season', 'episode', 'year', 'country'}
assert guess_matches(video, guess) == expected
|
import heapq
import sys
from collections import namedtuple
from datetime import datetime
from functools import total_ordering
from weakref import proxy as weakrefproxy
from time import monotonic
from vine.utils import wraps
from kombu.log import get_logger
from time import time as _time
try:
from pytz import utc
except ImportError: # pragma: no cover
utc = None
__all__ = ('Entry', 'Timer', 'to_timestamp')
logger = get_logger(__name__)
DEFAULT_MAX_INTERVAL = 2
EPOCH = datetime.utcfromtimestamp(0).replace(tzinfo=utc)
IS_PYPY = hasattr(sys, 'pypy_version_info')
scheduled = namedtuple('scheduled', ('eta', 'priority', 'entry'))
def to_timestamp(d, default_timezone=utc, time=monotonic):
"""Convert datetime to timestamp.
    If `d` is already a timestamp, then that will be used.
"""
if isinstance(d, datetime):
if d.tzinfo is None:
d = d.replace(tzinfo=default_timezone)
diff = _time() - time()
return max((d - EPOCH).total_seconds() - diff, 0)
return d
@total_ordering
class Entry:
"""Schedule Entry."""
if not IS_PYPY: # pragma: no cover
__slots__ = (
'fun', 'args', 'kwargs', 'tref', 'canceled',
'_last_run', '__weakref__',
)
def __init__(self, fun, args=None, kwargs=None):
self.fun = fun
self.args = args or []
self.kwargs = kwargs or {}
self.tref = weakrefproxy(self)
self._last_run = None
self.canceled = False
def __call__(self):
return self.fun(*self.args, **self.kwargs)
def cancel(self):
try:
self.tref.canceled = True
except ReferenceError: # pragma: no cover
pass
def __repr__(self):
        return '<TimerEntry: {}(*{!r}, **{!r})>'.format(
self.fun.__name__, self.args, self.kwargs)
# must not use hash() to order entries
def __lt__(self, other):
return id(self) < id(other)
@property
def cancelled(self):
return self.canceled
@cancelled.setter
def cancelled(self, value):
self.canceled = value
class Timer:
"""Async timer implementation."""
Entry = Entry
on_error = None
def __init__(self, max_interval=None, on_error=None, **kwargs):
self.max_interval = float(max_interval or DEFAULT_MAX_INTERVAL)
self.on_error = on_error or self.on_error
self._queue = []
def __enter__(self):
return self
def __exit__(self, *exc_info):
self.stop()
def call_at(self, eta, fun, args=(), kwargs=None, priority=0):
kwargs = {} if not kwargs else kwargs
return self.enter_at(self.Entry(fun, args, kwargs), eta, priority)
def call_after(self, secs, fun, args=(), kwargs=None, priority=0):
kwargs = {} if not kwargs else kwargs
return self.enter_after(secs, self.Entry(fun, args, kwargs), priority)
def call_repeatedly(self, secs, fun, args=(), kwargs=None, priority=0):
kwargs = {} if not kwargs else kwargs
tref = self.Entry(fun, args, kwargs)
@wraps(fun)
def _reschedules(*args, **kwargs):
last, now = tref._last_run, monotonic()
lsince = (now - tref._last_run) if last else secs
try:
if lsince and lsince >= secs:
tref._last_run = now
return fun(*args, **kwargs)
finally:
if not tref.canceled:
last = tref._last_run
next = secs - (now - last) if last else secs
self.enter_after(next, tref, priority)
tref.fun = _reschedules
tref._last_run = None
return self.enter_after(secs, tref, priority)
def enter_at(self, entry, eta=None, priority=0, time=monotonic):
"""Enter function into the scheduler.
Arguments:
entry (~kombu.asynchronous.timer.Entry): Item to enter.
eta (datetime.datetime): Scheduled time.
priority (int): Unused.
"""
if eta is None:
eta = time()
if isinstance(eta, datetime):
try:
eta = to_timestamp(eta)
except Exception as exc:
if not self.handle_error(exc):
raise
return
return self._enter(eta, priority, entry)
def enter_after(self, secs, entry, priority=0, time=monotonic):
return self.enter_at(entry, time() + secs, priority)
def _enter(self, eta, priority, entry, push=heapq.heappush):
push(self._queue, scheduled(eta, priority, entry))
return entry
def apply_entry(self, entry):
try:
entry()
except Exception as exc:
if not self.handle_error(exc):
logger.error('Error in timer: %r', exc, exc_info=True)
def handle_error(self, exc_info):
if self.on_error:
self.on_error(exc_info)
return True
def stop(self):
pass
def __iter__(self, min=min, nowfun=monotonic,
pop=heapq.heappop, push=heapq.heappush):
"""Iterate over schedule.
This iterator yields a tuple of ``(wait_seconds, entry)``,
where if entry is :const:`None` the caller should wait
for ``wait_seconds`` until it polls the schedule again.
"""
max_interval = self.max_interval
queue = self._queue
while 1:
if queue:
eventA = queue[0]
now, eta = nowfun(), eventA[0]
if now < eta:
yield min(eta - now, max_interval), None
else:
eventB = pop(queue)
if eventB is eventA:
entry = eventA[2]
if not entry.canceled:
yield None, entry
continue
else:
push(queue, eventB)
else:
yield None, None
def clear(self):
self._queue[:] = [] # atomic, without creating a new list.
def cancel(self, tref):
tref.cancel()
def __len__(self):
return len(self._queue)
def __nonzero__(self):
return True
@property
def queue(self, _pop=heapq.heappop):
"""Snapshot of underlying datastructure."""
events = list(self._queue)
return [_pop(v) for v in [events] * len(events)]
@property
def schedule(self):
return self
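# Minimal usage sketch (added for illustration, not part of kombu): schedules a
# one-shot callback with call_after() and drains it using the iterator protocol
# documented in Timer.__iter__. Guarded so it does not run on import.
if __name__ == '__main__':
    timer = Timer()
    timer.call_after(0.0, print, ('timer fired',))
    for wait_seconds, entry in timer:
        if entry is not None:
            timer.apply_entry(entry)
        if not len(timer):
            break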
|
from __future__ import absolute_import
import logging
import time
from absl import flags
import boto
import gcs_oauth2_boto_plugin # noqa
# This is the path that we SCP object_storage_interface to.
from providers import object_storage_interface
FLAGS = flags.FLAGS
class GcsServiceBoto(object_storage_interface.ObjectStorageServiceBase):
"""An interface to Google Cloud Storage, using the boto library."""
def __init__(self):
pass
def _StorageURI(self, bucket, object_name=None):
"""Return a storage_uri for the given resource.
Args:
bucket: the name of a bucket.
object_name: the name of an object, if given.
Returns:
      A storage_uri. If object_name is given, the uri will be for the
      bucket-object combination; otherwise it will be for the bucket alone.
"""
if object_name is not None:
path = '%s/%s' % (bucket, object_name)
else:
path = bucket
storage_uri = boto.storage_uri(path, 'gs')
return storage_uri
def _CreateHeader(self):
# Local use of header for debugging
header = {}
return header
def ListObjects(self, bucket, prefix):
bucket_uri = self._StorageURI(bucket)
return [obj.name for obj in bucket_uri.list_bucket(prefix=prefix)]
def DeleteObjects(self,
bucket,
objects_to_delete,
objects_deleted=None,
delay_time=0,
object_sizes=None):
start_times = []
latencies = []
sizes = []
for index, object_name in enumerate(objects_to_delete):
try:
time.sleep(delay_time)
start_time = time.time()
object_uri = self._StorageURI(bucket, object_name)
object_uri.delete_key(headers=self._CreateHeader())
latency = time.time() - start_time
start_times.append(start_time)
latencies.append(latency)
if objects_deleted is not None:
objects_deleted.append(object_name)
if object_sizes:
sizes.append(object_sizes[index])
except: # pylint:disable=bare-except
logging.exception('Caught exception while deleting object %s.',
object_name)
return start_times, latencies, sizes
def BulkDeleteObjects(self, bucket, objects_to_delete, delay_time):
# GCS Boto currently does not support Bulk delete
start_times, latencies, _ = self.DeleteObjects(
bucket, objects_to_delete, delay_time=delay_time)
return min(start_times), sum(latencies)
def WriteObjectFromBuffer(self, bucket, object_name, stream, size):
start_time = time.time()
stream.seek(0)
object_uri = self._StorageURI(bucket, object_name)
object_uri.set_contents_from_file(
stream, size=size, headers=self._CreateHeader())
latency = time.time() - start_time
return start_time, latency
def ReadObject(self, bucket, object_name):
start_time = time.time()
object_uri = self._StorageURI(bucket, object_name)
object_uri.new_key().get_contents_as_string(headers=self._CreateHeader())
latency = time.time() - start_time
return start_time, latency
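# Illustrative sketch (added for clarity, not part of the original service code):
# exercises the wrapper above against a hypothetical bucket. Running it requires
# boto configured with valid GCS credentials and an existing bucket; the bucket
# and object names below are placeholders.
if __name__ == '__main__':
  import io
  service = GcsServiceBoto()
  payload = io.BytesIO(b'hello object storage')
  _, write_latency = service.WriteObjectFromBuffer(
      'example-benchmark-bucket', 'demo-object', payload,
      size=len(payload.getvalue()))
  _, read_latency = service.ReadObject('example-benchmark-bucket', 'demo-object')
  print('write took %.3fs, read took %.3fs' % (write_latency, read_latency))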
|
import logging
import time
from scrapy.dupefilters import BaseDupeFilter
from scrapy.utils.request import request_fingerprint
from . import defaults
from .connection import get_redis_from_settings
logger = logging.getLogger(__name__)
# TODO: Rename class to RedisDupeFilter.
class RFPDupeFilter(BaseDupeFilter):
"""Redis-based request duplicates filter.
This class can also be used with default Scrapy's scheduler.
"""
logger = logger
def __init__(self, server, key, debug=False):
"""Initialize the duplicates filter.
Parameters
----------
server : redis.StrictRedis
The redis server instance.
key : str
            Redis key where to store fingerprints.
debug : bool, optional
Whether to log filtered requests.
"""
self.server = server
self.key = key
self.debug = debug
self.logdupes = True
@classmethod
def from_settings(cls, settings):
"""Returns an instance from given settings.
        By default, this uses the key ``dupefilter:<timestamp>``. When using the
        ``scrapy_redis.scheduler.Scheduler`` class, this method is not used as
        it needs to pass the spider name in the key.
Parameters
----------
settings : scrapy.settings.Settings
Returns
-------
RFPDupeFilter
A RFPDupeFilter instance.
"""
server = get_redis_from_settings(settings)
        # XXX: This creates a one-time key, needed to support using this class
        # as a standalone dupefilter with Scrapy's default scheduler.
        # If Scrapy passed the spider to open(), this wouldn't be needed.
# TODO: Use SCRAPY_JOB env as default and fallback to timestamp.
key = defaults.DUPEFILTER_KEY % {'timestamp': int(time.time())}
debug = settings.getbool('DUPEFILTER_DEBUG')
return cls(server, key=key, debug=debug)
@classmethod
def from_crawler(cls, crawler):
"""Returns instance from crawler.
Parameters
----------
crawler : scrapy.crawler.Crawler
Returns
-------
RFPDupeFilter
Instance of RFPDupeFilter.
"""
return cls.from_settings(crawler.settings)
def request_seen(self, request):
"""Returns True if request was already seen.
Parameters
----------
request : scrapy.http.Request
Returns
-------
bool
"""
fp = self.request_fingerprint(request)
# This returns the number of values added, zero if already exists.
added = self.server.sadd(self.key, fp)
return added == 0
def request_fingerprint(self, request):
"""Returns a fingerprint for a given request.
Parameters
----------
request : scrapy.http.Request
Returns
-------
str
"""
return request_fingerprint(request)
@classmethod
def from_spider(cls, spider):
settings = spider.settings
server = get_redis_from_settings(settings)
dupefilter_key = settings.get("SCHEDULER_DUPEFILTER_KEY", defaults.SCHEDULER_DUPEFILTER_KEY)
key = dupefilter_key % {'spider': spider.name}
debug = settings.getbool('DUPEFILTER_DEBUG')
return cls(server, key=key, debug=debug)
def close(self, reason=''):
"""Delete data on close. Called by Scrapy's scheduler.
Parameters
----------
reason : str, optional
"""
self.clear()
def clear(self):
"""Clears fingerprints data."""
self.server.delete(self.key)
def log(self, request, spider):
"""Logs given request.
Parameters
----------
request : scrapy.http.Request
spider : scrapy.spiders.Spider
"""
if self.debug:
msg = "Filtered duplicate request: %(request)s"
self.logger.debug(msg, {'request': request}, extra={'spider': spider})
elif self.logdupes:
msg = ("Filtered duplicate request %(request)s"
" - no more duplicates will be shown"
" (see DUPEFILTER_DEBUG to show all duplicates)")
self.logger.debug(msg, {'request': request}, extra={'spider': spider})
self.logdupes = False
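# Illustrative sketch (added for clarity, not part of scrapy-redis): demonstrates
# the request_seen() contract with a minimal in-memory stand-in for the redis
# server. _FakeRedis is hypothetical and implements only what the filter needs.
if __name__ == '__main__':
    from scrapy.http import Request
    class _FakeRedis:
        def __init__(self):
            self._sets = {}
        def sadd(self, key, value):
            members = self._sets.setdefault(key, set())
            if value in members:
                return 0
            members.add(value)
            return 1
        def delete(self, key):
            self._sets.pop(key, None)
    dupefilter = RFPDupeFilter(_FakeRedis(), key='dupefilter:test')
    request = Request('https://example.com')
    assert not dupefilter.request_seen(request)  # first time: not a duplicate
    assert dupefilter.request_seen(request)      # second time: filtered out
    dupefilter.close()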
|
import logging
from pprint import pformat
from homeassistant.components.supla import (
DOMAIN,
SUPLA_COORDINATORS,
SUPLA_SERVERS,
SuplaChannel,
)
from homeassistant.components.switch import SwitchEntity
_LOGGER = logging.getLogger(__name__)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Supla switches."""
if discovery_info is None:
return
_LOGGER.debug("Discovery: %s", pformat(discovery_info))
entities = []
for device in discovery_info:
server_name = device["server_name"]
entities.append(
SuplaSwitch(
device,
hass.data[DOMAIN][SUPLA_SERVERS][server_name],
hass.data[DOMAIN][SUPLA_COORDINATORS][server_name],
)
)
async_add_entities(entities)
class SuplaSwitch(SuplaChannel, SwitchEntity):
"""Representation of a Supla Switch."""
async def async_turn_on(self, **kwargs):
"""Turn on the switch."""
await self.async_action("TURN_ON")
async def async_turn_off(self, **kwargs):
"""Turn off the switch."""
await self.async_action("TURN_OFF")
@property
def is_on(self):
"""Return true if switch is on."""
state = self.channel_data.get("state")
if state:
return state["on"]
return False
|
import unittest
from hyperopt import fmin, tpe, hp
class TestHyperopt(unittest.TestCase):
def test_find_min(self):
best = fmin(
fn=lambda x: x ** 2,
space=hp.uniform('x', -10, 10),
algo=tpe.suggest,
max_evals=1,
)
self.assertIn('x', best)
|
from copy import deepcopy
from io import StringIO
import os.path as op
from datetime import datetime, timezone
import numpy as np
from numpy.testing import assert_array_equal, assert_allclose
import pytest
from scipy import sparse
from mne import read_evokeds, read_cov, pick_types
from mne.io.pick import _picks_by_type
from mne.epochs import make_fixed_length_epochs
from mne.io import read_raw_fif
from mne.time_frequency import tfr_morlet
from mne.utils import (_get_inst_data, hashfunc,
sum_squared, compute_corr, create_slices, _time_mask,
_freq_mask, random_permutation, _reg_pinv, object_size,
object_hash, object_diff, _apply_scaling_cov,
_undo_scaling_cov, _apply_scaling_array,
_undo_scaling_array, _PCA, requires_sklearn,
_array_equal_nan, _julian_to_cal, _cal_to_julian,
_dt_to_julian, _julian_to_dt, grand_average,
_ReuseCycle, requires_version)
base_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
fname_raw = op.join(base_dir, 'test_raw.fif')
ave_fname = op.join(base_dir, 'test-ave.fif')
cov_fname = op.join(base_dir, 'test-cov.fif')
def test_get_inst_data():
"""Test _get_inst_data."""
raw = read_raw_fif(fname_raw)
raw.crop(tmax=1.)
assert_array_equal(_get_inst_data(raw), raw._data)
raw.pick_channels(raw.ch_names[:2])
epochs = make_fixed_length_epochs(raw, 0.5)
assert_array_equal(_get_inst_data(epochs), epochs._data)
evoked = epochs.average()
assert_array_equal(_get_inst_data(evoked), evoked.data)
evoked.crop(tmax=0.1)
picks = list(range(2))
freqs = [50., 55.]
n_cycles = 3
tfr = tfr_morlet(evoked, freqs, n_cycles, return_itc=False, picks=picks)
assert_array_equal(_get_inst_data(tfr), tfr.data)
pytest.raises(TypeError, _get_inst_data, 'foo')
def test_hashfunc(tmpdir):
"""Test md5/sha1 hash calculations."""
tempdir = str(tmpdir)
fname1 = op.join(tempdir, 'foo')
fname2 = op.join(tempdir, 'bar')
with open(fname1, 'wb') as fid:
fid.write(b'abcd')
with open(fname2, 'wb') as fid:
fid.write(b'efgh')
for hash_type in ('md5', 'sha1'):
hash1 = hashfunc(fname1, hash_type=hash_type)
hash1_ = hashfunc(fname1, 1, hash_type=hash_type)
hash2 = hashfunc(fname2, hash_type=hash_type)
hash2_ = hashfunc(fname2, 1024, hash_type=hash_type)
assert hash1 == hash1_
assert hash2 == hash2_
assert hash1 != hash2
def test_sum_squared():
"""Test optimized sum of squares."""
X = np.random.RandomState(0).randint(0, 50, (3, 3))
assert np.sum(X ** 2) == sum_squared(X)
def test_compute_corr():
"""Test Anscombe's Quartett."""
x = np.array([10, 8, 13, 9, 11, 14, 6, 4, 12, 7, 5])
y = np.array([[8.04, 6.95, 7.58, 8.81, 8.33, 9.96,
7.24, 4.26, 10.84, 4.82, 5.68],
[9.14, 8.14, 8.74, 8.77, 9.26, 8.10,
6.13, 3.10, 9.13, 7.26, 4.74],
[7.46, 6.77, 12.74, 7.11, 7.81, 8.84,
6.08, 5.39, 8.15, 6.42, 5.73],
[8, 8, 8, 8, 8, 8, 8, 19, 8, 8, 8],
[6.58, 5.76, 7.71, 8.84, 8.47, 7.04,
5.25, 12.50, 5.56, 7.91, 6.89]])
r = compute_corr(x, y.T)
r2 = np.array([np.corrcoef(x, y[i])[0, 1]
for i in range(len(y))])
assert_allclose(r, r2)
pytest.raises(ValueError, compute_corr, [1, 2], [])
def test_create_slices():
"""Test checking the create of time create_slices."""
    # Test that create_slices by default returns an empty list
    assert (create_slices(0, 0) == [])
    # Test that create_slices returns the correct number of slices
    assert (len(create_slices(0, 100)) == 100)
    # Test with a non-zero start parameter
    assert (len(create_slices(50, 100)) == 50)
    # Test the number of slices with length=2
    assert (len(create_slices(0, 100, length=2)) == 50)
    # Test the number of slices with a manual step between slices
    assert (len(create_slices(0, 100, step=10)) == 10)
    # Test the number of slices for non-consecutive samples (length=50, step=10)
    assert (len(create_slices(0, 500, length=50, step=10)) == 46)
    # Test that slice elements have the correct start, stop and step
slices = create_slices(0, 10)
assert (slices[0].start == 0)
assert (slices[0].step == 1)
assert (slices[0].stop == 1)
assert (slices[-1].stop == 10)
# Same with larger window width
slices = create_slices(0, 9, length=3)
assert (slices[0].start == 0)
assert (slices[0].step == 1)
assert (slices[0].stop == 3)
assert (slices[-1].stop == 9)
# Same with manual slices' separation
slices = create_slices(0, 9, length=3, step=1)
assert (len(slices) == 7)
assert (slices[0].step == 1)
assert (slices[0].stop == 3)
assert (slices[-1].start == 6)
assert (slices[-1].stop == 9)
def test_time_mask():
"""Test safe time masking."""
N = 10
x = np.arange(N).astype(float)
assert _time_mask(x, 0, N - 1).sum() == N
assert _time_mask(x - 1e-10, 0, N - 1, sfreq=1000.).sum() == N
assert _time_mask(x - 1e-10, None, N - 1, sfreq=1000.).sum() == N
assert _time_mask(x - 1e-10, None, None, sfreq=1000.).sum() == N
assert _time_mask(x - 1e-10, -np.inf, None, sfreq=1000.).sum() == N
assert _time_mask(x - 1e-10, None, np.inf, sfreq=1000.).sum() == N
# non-uniformly spaced inputs
x = np.array([4, 10])
assert _time_mask(x[:1], tmin=10, sfreq=1, raise_error=False).sum() == 0
assert _time_mask(x[:1], tmin=11, tmax=12, sfreq=1,
raise_error=False).sum() == 0
assert _time_mask(x, tmin=10, sfreq=1).sum() == 1
assert _time_mask(x, tmin=6, sfreq=1).sum() == 1
assert _time_mask(x, tmin=5, sfreq=1).sum() == 1
assert _time_mask(x, tmin=4.5001, sfreq=1).sum() == 1
assert _time_mask(x, tmin=4.4999, sfreq=1).sum() == 2
assert _time_mask(x, tmin=4, sfreq=1).sum() == 2
# degenerate cases
with pytest.raises(ValueError, match='No samples remain'):
_time_mask(x[:1], tmin=11, tmax=12)
with pytest.raises(ValueError, match='must be less than or equal to tmax'):
_time_mask(x[:1], tmin=10, sfreq=1)
def test_freq_mask():
"""Test safe frequency masking."""
N = 10
x = np.arange(N).astype(float)
assert _freq_mask(x, 1000., fmin=0, fmax=N - 1).sum() == N
assert _freq_mask(x - 1e-10, 1000., fmin=0, fmax=N - 1).sum() == N
assert _freq_mask(x - 1e-10, 1000., fmin=None, fmax=N - 1).sum() == N
assert _freq_mask(x - 1e-10, 1000., fmin=None, fmax=None).sum() == N
assert _freq_mask(x - 1e-10, 1000., fmin=-np.inf, fmax=None).sum() == N
assert _freq_mask(x - 1e-10, 1000., fmin=None, fmax=np.inf).sum() == N
# non-uniformly spaced inputs
x = np.array([4, 10])
assert _freq_mask(x[:1], 1, fmin=10, raise_error=False).sum() == 0
assert _freq_mask(x[:1], 1, fmin=11, fmax=12,
raise_error=False).sum() == 0
assert _freq_mask(x, sfreq=1, fmin=10).sum() == 1
assert _freq_mask(x, sfreq=1, fmin=6).sum() == 1
assert _freq_mask(x, sfreq=1, fmin=5).sum() == 1
assert _freq_mask(x, sfreq=1, fmin=4.5001).sum() == 1
assert _freq_mask(x, sfreq=1, fmin=4.4999).sum() == 2
assert _freq_mask(x, sfreq=1, fmin=4).sum() == 2
# degenerate cases
with pytest.raises(ValueError, match='sfreq can not be None'):
_freq_mask(x[:1], sfreq=None, fmin=3, fmax=5)
with pytest.raises(ValueError, match='No frequencies remain'):
_freq_mask(x[:1], sfreq=1, fmin=11, fmax=12)
with pytest.raises(ValueError, match='must be less than or equal to fmax'):
_freq_mask(x[:1], sfreq=1, fmin=10)
def test_random_permutation():
"""Test random permutation function."""
n_samples = 10
random_state = 42
python_randperm = random_permutation(n_samples, random_state)
# matlab output when we execute rng(42), randperm(10)
matlab_randperm = np.array([7, 6, 5, 1, 4, 9, 10, 3, 8, 2])
assert_array_equal(python_randperm, matlab_randperm - 1)
def test_cov_scaling():
"""Test rescaling covs."""
evoked = read_evokeds(ave_fname, condition=0, baseline=(None, 0),
proj=True)
cov = read_cov(cov_fname)['data']
cov2 = read_cov(cov_fname)['data']
assert_array_equal(cov, cov2)
evoked.pick_channels([evoked.ch_names[k] for k in pick_types(
evoked.info, meg=True, eeg=True
)])
picks_list = _picks_by_type(evoked.info)
scalings = dict(mag=1e15, grad=1e13, eeg=1e6)
_apply_scaling_cov(cov2, picks_list, scalings=scalings)
_apply_scaling_cov(cov, picks_list, scalings=scalings)
assert_array_equal(cov, cov2)
assert cov.max() > 1
_undo_scaling_cov(cov2, picks_list, scalings=scalings)
_undo_scaling_cov(cov, picks_list, scalings=scalings)
assert_array_equal(cov, cov2)
assert cov.max() < 1
data = evoked.data.copy()
_apply_scaling_array(data, picks_list, scalings=scalings)
_undo_scaling_array(data, picks_list, scalings=scalings)
assert_allclose(data, evoked.data, atol=1e-20)
@requires_version('numpy', '1.17') # hermitian kwarg
@pytest.mark.parametrize('ndim', (2, 3))
def test_reg_pinv(ndim):
"""Test regularization and inversion of covariance matrix."""
# create rank-deficient array
a = np.array([[1., 0., 1.], [0., 1., 0.], [1., 0., 1.]])
for _ in range(ndim - 2):
a = a[np.newaxis]
# Test if rank-deficient matrix without regularization throws
# specific warning
with pytest.warns(RuntimeWarning, match='deficient'):
_reg_pinv(a, reg=0.)
# Test inversion with explicit rank
a_inv_np = np.linalg.pinv(a, hermitian=True)
a_inv_mne, loading_factor, rank = _reg_pinv(a, rank=2)
assert loading_factor == 0
assert rank == 2
assert_allclose(a_inv_np, a_inv_mne, atol=1e-14)
# Test inversion with automatic rank detection
a_inv_mne, _, estimated_rank = _reg_pinv(a, rank=None)
assert_allclose(a_inv_np, a_inv_mne, atol=1e-14)
assert estimated_rank == 2
# Test adding regularization
a_inv_mne, loading_factor, estimated_rank = _reg_pinv(a, reg=2)
# Since A has a diagonal of all ones, loading_factor should equal the
# regularization parameter
assert loading_factor == 2
# The estimated rank should be that of the non-regularized matrix
assert estimated_rank == 2
# Test result against the NumPy version
a_inv_np = np.linalg.pinv(a + loading_factor * np.eye(3), hermitian=True)
assert_allclose(a_inv_np, a_inv_mne, atol=1e-14)
# Test setting rcond
a_inv_np = np.linalg.pinv(a, rcond=0.5)
a_inv_mne, _, estimated_rank = _reg_pinv(a, rcond=0.5)
assert_allclose(a_inv_np, a_inv_mne, atol=1e-14)
assert estimated_rank == 1
# Test inverting an all zero cov
a_inv, loading_factor, estimated_rank = _reg_pinv(np.zeros((3, 3)), reg=2)
assert_array_equal(a_inv, 0)
assert loading_factor == 0
assert estimated_rank == 0
def test_object_size():
"""Test object size estimation."""
assert (object_size(np.ones(10, np.float32)) <
object_size(np.ones(10, np.float64)))
for lower, upper, obj in ((0, 60, ''),
(0, 30, 1),
(0, 30, 1.),
(0, 70, 'foo'),
(0, 150, np.ones(0)),
(0, 150, np.int32(1)),
(150, 500, np.ones(20)),
(100, 400, dict()),
(400, 1000, dict(a=np.ones(50))),
(200, 900, sparse.eye(20, format='csc')),
(200, 900, sparse.eye(20, format='csr'))):
size = object_size(obj)
assert lower < size < upper, \
'%s < %s < %s:\n%s' % (lower, size, upper, obj)
# views work properly
x = dict(a=1)
assert object_size(x) < 1000
x['a'] = np.ones(100000, float)
nb = x['a'].nbytes
sz = object_size(x)
assert nb < sz < nb * 1.01
x['b'] = x['a']
sz = object_size(x)
assert nb < sz < nb * 1.01
x['b'] = x['a'].view()
x['b'].flags.writeable = False
assert x['a'].flags.writeable
sz = object_size(x)
assert nb < sz < nb * 1.01
def test_object_diff_with_nan():
"""Test object diff can handle NaNs."""
d0 = np.array([1, np.nan, 0])
d1 = np.array([1, np.nan, 0])
d2 = np.array([np.nan, 1, 0])
assert object_diff(d0, d1) == ''
assert object_diff(d0, d2) != ''
assert object_diff(np.nan, np.nan) == ''
assert object_diff(np.nan, 3.5) == ' value mismatch (nan, 3.5)\n'
def test_hash():
"""Test dictionary hashing and comparison functions."""
# does hashing all of these types work:
# {dict, list, tuple, ndarray, str, float, int, None}
d0 = dict(a=dict(a=0.1, b='fo', c=1), b=[1, 'b'], c=(), d=np.ones(3),
e=None)
d0[1] = None
d0[2.] = b'123'
d1 = deepcopy(d0)
assert len(object_diff(d0, d1)) == 0
assert len(object_diff(d1, d0)) == 0
assert object_hash(d0) == object_hash(d1)
# change values slightly
d1['data'] = np.ones(3, int)
d1['d'][0] = 0
assert object_hash(d0) != object_hash(d1)
d1 = deepcopy(d0)
assert object_hash(d0) == object_hash(d1)
d1['a']['a'] = 0.11
assert (len(object_diff(d0, d1)) > 0)
assert (len(object_diff(d1, d0)) > 0)
assert object_hash(d0) != object_hash(d1)
d1 = deepcopy(d0)
assert object_hash(d0) == object_hash(d1)
d1['a']['d'] = 0 # non-existent key
assert (len(object_diff(d0, d1)) > 0)
assert (len(object_diff(d1, d0)) > 0)
assert object_hash(d0) != object_hash(d1)
d1 = deepcopy(d0)
assert object_hash(d0) == object_hash(d1)
d1['b'].append(0) # different-length lists
assert (len(object_diff(d0, d1)) > 0)
assert (len(object_diff(d1, d0)) > 0)
assert object_hash(d0) != object_hash(d1)
d1 = deepcopy(d0)
assert object_hash(d0) == object_hash(d1)
d1['e'] = 'foo' # non-None
assert (len(object_diff(d0, d1)) > 0)
assert (len(object_diff(d1, d0)) > 0)
assert object_hash(d0) != object_hash(d1)
d1 = deepcopy(d0)
d2 = deepcopy(d0)
d1['e'] = StringIO()
d2['e'] = StringIO()
d2['e'].write('foo')
assert (len(object_diff(d0, d1)) > 0)
assert (len(object_diff(d1, d0)) > 0)
d1 = deepcopy(d0)
d1[1] = 2
assert (len(object_diff(d0, d1)) > 0)
assert (len(object_diff(d1, d0)) > 0)
assert object_hash(d0) != object_hash(d1)
# generators (and other types) not supported
d1 = deepcopy(d0)
d2 = deepcopy(d0)
d1[1] = (x for x in d0)
d2[1] = (x for x in d0)
pytest.raises(RuntimeError, object_diff, d1, d2)
pytest.raises(RuntimeError, object_hash, d1)
x = sparse.eye(2, 2, format='csc')
y = sparse.eye(2, 2, format='csr')
assert ('type mismatch' in object_diff(x, y))
y = sparse.eye(2, 2, format='csc')
assert len(object_diff(x, y)) == 0
y[1, 1] = 2
assert ('elements' in object_diff(x, y))
y = sparse.eye(3, 3, format='csc')
assert ('shape' in object_diff(x, y))
y = 0
assert ('type mismatch' in object_diff(x, y))
# smoke test for gh-4796
assert object_hash(np.int64(1)) != 0
assert object_hash(np.bool_(True)) != 0
@requires_sklearn
@pytest.mark.parametrize('n_components', (None, 0.9999, 8, 'mle'))
@pytest.mark.parametrize('whiten', (True, False))
def test_pca(n_components, whiten):
"""Test PCA equivalence."""
from sklearn.decomposition import PCA
n_samples, n_dim = 1000, 10
X = np.random.RandomState(0).randn(n_samples, n_dim)
X[:, -1] = np.mean(X[:, :-1], axis=-1) # true X dim is ndim - 1
X_orig = X.copy()
pca_skl = PCA(n_components, whiten=whiten, svd_solver='full')
pca_mne = _PCA(n_components, whiten=whiten)
X_skl = pca_skl.fit_transform(X)
assert_array_equal(X, X_orig)
X_mne = pca_mne.fit_transform(X)
assert_array_equal(X, X_orig)
assert_allclose(X_skl, X_mne)
assert pca_mne.n_components_ == pca_skl.n_components_
for key in ('mean_', 'components_',
'explained_variance_', 'explained_variance_ratio_'):
val_skl, val_mne = getattr(pca_skl, key), getattr(pca_mne, key)
assert_allclose(val_skl, val_mne)
if isinstance(n_components, float):
assert pca_mne.n_components_ == n_dim - 1
elif isinstance(n_components, int):
assert pca_mne.n_components_ == n_components
elif n_components == 'mle':
assert pca_mne.n_components_ == n_dim - 1
else:
assert n_components is None
assert pca_mne.n_components_ == n_dim
def test_array_equal_nan():
"""Test comparing arrays with NaNs."""
a = b = [1, np.nan, 0]
assert not np.array_equal(a, b) # this is the annoying behavior we avoid
assert _array_equal_nan(a, b)
b = [np.nan, 1, 0]
assert not _array_equal_nan(a, b)
a = b = [np.nan] * 2
assert _array_equal_nan(a, b)
def test_julian_conversions():
"""Test julian calendar conversions."""
# https://aa.usno.navy.mil/data/docs/JulianDate.php
# A.D. 1922 Jun 13 12:00:00.0 2423219.000000
# A.D. 2018 Oct 3 12:00:00.0 2458395.000000
jds = [2423219, 2458395, 2445701]
dds = [datetime(1922, 6, 13, 12, 0, 0, tzinfo=timezone.utc),
datetime(2018, 10, 3, 12, 0, 0, tzinfo=timezone.utc),
datetime(1984, 1, 1, 12, 0, 0, tzinfo=timezone.utc)]
cals = [(1922, 6, 13), (2018, 10, 3), (1984, 1, 1)]
for dd, cal, jd in zip(dds, cals, jds):
assert (dd == _julian_to_dt(jd))
assert (cal == _julian_to_cal(jd))
assert (jd == _dt_to_julian(dd))
assert (jd == _cal_to_julian(cal[0], cal[1], cal[2]))
def test_grand_average_empty_sequence():
"""Test if mne.grand_average handles an empty sequence correctly."""
with pytest.raises(ValueError, match='Please pass a list of Evoked'):
grand_average([])
def test_grand_average_len_1():
"""Test if mne.grand_average handles a sequence of length 1 correctly."""
# returns a list of length 1
evokeds = read_evokeds(ave_fname, condition=[0], proj=True)
with pytest.warns(RuntimeWarning, match='Only a single dataset'):
gave = grand_average(evokeds)
assert_allclose(gave.data, evokeds[0].data)
def test_reuse_cycle():
"""Test _ReuseCycle."""
vals = 'abcde'
iterable = _ReuseCycle(vals)
assert ''.join(next(iterable) for _ in range(2 * len(vals))) == vals + vals
# we're back to initial
assert ''.join(next(iterable) for _ in range(2)) == 'ab'
iterable.restore('a')
assert ''.join(next(iterable) for _ in range(10)) == 'acdeabcdea'
assert ''.join(next(iterable) for _ in range(4)) == 'bcde'
# we're back to initial
assert ''.join(next(iterable) for _ in range(3)) == 'abc'
iterable.restore('a')
iterable.restore('b')
iterable.restore('c')
assert ''.join(next(iterable) for _ in range(5)) == 'abcde'
# we're back to initial
assert ''.join(next(iterable) for _ in range(3)) == 'abc'
iterable.restore('a')
iterable.restore('c')
assert ''.join(next(iterable) for _ in range(4)) == 'acde'
assert ''.join(next(iterable) for _ in range(5)) == 'abcde'
# we're back to initial
assert ''.join(next(iterable) for _ in range(3)) == 'abc'
iterable.restore('c')
iterable.restore('a')
with pytest.warns(RuntimeWarning, match='Could not find'):
iterable.restore('a')
assert ''.join(next(iterable) for _ in range(4)) == 'acde'
assert ''.join(next(iterable) for _ in range(5)) == 'abcde'
|
import unittest
import numpy as np
from chainer import testing
from chainercv.transforms import flip_bbox
from chainercv.utils.testing.generate_random_bbox import generate_random_bbox
class TestFlipBbox(unittest.TestCase):
def test_flip_bbox(self):
size = (32, 24)
bbox = generate_random_bbox(10, size, 0, min(size))
out = flip_bbox(bbox, size=size, y_flip=True)
bbox_expected = bbox.copy()
bbox_expected[:, 0] = size[0] - bbox[:, 2]
bbox_expected[:, 2] = size[0] - bbox[:, 0]
np.testing.assert_equal(out, bbox_expected)
out = flip_bbox(bbox, size=size, x_flip=True)
bbox_expected = bbox.copy()
bbox_expected[:, 1] = size[1] - bbox[:, 3]
bbox_expected[:, 3] = size[1] - bbox[:, 1]
np.testing.assert_equal(out, bbox_expected)
testing.run_module(__name__, __file__)
|
from datetime import timedelta
import logging
import nikohomecontrol
import voluptuous as vol
# Import the device class from the component that you want to support
from homeassistant.components.light import ATTR_BRIGHTNESS, PLATFORM_SCHEMA, LightEntity
from homeassistant.const import CONF_HOST
from homeassistant.exceptions import PlatformNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=1)
SCAN_INTERVAL = timedelta(seconds=30)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Required(CONF_HOST): cv.string})
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Niko Home Control light platform."""
host = config[CONF_HOST]
try:
nhc = nikohomecontrol.NikoHomeControl(
{"ip": host, "port": 8000, "timeout": 20000}
)
niko_data = NikoHomeControlData(hass, nhc)
await niko_data.async_update()
except OSError as err:
_LOGGER.error("Unable to access %s (%s)", host, err)
raise PlatformNotReady from err
async_add_entities(
[NikoHomeControlLight(light, niko_data) for light in nhc.list_actions()], True
)
class NikoHomeControlLight(LightEntity):
"""Representation of an Niko Light."""
def __init__(self, light, data):
"""Set up the Niko Home Control light platform."""
self._data = data
self._light = light
self._unique_id = f"light-{light.id}"
self._name = light.name
self._state = light.is_on
self._brightness = None
@property
def unique_id(self):
"""Return unique ID for light."""
return self._unique_id
@property
def name(self):
"""Return the display name of this light."""
return self._name
@property
def brightness(self):
"""Return the brightness of the light."""
return self._brightness
@property
def is_on(self):
"""Return true if light is on."""
return self._state
def turn_on(self, **kwargs):
"""Instruct the light to turn on."""
self._light.brightness = kwargs.get(ATTR_BRIGHTNESS, 255)
_LOGGER.debug("Turn on: %s", self.name)
self._light.turn_on()
def turn_off(self, **kwargs):
"""Instruct the light to turn off."""
_LOGGER.debug("Turn off: %s", self.name)
self._light.turn_off()
async def async_update(self):
"""Get the latest data from NikoHomeControl API."""
await self._data.async_update()
self._state = self._data.get_state(self._light.id)
class NikoHomeControlData:
"""The class for handling data retrieval."""
def __init__(self, hass, nhc):
"""Set up Niko Home Control Data object."""
self._nhc = nhc
self.hass = hass
self.available = True
self.data = {}
self._system_info = None
@Throttle(MIN_TIME_BETWEEN_UPDATES)
async def async_update(self):
"""Get the latest data from the NikoHomeControl API."""
_LOGGER.debug("Fetching async state in bulk")
try:
self.data = await self.hass.async_add_executor_job(
self._nhc.list_actions_raw
)
self.available = True
except OSError as ex:
_LOGGER.error("Unable to retrieve data from Niko, %s", str(ex))
self.available = False
def get_state(self, aid):
"""Find and filter state based on action id."""
for state in self.data:
if state["id"] == aid:
return state["value1"] != 0
_LOGGER.error("Failed to retrieve state off unknown light")
|
from __future__ import absolute_import
import datetime
import json
import logging
import os.path
import sys
from subprocess import check_output
from setuptools import Command
from setuptools import setup, find_packages
from setuptools.command.develop import develop
from setuptools.command.install import install
from setuptools.command.sdist import sdist
ROOT = os.path.realpath(os.path.join(os.path.dirname(__file__)))
# When executing the setup.py, we need to be able to import ourselves, this
# means that we need to add the src/ directory to the sys.path.
sys.path.insert(0, ROOT)
about = {}
with open(os.path.join(ROOT, 'lemur', '__about__.py')) as f:
exec(f.read(), about) # nosec: about file is benign
# Parse requirements files
with open('requirements.txt') as f:
install_requirements = f.read().splitlines()
with open('requirements-tests.txt') as f:
tests_requirements = f.read().splitlines()
with open('requirements-docs.txt') as f:
docs_requirements = f.read().splitlines()
with open('requirements-dev.txt') as f:
dev_requirements = f.read().splitlines()
class SmartInstall(install):
"""
Installs Lemur into the Python environment.
If the package indicator is missing, this will also force a run of
`build_static` which is required for JavaScript assets and other things.
"""
def _needs_static(self):
return not os.path.exists(os.path.join(ROOT, 'lemur/static/dist'))
def run(self):
if self._needs_static():
self.run_command('build_static')
install.run(self)
class DevelopWithBuildStatic(develop):
def install_for_development(self):
self.run_command('build_static')
return develop.install_for_development(self)
class SdistWithBuildStatic(sdist):
def make_release_tree(self, *a, **kw):
dist_path = self.distribution.get_fullname()
sdist.make_release_tree(self, *a, **kw)
self.reinitialize_command('build_static', work_path=dist_path)
self.run_command('build_static')
with open(os.path.join(dist_path, 'lemur-package.json'), 'w') as fp:
json.dump({
'createdAt': datetime.datetime.utcnow().isoformat() + 'Z',
}, fp)
class BuildStatic(Command):
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
logging.info("running [npm install --quiet] in {0}".format(ROOT))
try:
check_output(['npm', 'install', '--quiet'], cwd=ROOT)
logging.info("running [gulp build]")
check_output([os.path.join(ROOT, 'node_modules', '.bin', 'gulp'), 'build'], cwd=ROOT)
logging.info("running [gulp package]")
check_output([os.path.join(ROOT, 'node_modules', '.bin', 'gulp'), 'package'], cwd=ROOT)
except Exception as e:
logging.warn("Unable to build static content")
setup(
name=about["__title__"],
version=about["__version__"],
author=about["__author__"],
author_email=about["__email__"],
url=about["__uri__"],
description=about["__summary__"],
long_description=open(os.path.join(ROOT, 'README.rst')).read(),
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=install_requirements,
extras_require={
'tests': tests_requirements,
'docs': docs_requirements,
'dev': dev_requirements,
},
cmdclass={
'build_static': BuildStatic,
'sdist': SdistWithBuildStatic,
'install': SmartInstall
},
entry_points={
'console_scripts': [
'lemur = lemur.manage:main',
],
'lemur.plugins': [
'verisign_issuer = lemur.plugins.lemur_verisign.plugin:VerisignIssuerPlugin',
'acme_issuer = lemur.plugins.lemur_acme.plugin:ACMEIssuerPlugin',
'acme_http_issuer = lemur.plugins.lemur_acme.plugin:ACMEHttpIssuerPlugin',
'aws_destination = lemur.plugins.lemur_aws.plugin:AWSDestinationPlugin',
'aws_source = lemur.plugins.lemur_aws.plugin:AWSSourcePlugin',
'aws_s3 = lemur.plugins.lemur_aws.plugin:S3DestinationPlugin',
'aws_sns = lemur.plugins.lemur_aws.plugin:SNSNotificationPlugin',
'email_notification = lemur.plugins.lemur_email.plugin:EmailNotificationPlugin',
'slack_notification = lemur.plugins.lemur_slack.plugin:SlackNotificationPlugin',
'java_truststore_export = lemur.plugins.lemur_jks.plugin:JavaTruststoreExportPlugin',
'java_keystore_export = lemur.plugins.lemur_jks.plugin:JavaKeystoreExportPlugin',
'openssl_export = lemur.plugins.lemur_openssl.plugin:OpenSSLExportPlugin',
'atlas_metric = lemur.plugins.lemur_atlas.plugin:AtlasMetricPlugin',
'atlas_metric_redis = lemur.plugins.lemur_atlas_redis.plugin:AtlasMetricRedisPlugin',
'kubernetes_destination = lemur.plugins.lemur_kubernetes.plugin:KubernetesDestinationPlugin',
'cryptography_issuer = lemur.plugins.lemur_cryptography.plugin:CryptographyIssuerPlugin',
'cfssl_issuer = lemur.plugins.lemur_cfssl.plugin:CfsslIssuerPlugin',
'digicert_issuer = lemur.plugins.lemur_digicert.plugin:DigiCertIssuerPlugin',
'digicert_cis_issuer = lemur.plugins.lemur_digicert.plugin:DigiCertCISIssuerPlugin',
'digicert_cis_source = lemur.plugins.lemur_digicert.plugin:DigiCertCISSourcePlugin',
'csr_export = lemur.plugins.lemur_csr.plugin:CSRExportPlugin',
'sftp_destination = lemur.plugins.lemur_sftp.plugin:SFTPDestinationPlugin',
'vault_source = lemur.plugins.lemur_vault_dest.plugin:VaultSourcePlugin',
'vault_desination = lemur.plugins.lemur_vault_dest.plugin:VaultDestinationPlugin',
'adcs_issuer = lemur.plugins.lemur_adcs.plugin:ADCSIssuerPlugin',
'adcs_source = lemur.plugins.lemur_adcs.plugin:ADCSSourcePlugin',
'entrust_issuer = lemur.plugins.lemur_entrust.plugin:EntrustIssuerPlugin',
'entrust_source = lemur.plugins.lemur_entrust.plugin:EntrustSourcePlugin',
'azure_destination = lemur.plugins.lemur_azure_dest.plugin:AzureDestinationPlugin'
],
},
classifiers=[
'Framework :: Flask',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Topic :: Software Development',
"Programming Language :: Python :: 3.5",
"Natural Language :: English",
"License :: OSI Approved :: Apache Software License"
]
)
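# Usage sketch (illustrative; only the extras names come from extras_require
# above, the commands themselves assume a classic setup.py/pip workflow):
#
#   python setup.py install     # SmartInstall runs build_static when the static assets are missing
#   pip install ".[tests]"      # additionally installs the requirements-tests.txt pins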
|
from aiohttp import web
from homeassistant.components.cloud import utils
def test_serialize_text():
"""Test serializing a text response."""
response = web.Response(status=201, text="Hello")
assert utils.aiohttp_serialize_response(response) == {
"status": 201,
"body": "Hello",
"headers": {"Content-Type": "text/plain; charset=utf-8"},
}
def test_serialize_body_str():
"""Test serializing a response with a str as body."""
response = web.Response(status=201, body="Hello")
assert utils.aiohttp_serialize_response(response) == {
"status": 201,
"body": "Hello",
"headers": {"Content-Length": "5", "Content-Type": "text/plain; charset=utf-8"},
}
def test_serialize_body_None():
"""Test serializing a response with a str as body."""
response = web.Response(status=201, body=None)
assert utils.aiohttp_serialize_response(response) == {
"status": 201,
"body": None,
"headers": {},
}
def test_serialize_body_bytes():
"""Test serializing a response with a str as body."""
response = web.Response(status=201, body=b"Hello")
assert utils.aiohttp_serialize_response(response) == {
"status": 201,
"body": "Hello",
"headers": {},
}
def test_serialize_json():
"""Test serializing a JSON response."""
response = web.json_response({"how": "what"})
assert utils.aiohttp_serialize_response(response) == {
"status": 200,
"body": '{"how": "what"}',
"headers": {"Content-Type": "application/json; charset=utf-8"},
}
|
import logging
import unittest
import numpy as np
from gensim.test.utils import datapath
from gensim.models.keyedvectors import KeyedVectors
class TestDataType(unittest.TestCase):
def load_model(self, datatype):
path = datapath('high_precision.kv.txt')
kv = KeyedVectors.load_word2vec_format(path, binary=False,
datatype=datatype)
return kv
def test_high_precision(self):
kv = self.load_model(np.float64)
self.assertAlmostEqual(kv['horse.n.01'][0], -0.0008546282343595379)
self.assertEqual(kv['horse.n.01'][0].dtype, np.float64)
def test_medium_precision(self):
kv = self.load_model(np.float32)
self.assertAlmostEqual(kv['horse.n.01'][0], -0.00085462822)
self.assertEqual(kv['horse.n.01'][0].dtype, np.float32)
def test_low_precision(self):
kv = self.load_model(np.float16)
self.assertAlmostEqual(kv['horse.n.01'][0], -0.00085449)
self.assertEqual(kv['horse.n.01'][0].dtype, np.float16)
def test_type_conversion(self):
path = datapath('high_precision.kv.txt')
binary_path = datapath('high_precision.kv.bin')
model1 = KeyedVectors.load_word2vec_format(path, datatype=np.float16)
model1.save_word2vec_format(binary_path, binary=True)
model2 = KeyedVectors.load_word2vec_format(binary_path, datatype=np.float64, binary=True)
self.assertAlmostEqual(model1["horse.n.01"][0], np.float16(model2["horse.n.01"][0]))
self.assertEqual(model1["horse.n.01"][0].dtype, np.float16)
self.assertEqual(model2["horse.n.01"][0].dtype, np.float64)
if __name__ == '__main__':
logging.root.setLevel(logging.WARNING)
unittest.main()
|
import logging
import requests
import voluptuous as vol
from homeassistant.components.device_tracker import (
DOMAIN,
PLATFORM_SCHEMA,
DeviceScanner,
)
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, HTTP_OK
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_USERNAME, default="admin"): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
}
)
def get_scanner(hass, config):
"""Validate the configuration and return a Xiaomi Device Scanner."""
scanner = XiaomiDeviceScanner(config[DOMAIN])
return scanner if scanner.success_init else None
class XiaomiDeviceScanner(DeviceScanner):
"""This class queries a Xiaomi Mi router.
Adapted from Luci scanner.
"""
def __init__(self, config):
"""Initialize the scanner."""
self.host = config[CONF_HOST]
self.username = config[CONF_USERNAME]
self.password = config[CONF_PASSWORD]
self.last_results = {}
self.token = _get_token(self.host, self.username, self.password)
self.mac2name = None
self.success_init = self.token is not None
def scan_devices(self):
"""Scan for new devices and return a list with found device IDs."""
self._update_info()
return self.last_results
def get_device_name(self, device):
"""Return the name of the given device or None if we don't know."""
if self.mac2name is None:
result = self._retrieve_list_with_retry()
if result:
hosts = [x for x in result if "mac" in x and "name" in x]
mac2name_list = [(x["mac"].upper(), x["name"]) for x in hosts]
self.mac2name = dict(mac2name_list)
else:
# Error, handled in the _retrieve_list_with_retry
return
return self.mac2name.get(device.upper(), None)
def _update_info(self):
"""Ensure the information from the router are up to date.
Returns true if scanning successful.
"""
if not self.success_init:
return False
result = self._retrieve_list_with_retry()
if result:
self._store_result(result)
return True
return False
def _retrieve_list_with_retry(self):
"""Retrieve the device list with a retry if token is invalid.
Return the list if successful.
"""
_LOGGER.info("Refreshing device list")
result = _retrieve_list(self.host, self.token)
if result:
return result
_LOGGER.info("Refreshing token and retrying device list refresh")
self.token = _get_token(self.host, self.username, self.password)
return _retrieve_list(self.host, self.token)
def _store_result(self, result):
"""Extract and store the device list in self.last_results."""
self.last_results = []
for device_entry in result:
# Check if the device is marked as connected
if int(device_entry["online"]) == 1:
self.last_results.append(device_entry["mac"])
def _retrieve_list(host, token, **kwargs):
"""Get device list for the given host."""
url = "http://{}/cgi-bin/luci/;stok={}/api/misystem/devicelist"
url = url.format(host, token)
try:
res = requests.get(url, timeout=5, **kwargs)
except requests.exceptions.Timeout:
_LOGGER.exception("Connection to the router timed out at URL %s", url)
return
if res.status_code != HTTP_OK:
_LOGGER.exception("Connection failed with http code %s", res.status_code)
return
try:
result = res.json()
except ValueError:
# If json decoder could not parse the response
_LOGGER.exception("Failed to parse response from mi router")
return
try:
xiaomi_code = result["code"]
except KeyError:
_LOGGER.exception("No field code in response from mi router. %s", result)
return
if xiaomi_code == 0:
try:
return result["list"]
except KeyError:
_LOGGER.exception("No list in response from mi router. %s", result)
return
else:
_LOGGER.info(
"Receive wrong Xiaomi code %s, expected 0 in response %s",
xiaomi_code,
result,
)
return
def _get_token(host, username, password):
"""Get authentication token for the given host+username+password."""
url = f"http://{host}/cgi-bin/luci/api/xqsystem/login"
data = {"username": username, "password": password}
try:
res = requests.post(url, data=data, timeout=5)
except requests.exceptions.Timeout:
_LOGGER.exception("Connection to the router timed out")
return
if res.status_code == HTTP_OK:
try:
result = res.json()
except ValueError:
# If JSON decoder could not parse the response
_LOGGER.exception("Failed to parse response from mi router")
return
try:
return result["token"]
except KeyError:
error_message = (
"Xiaomi token cannot be refreshed, response from "
+ "url: [%s] \nwith parameter: [%s] \nwas: [%s]"
)
_LOGGER.exception(error_message, url, data, result)
return
else:
_LOGGER.error(
"Invalid response: [%s] at url: [%s] with data [%s]", res, url, data
)
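# Illustrative configuration sketch (not taken from this file): the schema above
# requires host, username and password; the platform key is assumed to be
# "xiaomi" and all values are placeholders.
#
# device_tracker:
#   - platform: xiaomi
#     host: 192.168.0.1
#     username: admin
#     password: YOUR_ROUTER_PASSWORD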
|
from datetime import timedelta
from homeassistant.components.bluetooth_le_tracker import device_tracker
from homeassistant.components.device_tracker.const import (
CONF_SCAN_INTERVAL,
CONF_TRACK_NEW,
DOMAIN,
)
from homeassistant.const import CONF_PLATFORM
from homeassistant.setup import async_setup_component
from homeassistant.util import dt as dt_util, slugify
from tests.async_mock import patch
from tests.common import async_fire_time_changed
async def test_preserve_new_tracked_device_name(hass, mock_device_tracker_conf):
"""Test preserving tracked device name across new seens."""
address = "DE:AD:BE:EF:13:37"
name = "Mock device name"
entity_id = f"{DOMAIN}.{slugify(name)}"
with patch(
"homeassistant.components."
"bluetooth_le_tracker.device_tracker.pygatt.GATTToolBackend"
) as mock_backend, patch.object(device_tracker, "MIN_SEEN_NEW", 3):
# Return with name when seen first time
device = {"address": address, "name": name}
mock_backend.return_value.scan.return_value = [device]
config = {
CONF_PLATFORM: "bluetooth_le_tracker",
CONF_SCAN_INTERVAL: timedelta(minutes=1),
CONF_TRACK_NEW: True,
}
result = await async_setup_component(hass, DOMAIN, {DOMAIN: config})
assert result
        # Seen once here; return without a name on subsequent scans
        device["name"] = None
        # Tick until the device has been seen enough times to be registered for tracking
for _ in range(device_tracker.MIN_SEEN_NEW - 1):
async_fire_time_changed(
hass,
dt_util.utcnow() + config[CONF_SCAN_INTERVAL] + timedelta(seconds=1),
)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state
assert state.name == name
|
from datetime import timedelta
from homeassistant import data_entry_flow
from homeassistant.components.geonetnz_volcano import config_flow
from homeassistant.const import (
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_RADIUS,
CONF_SCAN_INTERVAL,
CONF_UNIT_SYSTEM,
)
from tests.async_mock import patch
async def test_duplicate_error(hass, config_entry):
"""Test that errors are shown when duplicates are added."""
conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25}
config_entry.add_to_hass(hass)
flow = config_flow.GeonetnzVolcanoFlowHandler()
flow.hass = hass
result = await flow.async_step_user(user_input=conf)
assert result["errors"] == {"base": "already_configured"}
async def test_show_form(hass):
"""Test that the form is served with no input."""
flow = config_flow.GeonetnzVolcanoFlowHandler()
flow.hass = hass
result = await flow.async_step_user(user_input=None)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
async def test_step_import(hass):
"""Test that the import step works."""
conf = {
CONF_LATITUDE: -41.2,
CONF_LONGITUDE: 174.7,
CONF_RADIUS: 25,
CONF_UNIT_SYSTEM: "metric",
CONF_SCAN_INTERVAL: timedelta(minutes=4),
}
flow = config_flow.GeonetnzVolcanoFlowHandler()
flow.hass = hass
with patch(
"homeassistant.components.geonetnz_volcano.async_setup_entry", return_value=True
), patch(
"homeassistant.components.geonetnz_volcano.async_setup", return_value=True
):
result = await flow.async_step_import(import_config=conf)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "-41.2, 174.7"
assert result["data"] == {
CONF_LATITUDE: -41.2,
CONF_LONGITUDE: 174.7,
CONF_RADIUS: 25,
CONF_UNIT_SYSTEM: "metric",
CONF_SCAN_INTERVAL: 240.0,
}
async def test_step_user(hass):
"""Test that the user step works."""
hass.config.latitude = -41.2
hass.config.longitude = 174.7
conf = {CONF_RADIUS: 25}
flow = config_flow.GeonetnzVolcanoFlowHandler()
flow.hass = hass
with patch(
"homeassistant.components.geonetnz_volcano.async_setup_entry", return_value=True
), patch(
"homeassistant.components.geonetnz_volcano.async_setup", return_value=True
):
result = await flow.async_step_user(user_input=conf)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "-41.2, 174.7"
assert result["data"] == {
CONF_LATITUDE: -41.2,
CONF_LONGITUDE: 174.7,
CONF_RADIUS: 25,
CONF_UNIT_SYSTEM: "metric",
CONF_SCAN_INTERVAL: 300.0,
}
|
import os
import unittest
import memcached_slab
fixtures = os.path.join(os.path.dirname(__file__), 'fixtures', 'stats')
with open(fixtures, 'rb') as f:
RAW_SLAB_STATS = f.read()
class MemcachedSlabCollectorTestCase(unittest.TestCase):
def test_dict_to_paths(self):
dict_ = {
'foo': {
1: {
'baz': 1,
'bam': 2,
},
},
'car': 3,
}
metrics = memcached_slab.dict_to_paths(dict_)
self.assertEqual(metrics['foo.1.baz'], 1)
self.assertEqual(metrics['foo.1.bam'], 2)
self.assertEqual(metrics['car'], 3)
def test_parse_slab_stats(self):
slab_stats = memcached_slab.parse_slab_stats(RAW_SLAB_STATS)
self.assertEqual(slab_stats['slabs'][1]['chunk_size'], 96)
self.assertEqual(slab_stats['slabs'][1]['chunks_per_page'], 10922)
self.assertEqual(slab_stats['active_slabs'], 1)
self.assertEqual(slab_stats['total_malloced'], 1048512)
if __name__ == '__main__':
unittest.main()
|
from axis.event_stream import CLASS_LIGHT
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
SUPPORT_BRIGHTNESS,
LightEntity,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .axis_base import AxisEventBase
from .const import DOMAIN as AXIS_DOMAIN
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up a Axis light."""
device = hass.data[AXIS_DOMAIN][config_entry.unique_id]
if not device.api.vapix.light_control:
return
@callback
def async_add_sensor(event_id):
"""Add light from Axis device."""
event = device.api.event[event_id]
if event.CLASS == CLASS_LIGHT and event.TYPE == "Light":
async_add_entities([AxisLight(event, device)])
device.listeners.append(
async_dispatcher_connect(hass, device.signal_new_event, async_add_sensor)
)
class AxisLight(AxisEventBase, LightEntity):
"""Representation of a light Axis event."""
def __init__(self, event, device):
"""Initialize the Axis light."""
super().__init__(event, device)
self.light_id = f"led{self.event.id}"
self.current_intensity = 0
self.max_intensity = 0
self._features = SUPPORT_BRIGHTNESS
async def async_added_to_hass(self) -> None:
"""Subscribe lights events."""
await super().async_added_to_hass()
current_intensity = (
await self.device.api.vapix.light_control.get_current_intensity(
self.light_id
)
)
self.current_intensity = current_intensity["data"]["intensity"]
max_intensity = await self.device.api.vapix.light_control.get_valid_intensity(
self.light_id
)
self.max_intensity = max_intensity["data"]["ranges"][0]["high"]
@property
def supported_features(self):
"""Flag supported features."""
return self._features
@property
def name(self):
"""Return the name of the light."""
light_type = self.device.api.vapix.light_control[self.light_id].light_type
return f"{self.device.name} {light_type} {self.event.TYPE} {self.event.id}"
@property
def is_on(self):
"""Return true if light is on."""
return self.event.is_tripped
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return int((self.current_intensity / self.max_intensity) * 255)
async def async_turn_on(self, **kwargs):
"""Turn on light."""
if not self.is_on:
await self.device.api.vapix.light_control.activate_light(self.light_id)
if ATTR_BRIGHTNESS in kwargs:
intensity = int((kwargs[ATTR_BRIGHTNESS] / 255) * self.max_intensity)
await self.device.api.vapix.light_control.set_manual_intensity(
self.light_id, intensity
)
async def async_turn_off(self, **kwargs):
"""Turn off light."""
if self.is_on:
await self.device.api.vapix.light_control.deactivate_light(self.light_id)
async def async_update(self):
"""Update brightness."""
current_intensity = (
await self.device.api.vapix.light_control.get_current_intensity(
self.light_id
)
)
self.current_intensity = current_intensity["data"]["intensity"]
@property
def should_poll(self):
"""Brightness needs polling."""
return True
|
from __future__ import print_function
import random
import string
from datetime import datetime as dt
import pytest
import six
import arctic._compression as c
try:
from lz4.block import compress as lz4_compress, decompress as lz4_decompress
lz4_compressHC = lambda _str: lz4_compress(_str, mode='high_compression')
except ImportError as e:
from lz4 import compress as lz4_compress, compressHC as lz4_compressHC, decompress as lz4_decompress
@pytest.mark.parametrize("compress,decompress", [
(c.compress, lz4_decompress),
(c.compressHC, lz4_decompress),
(lz4_compress, c.decompress),
(lz4_compressHC, c.decompress)
], ids=('arctic/lz4',
'arcticHC/lz4',
'lz4/arctic',
'lz4HC/arctic'))
def test_roundtrip(compress, decompress):
_str = b"hello world"
cstr = compress(_str)
assert _str == decompress(cstr)
@pytest.mark.parametrize("n, length", [(300, 5e4), # micro TS
(5, 2e6), # Futures TS
(10, 2e6), # Futures TS
(100, 2e6), # Large TS
(250, 2e6)]) # Even Bigger TS
def test_performance_sequential(n, length):
_str = random_string(length)
_strarr = [_str for _ in range(n)]
now = dt.now()
[c.decompress(y) for y in [c.compressHC(x) for x in _strarr]]
clz4_time = (dt.now() - now).total_seconds()
now = dt.now()
c.decompress_array(c.compressHC_array(_strarr))
clz4_time_p = (dt.now() - now).total_seconds()
now = dt.now()
[lz4_decompress(y) for y in [lz4_compress(x) for x in _strarr]]
lz4_time = (dt.now() - now).total_seconds()
print()
print("LZ4 Test %sx len:%s" % (n, length))
print(" LZ4 HC %s s" % clz4_time)
print(" LZ4 HC Parallel %s s" % clz4_time_p)
print(" LZ4 %s s" % lz4_time)
def random_string(N):
_str = ''.join(random.choice(list(string.printable) + ['hello', 'world', 'hellworld', 'Hello', 'w0rld']) for _ in six.moves.xrange(int(N)))
return _str.encode('ascii')
def test_exceptions():
data = c.compress(b'1010101010100000000000000000000000000000000000000000000000000000000011111111111111111111111111111')
data = data[0:16]
with pytest.raises(Exception) as e:
c.decompress(data)
assert("decompressor wrote" in str(e.value).lower() or "corrupt input at" in str(e.value).lower() or "decompression failed: corrupt input" in str(e.value).lower())
data = c.compress(b'1010101010100000000000000000000000000000000000000000000000000000000011111111111111111111111111111')
data = [data[0:16] for x in (1, 2, 3)]
with pytest.raises(Exception) as e:
c.decompress_array(data)
assert ("decompressor wrote" in str(e.value).lower() or "corrupt input at" in str(e.value).lower() or "decompression failed: corrupt input" in str(e.value).lower())
|
import asyncio
import logging
from types import MappingProxyType
from typing import Any, Callable, Dict, List, Optional, Union
import voluptuous as vol
from homeassistant.const import CONF_PLATFORM
from homeassistant.core import CALLBACK_TYPE, callback
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from homeassistant.loader import IntegrationNotFound, async_get_integration
_PLATFORM_ALIASES = {
"device_automation": ("device",),
"homeassistant": ("event", "numeric_state", "state", "time_pattern", "time"),
}
async def _async_get_trigger_platform(
hass: HomeAssistantType, config: ConfigType
) -> Any:
platform = config[CONF_PLATFORM]
for alias, triggers in _PLATFORM_ALIASES.items():
if platform in triggers:
platform = alias
break
try:
integration = await async_get_integration(hass, platform)
except IntegrationNotFound:
raise vol.Invalid(f"Invalid platform '{platform}' specified") from None
try:
return integration.get_platform("trigger")
except ImportError:
raise vol.Invalid(
f"Integration '{platform}' does not provide trigger support"
) from None
async def async_validate_trigger_config(
hass: HomeAssistantType, trigger_config: List[ConfigType]
) -> List[ConfigType]:
"""Validate triggers."""
config = []
for conf in trigger_config:
platform = await _async_get_trigger_platform(hass, conf)
if hasattr(platform, "async_validate_trigger_config"):
conf = await platform.async_validate_trigger_config(hass, conf)
else:
conf = platform.TRIGGER_SCHEMA(conf)
config.append(conf)
return config
async def async_initialize_triggers(
hass: HomeAssistantType,
trigger_config: List[ConfigType],
action: Callable,
domain: str,
name: str,
log_cb: Callable,
home_assistant_start: bool = False,
variables: Optional[Union[Dict[str, Any], MappingProxyType]] = None,
) -> Optional[CALLBACK_TYPE]:
"""Initialize triggers."""
info = {
"domain": domain,
"name": name,
"home_assistant_start": home_assistant_start,
"variables": variables,
}
triggers = []
for conf in trigger_config:
platform = await _async_get_trigger_platform(hass, conf)
triggers.append(platform.async_attach_trigger(hass, conf, action, info))
removes = await asyncio.gather(*triggers)
if None in removes:
log_cb(logging.ERROR, "Error setting up trigger")
removes = list(filter(None, removes))
if not removes:
return None
log_cb(logging.INFO, "Initialized trigger")
@callback
def remove_triggers(): # type: ignore
"""Remove triggers."""
for remove in removes:
remove()
return remove_triggers
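# Illustrative trigger config (a sketch; the entity id and values are placeholders):
# async_validate_trigger_config would accept something like
#   [{"platform": "state", "entity_id": "light.kitchen", "to": "on"}]
# because "state" is listed under the "homeassistant" alias in _PLATFORM_ALIASES
# and therefore resolves to that integration's trigger platform.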
|
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_HS_COLOR,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
LightEntity,
)
import homeassistant.util.color as color_util
from . import DATA_HIVE, DOMAIN, HiveEntity, refresh_system
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up Hive light devices."""
if discovery_info is None:
return
session = hass.data.get(DATA_HIVE)
devs = []
for dev in discovery_info:
devs.append(HiveDeviceLight(session, dev))
add_entities(devs)
class HiveDeviceLight(HiveEntity, LightEntity):
"""Hive Active Light Device."""
def __init__(self, hive_session, hive_device):
"""Initialize the Light device."""
super().__init__(hive_session, hive_device)
self.light_device_type = hive_device["Hive_Light_DeviceType"]
@property
def unique_id(self):
"""Return unique ID of entity."""
return self._unique_id
@property
def device_info(self):
"""Return device information."""
return {"identifiers": {(DOMAIN, self.unique_id)}, "name": self.name}
@property
def name(self):
"""Return the display name of this light."""
return self.node_name
@property
def device_state_attributes(self):
"""Show Device Attributes."""
return self.attributes
@property
def brightness(self):
"""Brightness of the light (an integer in the range 1-255)."""
return self.session.light.get_brightness(self.node_id)
@property
def min_mireds(self):
"""Return the coldest color_temp that this light supports."""
if (
self.light_device_type == "tuneablelight"
or self.light_device_type == "colourtuneablelight"
):
return self.session.light.get_min_color_temp(self.node_id)
@property
def max_mireds(self):
"""Return the warmest color_temp that this light supports."""
if (
self.light_device_type == "tuneablelight"
or self.light_device_type == "colourtuneablelight"
):
return self.session.light.get_max_color_temp(self.node_id)
@property
def color_temp(self):
"""Return the CT color value in mireds."""
if (
self.light_device_type == "tuneablelight"
or self.light_device_type == "colourtuneablelight"
):
return self.session.light.get_color_temp(self.node_id)
@property
def hs_color(self) -> tuple:
"""Return the hs color value."""
if self.light_device_type == "colourtuneablelight":
rgb = self.session.light.get_color(self.node_id)
return color_util.color_RGB_to_hs(*rgb)
@property
def is_on(self):
"""Return true if light is on."""
return self.session.light.get_state(self.node_id)
@refresh_system
def turn_on(self, **kwargs):
"""Instruct the light to turn on."""
new_brightness = None
new_color_temp = None
new_color = None
if ATTR_BRIGHTNESS in kwargs:
tmp_new_brightness = kwargs.get(ATTR_BRIGHTNESS)
percentage_brightness = (tmp_new_brightness / 255) * 100
new_brightness = int(round(percentage_brightness / 5.0) * 5.0)
if new_brightness == 0:
new_brightness = 5
if ATTR_COLOR_TEMP in kwargs:
tmp_new_color_temp = kwargs.get(ATTR_COLOR_TEMP)
new_color_temp = round(1000000 / tmp_new_color_temp)
if ATTR_HS_COLOR in kwargs:
get_new_color = kwargs.get(ATTR_HS_COLOR)
hue = int(get_new_color[0])
saturation = int(get_new_color[1])
new_color = (hue, saturation, self.brightness)
self.session.light.turn_on(
self.node_id,
self.light_device_type,
new_brightness,
new_color_temp,
new_color,
)
@refresh_system
def turn_off(self, **kwargs):
"""Instruct the light to turn off."""
self.session.light.turn_off(self.node_id)
@property
def supported_features(self):
"""Flag supported features."""
supported_features = None
if self.light_device_type == "warmwhitelight":
supported_features = SUPPORT_BRIGHTNESS
elif self.light_device_type == "tuneablelight":
supported_features = SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP
elif self.light_device_type == "colourtuneablelight":
supported_features = SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP | SUPPORT_COLOR
return supported_features
def update(self):
"""Update all Node data from Hive."""
self.session.core.update_data(self.node_id)
self.attributes = self.session.attributes.state_attributes(self.node_id)
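# Worked example of the brightness mapping in turn_on above (values follow
# directly from the arithmetic in that method; shown for illustration only):
#   ATTR_BRIGHTNESS=128 -> (128 / 255) * 100 ≈ 50.2 -> rounded to nearest 5 -> 50
#   ATTR_BRIGHTNESS=1   -> ≈ 0.4 -> rounds to 0 -> bumped up to the minimum of 5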
|
import contextlib
import functools
from datetime import datetime, timedelta
from itertools import chain, zip_longest
from typing import Hashable
import numpy as np
import pandas as pd
from pandas.errors import OutOfBoundsDatetime
from .duck_array_ops import array_equiv
from .options import OPTIONS
from .pycompat import dask_array_type, sparse_array_type
from .utils import is_duck_array
def pretty_print(x, numchars: int):
"""Given an object `x`, call `str(x)` and format the returned string so
that it is numchars long, padding with trailing spaces or truncating with
ellipses as necessary
"""
s = maybe_truncate(x, numchars)
return s + " " * max(numchars - len(s), 0)
def maybe_truncate(obj, maxlen=500):
s = str(obj)
if len(s) > maxlen:
s = s[: (maxlen - 3)] + "..."
return s
def wrap_indent(text, start="", length=None):
if length is None:
length = len(start)
indent = "\n" + " " * length
return start + indent.join(x for x in text.splitlines())
def _get_indexer_at_least_n_items(shape, n_desired, from_end):
assert 0 < n_desired <= np.prod(shape)
cum_items = np.cumprod(shape[::-1])
n_steps = np.argmax(cum_items >= n_desired)
stop = int(np.ceil(float(n_desired) / np.r_[1, cum_items][n_steps]))
indexer = (
((-1 if from_end else 0),) * (len(shape) - 1 - n_steps)
+ ((slice(-stop, None) if from_end else slice(stop)),)
+ (slice(None),) * n_steps
)
return indexer
def first_n_items(array, n_desired):
"""Returns the first n_desired items of an array"""
# Unfortunately, we can't just do array.flat[:n_desired] here because it
# might not be a numpy.ndarray. Moreover, access to elements of the array
# could be very expensive (e.g. if it's only available over DAP), so go out
# of our way to get them in a single call to __getitem__ using only slices.
if n_desired < 1:
raise ValueError("must request at least one item")
if array.size == 0:
# work around for https://github.com/numpy/numpy/issues/5195
return []
if n_desired < array.size:
indexer = _get_indexer_at_least_n_items(array.shape, n_desired, from_end=False)
array = array[indexer]
return np.asarray(array).flat[:n_desired]
def last_n_items(array, n_desired):
"""Returns the last n_desired items of an array"""
# Unfortunately, we can't just do array.flat[-n_desired:] here because it
# might not be a numpy.ndarray. Moreover, access to elements of the array
# could be very expensive (e.g. if it's only available over DAP), so go out
# of our way to get them in a single call to __getitem__ using only slices.
if (n_desired == 0) or (array.size == 0):
return []
if n_desired < array.size:
indexer = _get_indexer_at_least_n_items(array.shape, n_desired, from_end=True)
array = array[indexer]
return np.asarray(array).flat[-n_desired:]
def last_item(array):
"""Returns the last item of an array in a list or an empty list."""
if array.size == 0:
# work around for https://github.com/numpy/numpy/issues/5195
return []
indexer = (slice(-1, None),) * array.ndim
return np.ravel(np.asarray(array[indexer])).tolist()
def format_timestamp(t):
"""Cast given object to a Timestamp and return a nicely formatted string"""
# Timestamp is only valid for 1678 to 2262
try:
datetime_str = str(pd.Timestamp(t))
except OutOfBoundsDatetime:
datetime_str = str(t)
try:
date_str, time_str = datetime_str.split()
except ValueError:
# catch NaT and others that don't split nicely
return datetime_str
else:
if time_str == "00:00:00":
return date_str
else:
return f"{date_str}T{time_str}"
def format_timedelta(t, timedelta_format=None):
"""Cast given object to a Timestamp and return a nicely formatted string"""
timedelta_str = str(pd.Timedelta(t))
try:
days_str, time_str = timedelta_str.split(" days ")
except ValueError:
# catch NaT and others that don't split nicely
return timedelta_str
else:
if timedelta_format == "date":
return days_str + " days"
elif timedelta_format == "time":
return time_str
else:
return timedelta_str
def format_item(x, timedelta_format=None, quote_strings=True):
"""Returns a succinct summary of an object as a string"""
if isinstance(x, (np.datetime64, datetime)):
return format_timestamp(x)
if isinstance(x, (np.timedelta64, timedelta)):
return format_timedelta(x, timedelta_format=timedelta_format)
elif isinstance(x, (str, bytes)):
return repr(x) if quote_strings else x
elif np.issubdtype(type(x), np.floating):
return f"{x:.4}"
else:
return str(x)
def format_items(x):
"""Returns a succinct summaries of all items in a sequence as strings"""
x = np.asarray(x)
timedelta_format = "datetime"
if np.issubdtype(x.dtype, np.timedelta64):
x = np.asarray(x, dtype="timedelta64[ns]")
day_part = x[~pd.isnull(x)].astype("timedelta64[D]").astype("timedelta64[ns]")
time_needed = x[~pd.isnull(x)] != day_part
day_needed = day_part != np.timedelta64(0, "ns")
if np.logical_not(day_needed).all():
timedelta_format = "time"
elif np.logical_not(time_needed).all():
timedelta_format = "date"
formatted = [format_item(xi, timedelta_format) for xi in x]
return formatted
def format_array_flat(array, max_width: int):
"""Return a formatted string for as many items in the flattened version of
array that will fit within max_width characters.
"""
# every item will take up at least two characters, but we always want to
# print at least first and last items
max_possibly_relevant = min(
max(array.size, 1), max(int(np.ceil(max_width / 2.0)), 2)
)
relevant_front_items = format_items(
first_n_items(array, (max_possibly_relevant + 1) // 2)
)
relevant_back_items = format_items(last_n_items(array, max_possibly_relevant // 2))
# interleave relevant front and back items:
# [a, b, c] and [y, z] -> [a, z, b, y, c]
relevant_items = sum(
zip_longest(relevant_front_items, reversed(relevant_back_items)), ()
)[:max_possibly_relevant]
cum_len = np.cumsum([len(s) + 1 for s in relevant_items]) - 1
if (array.size > 2) and (
(max_possibly_relevant < array.size) or (cum_len > max_width).any()
):
padding = " ... "
count = min(
array.size, max(np.argmax(cum_len + len(padding) - 1 > max_width), 2)
)
else:
count = array.size
padding = "" if (count <= 1) else " "
num_front = (count + 1) // 2
num_back = count - num_front
# note that num_back is 0 <--> array.size is 0 or 1
# <--> relevant_back_items is []
pprint_str = "".join(
[
" ".join(relevant_front_items[:num_front]),
padding,
" ".join(relevant_back_items[-num_back:]),
]
)
# As a final check, if it's still too long even with the limit in values,
# replace the end with an ellipsis
    # NB: this will still return a full 3-character ellipsis when max_width < 3
if len(pprint_str) > max_width:
pprint_str = pprint_str[: max(max_width - 3, 0)] + "..."
return pprint_str
_KNOWN_TYPE_REPRS = {np.ndarray: "np.ndarray"}
with contextlib.suppress(ImportError):
import sparse
_KNOWN_TYPE_REPRS[sparse.COO] = "sparse.COO"
def inline_dask_repr(array):
"""Similar to dask.array.DataArray.__repr__, but without
redundant information that's already printed by the repr
function of the xarray wrapper.
"""
assert isinstance(array, dask_array_type), array
chunksize = tuple(c[0] for c in array.chunks)
if hasattr(array, "_meta"):
meta = array._meta
if type(meta) in _KNOWN_TYPE_REPRS:
meta_repr = _KNOWN_TYPE_REPRS[type(meta)]
else:
meta_repr = type(meta).__name__
meta_string = f", meta={meta_repr}"
else:
meta_string = ""
return f"dask.array<chunksize={chunksize}{meta_string}>"
def inline_sparse_repr(array):
"""Similar to sparse.COO.__repr__, but without the redundant shape/dtype."""
assert isinstance(array, sparse_array_type), array
return "<{}: nnz={:d}, fill_value={!s}>".format(
type(array).__name__, array.nnz, array.fill_value
)
def inline_variable_array_repr(var, max_width):
"""Build a one-line summary of a variable's data."""
if var._in_memory:
return format_array_flat(var, max_width)
elif isinstance(var._data, dask_array_type):
return inline_dask_repr(var.data)
elif isinstance(var._data, sparse_array_type):
return inline_sparse_repr(var.data)
elif hasattr(var._data, "_repr_inline_"):
return var._data._repr_inline_(max_width)
elif hasattr(var._data, "__array_function__"):
return maybe_truncate(repr(var._data).replace("\n", " "), max_width)
else:
# internal xarray array type
return "..."
def summarize_variable(
name: Hashable, var, col_width: int, marker: str = " ", max_width: int = None
):
"""Summarize a variable in one line, e.g., for the Dataset.__repr__."""
if max_width is None:
max_width_options = OPTIONS["display_width"]
if not isinstance(max_width_options, int):
raise TypeError(f"`max_width` value of `{max_width}` is not a valid int")
else:
max_width = max_width_options
first_col = pretty_print(f" {marker} {name} ", col_width)
if var.dims:
dims_str = "({}) ".format(", ".join(map(str, var.dims)))
else:
dims_str = ""
front_str = f"{first_col}{dims_str}{var.dtype} "
values_width = max_width - len(front_str)
values_str = inline_variable_array_repr(var, values_width)
return front_str + values_str
def _summarize_coord_multiindex(coord, col_width, marker):
first_col = pretty_print(f" {marker} {coord.name} ", col_width)
return "{}({}) MultiIndex".format(first_col, str(coord.dims[0]))
def _summarize_coord_levels(coord, col_width, marker="-"):
return "\n".join(
summarize_variable(
lname, coord.get_level_variable(lname), col_width, marker=marker
)
for lname in coord.level_names
)
def summarize_datavar(name, var, col_width):
return summarize_variable(name, var.variable, col_width)
def summarize_coord(name: Hashable, var, col_width: int):
is_index = name in var.dims
marker = "*" if is_index else " "
if is_index:
coord = var.variable.to_index_variable()
if coord.level_names is not None:
return "\n".join(
[
_summarize_coord_multiindex(coord, col_width, marker),
_summarize_coord_levels(coord, col_width),
]
)
return summarize_variable(name, var.variable, col_width, marker)
def summarize_attr(key, value, col_width=None):
"""Summary for __repr__ - use ``X.attrs[key]`` for full value."""
# Indent key and add ':', then right-pad if col_width is not None
k_str = f" {key}:"
if col_width is not None:
k_str = pretty_print(k_str, col_width)
# Replace tabs and newlines, so we print on one line in known width
v_str = str(value).replace("\t", "\\t").replace("\n", "\\n")
# Finally, truncate to the desired display width
return maybe_truncate(f"{k_str} {v_str}", OPTIONS["display_width"])
EMPTY_REPR = " *empty*"
def _get_col_items(mapping):
"""Get all column items to format, including both keys of `mapping`
and MultiIndex levels if any.
"""
from .variable import IndexVariable
col_items = []
for k, v in mapping.items():
col_items.append(k)
var = getattr(v, "variable", v)
if isinstance(var, IndexVariable):
level_names = var.to_index_variable().level_names
if level_names is not None:
col_items += list(level_names)
return col_items
def _calculate_col_width(col_items):
max_name_length = max(len(str(s)) for s in col_items) if col_items else 0
col_width = max(max_name_length, 7) + 6
return col_width
def _mapping_repr(mapping, title, summarizer, col_width=None):
if col_width is None:
col_width = _calculate_col_width(mapping)
summary = [f"{title}:"]
if mapping:
summary += [summarizer(k, v, col_width) for k, v in mapping.items()]
else:
summary += [EMPTY_REPR]
return "\n".join(summary)
data_vars_repr = functools.partial(
_mapping_repr, title="Data variables", summarizer=summarize_datavar
)
attrs_repr = functools.partial(
_mapping_repr, title="Attributes", summarizer=summarize_attr
)
def coords_repr(coords, col_width=None):
if col_width is None:
col_width = _calculate_col_width(_get_col_items(coords))
return _mapping_repr(
coords, title="Coordinates", summarizer=summarize_coord, col_width=col_width
)
def indexes_repr(indexes):
summary = []
for k, v in indexes.items():
summary.append(wrap_indent(repr(v), f"{k}: "))
return "\n".join(summary)
def dim_summary(obj):
elements = [f"{k}: {v}" for k, v in obj.sizes.items()]
return ", ".join(elements)
def unindexed_dims_repr(dims, coords):
unindexed_dims = [d for d in dims if d not in coords]
if unindexed_dims:
dims_str = ", ".join(f"{d}" for d in unindexed_dims)
return "Dimensions without coordinates: " + dims_str
else:
return None
@contextlib.contextmanager
def set_numpy_options(*args, **kwargs):
original = np.get_printoptions()
np.set_printoptions(*args, **kwargs)
try:
yield
finally:
np.set_printoptions(**original)
def limit_lines(string: str, *, limit: int):
"""
    If the string has more lines than the limit,
    return it with the middle lines replaced by an ellipsis.
"""
lines = string.splitlines()
if len(lines) > limit:
string = "\n".join(chain(lines[: limit // 2], ["..."], lines[-limit // 2 :]))
return string
def short_numpy_repr(array):
array = np.asarray(array)
# default to lower precision so a full (abbreviated) line can fit on
# one line with the default display_width
options = {"precision": 6, "linewidth": OPTIONS["display_width"], "threshold": 200}
if array.ndim < 3:
edgeitems = 3
elif array.ndim == 3:
edgeitems = 2
else:
edgeitems = 1
options["edgeitems"] = edgeitems
with set_numpy_options(**options):
return repr(array)
def short_data_repr(array):
"""Format "data" for DataArray and Variable."""
internal_data = getattr(array, "variable", array)._data
if isinstance(array, np.ndarray):
return short_numpy_repr(array)
elif is_duck_array(internal_data):
return limit_lines(repr(array.data), limit=40)
elif array._in_memory or array.size < 1e5:
return short_numpy_repr(array)
else:
# internal xarray array type
return f"[{array.size} values with dtype={array.dtype}]"
def array_repr(arr):
# used for DataArray, Variable and IndexVariable
if hasattr(arr, "name") and arr.name is not None:
name_str = f"{arr.name!r} "
else:
name_str = ""
summary = [
"<xarray.{} {}({})>".format(type(arr).__name__, name_str, dim_summary(arr)),
short_data_repr(arr),
]
if hasattr(arr, "coords"):
if arr.coords:
summary.append(repr(arr.coords))
unindexed_dims_str = unindexed_dims_repr(arr.dims, arr.coords)
if unindexed_dims_str:
summary.append(unindexed_dims_str)
if arr.attrs:
summary.append(attrs_repr(arr.attrs))
return "\n".join(summary)
def dataset_repr(ds):
summary = ["<xarray.{}>".format(type(ds).__name__)]
col_width = _calculate_col_width(_get_col_items(ds.variables))
dims_start = pretty_print("Dimensions:", col_width)
summary.append("{}({})".format(dims_start, dim_summary(ds)))
if ds.coords:
summary.append(coords_repr(ds.coords, col_width=col_width))
unindexed_dims_str = unindexed_dims_repr(ds.dims, ds.coords)
if unindexed_dims_str:
summary.append(unindexed_dims_str)
summary.append(data_vars_repr(ds.data_vars, col_width=col_width))
if ds.attrs:
summary.append(attrs_repr(ds.attrs))
return "\n".join(summary)
def diff_dim_summary(a, b):
if a.dims != b.dims:
return "Differing dimensions:\n ({}) != ({})".format(
dim_summary(a), dim_summary(b)
)
else:
return ""
def _diff_mapping_repr(a_mapping, b_mapping, compat, title, summarizer, col_width=None):
def extra_items_repr(extra_keys, mapping, ab_side):
extra_repr = [summarizer(k, mapping[k], col_width) for k in extra_keys]
if extra_repr:
header = f"{title} only on the {ab_side} object:"
return [header] + extra_repr
else:
return []
a_keys = set(a_mapping)
b_keys = set(b_mapping)
summary = []
diff_items = []
for k in a_keys & b_keys:
try:
# compare xarray variable
if not callable(compat):
compatible = getattr(a_mapping[k], compat)(b_mapping[k])
else:
compatible = compat(a_mapping[k], b_mapping[k])
is_variable = True
except AttributeError:
# compare attribute value
if is_duck_array(a_mapping[k]) or is_duck_array(b_mapping[k]):
compatible = array_equiv(a_mapping[k], b_mapping[k])
else:
compatible = a_mapping[k] == b_mapping[k]
is_variable = False
if not compatible:
temp = [
summarizer(k, vars[k], col_width) for vars in (a_mapping, b_mapping)
]
if compat == "identical" and is_variable:
attrs_summary = []
for m in (a_mapping, b_mapping):
attr_s = "\n".join(
summarize_attr(ak, av) for ak, av in m[k].attrs.items()
)
attrs_summary.append(attr_s)
temp = [
"\n".join([var_s, attr_s]) if attr_s else var_s
for var_s, attr_s in zip(temp, attrs_summary)
]
diff_items += [ab_side + s[1:] for ab_side, s in zip(("L", "R"), temp)]
if diff_items:
summary += [f"Differing {title.lower()}:"] + diff_items
summary += extra_items_repr(a_keys - b_keys, a_mapping, "left")
summary += extra_items_repr(b_keys - a_keys, b_mapping, "right")
return "\n".join(summary)
diff_coords_repr = functools.partial(
_diff_mapping_repr, title="Coordinates", summarizer=summarize_coord
)
diff_data_vars_repr = functools.partial(
_diff_mapping_repr, title="Data variables", summarizer=summarize_datavar
)
diff_attrs_repr = functools.partial(
_diff_mapping_repr, title="Attributes", summarizer=summarize_attr
)
def _compat_to_str(compat):
if callable(compat):
compat = compat.__name__
if compat == "equals":
return "equal"
elif compat == "allclose":
return "close"
else:
return compat
def diff_array_repr(a, b, compat):
# used for DataArray, Variable and IndexVariable
summary = [
"Left and right {} objects are not {}".format(
type(a).__name__, _compat_to_str(compat)
)
]
summary.append(diff_dim_summary(a, b))
if callable(compat):
equiv = compat
else:
equiv = array_equiv
if not equiv(a.data, b.data):
temp = [wrap_indent(short_numpy_repr(obj), start=" ") for obj in (a, b)]
diff_data_repr = [
ab_side + "\n" + ab_data_repr
for ab_side, ab_data_repr in zip(("L", "R"), temp)
]
summary += ["Differing values:"] + diff_data_repr
if hasattr(a, "coords"):
col_width = _calculate_col_width(set(a.coords) | set(b.coords))
summary.append(
diff_coords_repr(a.coords, b.coords, compat, col_width=col_width)
)
if compat == "identical":
summary.append(diff_attrs_repr(a.attrs, b.attrs, compat))
return "\n".join(summary)
def diff_dataset_repr(a, b, compat):
summary = [
"Left and right {} objects are not {}".format(
type(a).__name__, _compat_to_str(compat)
)
]
col_width = _calculate_col_width(
set(_get_col_items(a.variables) + _get_col_items(b.variables))
)
summary.append(diff_dim_summary(a, b))
summary.append(diff_coords_repr(a.coords, b.coords, compat, col_width=col_width))
summary.append(
diff_data_vars_repr(a.data_vars, b.data_vars, compat, col_width=col_width)
)
if compat == "identical":
summary.append(diff_attrs_repr(a.attrs, b.attrs, compat))
return "\n".join(summary)
|
from pylatex import Document, LongTabu, HFill
from pylatex.utils import bold
def generate_longtabu():
geometry_options = {
"landscape": True,
"margin": "0.5in",
"headheight": "20pt",
"headsep": "10pt",
"includeheadfoot": True
}
doc = Document(page_numbers=True, geometry_options=geometry_options)
# Generate data table
with doc.create(LongTabu("X[r] X[r] X[r] X[r] X[r] X[r]")) as data_table:
header_row1 = ["Prov", "Num", "CurBal", "IntPay", "Total", "IntR"]
data_table.add_row(header_row1, mapper=[bold])
data_table.add_hline()
data_table.add_empty_row()
data_table.end_table_header()
data_table.add_row(["Prov", "Num", "CurBal", "IntPay", "Total",
"IntR"])
row = ["PA", "9", "$100", "%10", "$1000", "Test"]
for i in range(50):
data_table.add_row(row)
doc.append(bold("Grand Total:"))
doc.append(HFill())
doc.append(bold("Total"))
doc.generate_pdf("longtabu", clean_tex=False)
generate_longtabu()
|
import mock
from docker_registry.lib import checksums
from tests.base import TestCase
class TestShaMethods(TestCase):
def test_sha256_file(self):
self.assertEqual(
checksums.sha256_file(None, None),
'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855')
self.assertEqual(
checksums.sha256_file(None, 'test'),
'9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08')
def test_compute_simple(self):
out = checksums.compute_simple(None, '')
self.assertTrue(out.startswith('sha256:'))
nl = '01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b'
self.assertTrue(out.endswith(nl))
out = checksums.compute_simple(None, 'test')
h = 'f2ca1bb6c7e907d06dafe4687e579fce76b37e4e93b7605022da52e6ccc26fd2'
self.assertTrue(out.endswith(h))
class TestTarSum(TestCase):
def setUp(self):
self.tar_sum = checksums.TarSum(None)
def test_append(self):
self.tar_sum.header_fields = tuple()
member = mock.MagicMock(size=1)
tarobj = mock.MagicMock(
extractfile=mock.MagicMock(side_effect=KeyError))
self.tar_sum.append(member, tarobj)
self.assertEqual(len(self.tar_sum.hashes), 1)
self.assertEqual(
self.tar_sum.hashes[0],
'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855')
|
import re
from pyparsing import Optional, QuotedString, Regex, ZeroOrMore
def single_value_flag(func):
def parse_values(val):
if not val:
raise ValueError("Missing required parameter")
if len(val) > 1:
raise ValueError("Too many parameters")
return func(val[0])
return parse_values
def multi_value_flag(func, minimum=1, maximum=None, modulo=None):
def parse_values(val):
if modulo and len(val) % modulo != 0:
raise ValueError("Number of parameter is not even")
if minimum and len(val) < minimum:
raise ValueError("Missing required parameter")
if maximum and len(val) > maximum:
raise ValueError("Too many parameters")
return [func(x) for x in val]
return parse_values
class RawQuotedString(QuotedString):
def __init__(self, quoteChar, escChar="\\"): # noqa: N803
super().__init__(quoteChar, escChar=escChar, convertWhitespaceEscapes=False)
        # unlike QuotedString, this replaces only escaped quotes, not all escaped chars
self.escCharReplacePattern = (
re.escape(escChar) + "(" + re.escape(quoteChar) + ")"
)
SYNTAXCHARS = {",", ":", '"', "'", "\\"}
FlagName = Regex(r"""[^,:"'\\]+""")
RegexString = "r" + RawQuotedString('"')
FlagParam = Optional(
RegexString | FlagName | RawQuotedString("'") | RawQuotedString('"')
)
Flag = FlagName + ZeroOrMore(":" + FlagParam)
FlagsParser = Optional(Flag) + ZeroOrMore("," + Optional(Flag))
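# Usage sketch (assumes pyparsing's parseString API; the token layout shown is
# approximate and for illustration only):
#   FlagsParser.parseString("max-length:72,ignore-case", parseAll=True)
#   # -> tokens along the lines of ['max-length', ':', '72', ',', 'ignore-case'],
#   #    which a caller can regroup into {"max-length": ["72"], "ignore-case": []}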
|
import logging
from typing import List, Optional
import voluptuous as vol
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
DEFAULT_MIN_TEMP,
HVAC_MODE_AUTO,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_AWAY,
PRESET_BOOST,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import (
ATTR_BATTERY_LEVEL,
ATTR_TEMPERATURE,
PRECISION_HALVES,
STATE_OFF,
TEMP_CELSIUS,
)
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import (
ATTR_HEATING_POWER_REQUEST,
ATTR_SCHEDULE_NAME,
DATA_HANDLER,
DATA_HOMES,
DATA_SCHEDULES,
DOMAIN,
EVENT_TYPE_CANCEL_SET_POINT,
EVENT_TYPE_SET_POINT,
EVENT_TYPE_THERM_MODE,
MANUFACTURER,
SERVICE_SET_SCHEDULE,
SIGNAL_NAME,
)
from .data_handler import HOMEDATA_DATA_CLASS_NAME, HOMESTATUS_DATA_CLASS_NAME
from .netatmo_entity_base import NetatmoBase
_LOGGER = logging.getLogger(__name__)
PRESET_FROST_GUARD = "Frost Guard"
PRESET_SCHEDULE = "Schedule"
PRESET_MANUAL = "Manual"
SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_PRESET_MODE
SUPPORT_HVAC = [HVAC_MODE_HEAT, HVAC_MODE_AUTO, HVAC_MODE_OFF]
SUPPORT_PRESET = [PRESET_AWAY, PRESET_BOOST, PRESET_FROST_GUARD, PRESET_SCHEDULE]
STATE_NETATMO_SCHEDULE = "schedule"
STATE_NETATMO_HG = "hg"
STATE_NETATMO_MAX = "max"
STATE_NETATMO_AWAY = PRESET_AWAY
STATE_NETATMO_OFF = STATE_OFF
STATE_NETATMO_MANUAL = "manual"
STATE_NETATMO_HOME = "home"
PRESET_MAP_NETATMO = {
PRESET_FROST_GUARD: STATE_NETATMO_HG,
PRESET_BOOST: STATE_NETATMO_MAX,
PRESET_SCHEDULE: STATE_NETATMO_SCHEDULE,
PRESET_AWAY: STATE_NETATMO_AWAY,
STATE_NETATMO_OFF: STATE_NETATMO_OFF,
}
NETATMO_MAP_PRESET = {
STATE_NETATMO_HG: PRESET_FROST_GUARD,
STATE_NETATMO_MAX: PRESET_BOOST,
STATE_NETATMO_SCHEDULE: PRESET_SCHEDULE,
STATE_NETATMO_AWAY: PRESET_AWAY,
STATE_NETATMO_OFF: STATE_NETATMO_OFF,
STATE_NETATMO_MANUAL: STATE_NETATMO_MANUAL,
}
HVAC_MAP_NETATMO = {
PRESET_SCHEDULE: HVAC_MODE_AUTO,
STATE_NETATMO_HG: HVAC_MODE_AUTO,
PRESET_FROST_GUARD: HVAC_MODE_AUTO,
PRESET_BOOST: HVAC_MODE_HEAT,
STATE_NETATMO_OFF: HVAC_MODE_OFF,
STATE_NETATMO_MANUAL: HVAC_MODE_AUTO,
PRESET_MANUAL: HVAC_MODE_AUTO,
STATE_NETATMO_AWAY: HVAC_MODE_AUTO,
}
CURRENT_HVAC_MAP_NETATMO = {True: CURRENT_HVAC_HEAT, False: CURRENT_HVAC_IDLE}
DEFAULT_MAX_TEMP = 30
NA_THERM = "NATherm1"
NA_VALVE = "NRV"
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the Netatmo energy platform."""
data_handler = hass.data[DOMAIN][entry.entry_id][DATA_HANDLER]
await data_handler.register_data_class(
HOMEDATA_DATA_CLASS_NAME, HOMEDATA_DATA_CLASS_NAME, None
)
home_data = data_handler.data.get(HOMEDATA_DATA_CLASS_NAME)
if not home_data:
return
async def get_entities():
"""Retrieve Netatmo entities."""
entities = []
for home_id in get_all_home_ids(home_data):
_LOGGER.debug("Setting up home %s ...", home_id)
for room_id in home_data.rooms[home_id].keys():
room_name = home_data.rooms[home_id][room_id]["name"]
_LOGGER.debug("Setting up room %s (%s) ...", room_name, room_id)
signal_name = f"{HOMESTATUS_DATA_CLASS_NAME}-{home_id}"
await data_handler.register_data_class(
HOMESTATUS_DATA_CLASS_NAME, signal_name, None, home_id=home_id
)
home_status = data_handler.data.get(signal_name)
if home_status and room_id in home_status.rooms:
entities.append(NetatmoThermostat(data_handler, home_id, room_id))
hass.data[DOMAIN][DATA_SCHEDULES][home_id] = {
schedule_id: schedule_data.get("name")
for schedule_id, schedule_data in (
data_handler.data[HOMEDATA_DATA_CLASS_NAME]
.schedules[home_id]
.items()
)
}
hass.data[DOMAIN][DATA_HOMES] = {
home_id: home_data.get("name")
for home_id, home_data in (
data_handler.data[HOMEDATA_DATA_CLASS_NAME].homes.items()
)
}
return entities
async_add_entities(await get_entities(), True)
platform = entity_platform.current_platform.get()
if home_data is not None:
platform.async_register_entity_service(
SERVICE_SET_SCHEDULE,
{vol.Required(ATTR_SCHEDULE_NAME): cv.string},
"_service_set_schedule",
)
class NetatmoThermostat(NetatmoBase, ClimateEntity):
"""Representation a Netatmo thermostat."""
def __init__(self, data_handler, home_id, room_id):
"""Initialize the sensor."""
ClimateEntity.__init__(self)
super().__init__(data_handler)
self._id = room_id
self._home_id = home_id
self._home_status_class = f"{HOMESTATUS_DATA_CLASS_NAME}-{self._home_id}"
self._data_classes.extend(
[
{
"name": HOMEDATA_DATA_CLASS_NAME,
SIGNAL_NAME: HOMEDATA_DATA_CLASS_NAME,
},
{
"name": HOMESTATUS_DATA_CLASS_NAME,
"home_id": self._home_id,
SIGNAL_NAME: self._home_status_class,
},
]
)
self._home_status = self.data_handler.data[self._home_status_class]
self._room_status = self._home_status.rooms[room_id]
self._room_data = self._data.rooms[home_id][room_id]
self._model = NA_VALVE
for module in self._room_data.get("module_ids"):
if self._home_status.thermostats.get(module):
self._model = NA_THERM
break
self._state = None
self._device_name = self._data.rooms[home_id][room_id]["name"]
self._name = f"{MANUFACTURER} {self._device_name}"
self._current_temperature = None
self._target_temperature = None
self._preset = None
self._away = None
self._operation_list = [HVAC_MODE_AUTO, HVAC_MODE_HEAT]
self._support_flags = SUPPORT_FLAGS
self._hvac_mode = None
self._battery_level = None
self._connected = None
self._away_temperature = None
self._hg_temperature = None
self._boilerstatus = None
self._setpoint_duration = None
if self._model == NA_THERM:
self._operation_list.append(HVAC_MODE_OFF)
self._unique_id = f"{self._id}-{self._model}"
async def async_added_to_hass(self) -> None:
"""Entity created."""
await super().async_added_to_hass()
for event_type in (
EVENT_TYPE_SET_POINT,
EVENT_TYPE_THERM_MODE,
EVENT_TYPE_CANCEL_SET_POINT,
):
self._listeners.append(
async_dispatcher_connect(
self.hass,
f"signal-{DOMAIN}-webhook-{event_type}",
self.handle_event,
)
)
async def handle_event(self, event):
"""Handle webhook events."""
data = event["data"]
if not data.get("home"):
return
home = data["home"]
if self._home_id == home["id"] and data["event_type"] == EVENT_TYPE_THERM_MODE:
self._preset = NETATMO_MAP_PRESET[home[EVENT_TYPE_THERM_MODE]]
self._hvac_mode = HVAC_MAP_NETATMO[self._preset]
if self._preset == PRESET_FROST_GUARD:
self._target_temperature = self._hg_temperature
elif self._preset == PRESET_AWAY:
self._target_temperature = self._away_temperature
elif self._preset == PRESET_SCHEDULE:
self.async_update_callback()
self.async_write_ha_state()
return
if not home.get("rooms"):
return
for room in home["rooms"]:
if data["event_type"] == EVENT_TYPE_SET_POINT:
if self._id == room["id"]:
if room["therm_setpoint_mode"] == STATE_NETATMO_OFF:
self._hvac_mode = HVAC_MODE_OFF
elif room["therm_setpoint_mode"] == STATE_NETATMO_MAX:
self._hvac_mode = HVAC_MODE_HEAT
self._target_temperature = DEFAULT_MAX_TEMP
else:
self._target_temperature = room["therm_setpoint_temperature"]
self.async_write_ha_state()
break
elif data["event_type"] == EVENT_TYPE_CANCEL_SET_POINT:
if self._id == room["id"]:
self.async_update_callback()
self.async_write_ha_state()
break
@property
def supported_features(self):
"""Return the list of supported features."""
return self._support_flags
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return TEMP_CELSIUS
@property
def current_temperature(self):
"""Return the current temperature."""
return self._current_temperature
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self._target_temperature
@property
def target_temperature_step(self) -> Optional[float]:
"""Return the supported step of target temperature."""
return PRECISION_HALVES
@property
def hvac_mode(self):
"""Return hvac operation ie. heat, cool mode."""
return self._hvac_mode
@property
def hvac_modes(self):
"""Return the list of available hvac operation modes."""
return self._operation_list
@property
def hvac_action(self) -> Optional[str]:
"""Return the current running hvac operation if supported."""
if self._model == NA_THERM and self._boilerstatus is not None:
return CURRENT_HVAC_MAP_NETATMO[self._boilerstatus]
# Maybe it is a valve
if self._room_status and self._room_status.get("heating_power_request", 0) > 0:
return CURRENT_HVAC_HEAT
return CURRENT_HVAC_IDLE
def set_hvac_mode(self, hvac_mode: str) -> None:
"""Set new target hvac mode."""
if hvac_mode == HVAC_MODE_OFF:
self.turn_off()
elif hvac_mode == HVAC_MODE_AUTO:
if self.hvac_mode == HVAC_MODE_OFF:
self.turn_on()
self.set_preset_mode(PRESET_SCHEDULE)
elif hvac_mode == HVAC_MODE_HEAT:
self.set_preset_mode(PRESET_BOOST)
def set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
if self.target_temperature == 0:
self._home_status.set_room_thermpoint(
self._id,
STATE_NETATMO_HOME,
)
if preset_mode in [PRESET_BOOST, STATE_NETATMO_MAX] and self._model == NA_VALVE:
self._home_status.set_room_thermpoint(
self._id,
STATE_NETATMO_MANUAL,
DEFAULT_MAX_TEMP,
)
elif preset_mode in [PRESET_BOOST, STATE_NETATMO_MAX]:
self._home_status.set_room_thermpoint(
self._id, PRESET_MAP_NETATMO[preset_mode]
)
elif preset_mode in [PRESET_SCHEDULE, PRESET_FROST_GUARD, PRESET_AWAY]:
self._home_status.set_thermmode(PRESET_MAP_NETATMO[preset_mode])
else:
_LOGGER.error("Preset mode '%s' not available", preset_mode)
self.async_write_ha_state()
@property
def preset_mode(self) -> Optional[str]:
"""Return the current preset mode, e.g., home, away, temp."""
return self._preset
@property
def preset_modes(self) -> Optional[List[str]]:
"""Return a list of available preset modes."""
return SUPPORT_PRESET
def set_temperature(self, **kwargs):
"""Set new target temperature for 2 hours."""
temp = kwargs.get(ATTR_TEMPERATURE)
if temp is None:
return
self._home_status.set_room_thermpoint(self._id, STATE_NETATMO_MANUAL, temp)
self.async_write_ha_state()
@property
def device_state_attributes(self):
"""Return the state attributes of the thermostat."""
attr = {}
if self._battery_level is not None:
attr[ATTR_BATTERY_LEVEL] = self._battery_level
if self._model == NA_VALVE:
attr[ATTR_HEATING_POWER_REQUEST] = self._room_status.get(
"heating_power_request", 0
)
return attr
def turn_off(self):
"""Turn the entity off."""
if self._model == NA_VALVE:
self._home_status.set_room_thermpoint(
self._id,
STATE_NETATMO_MANUAL,
DEFAULT_MIN_TEMP,
)
elif self.hvac_mode != HVAC_MODE_OFF:
self._home_status.set_room_thermpoint(self._id, STATE_NETATMO_OFF)
self.async_write_ha_state()
def turn_on(self):
"""Turn the entity on."""
self._home_status.set_room_thermpoint(self._id, STATE_NETATMO_HOME)
self.async_write_ha_state()
@property
def available(self) -> bool:
"""If the device hasn't been able to connect, mark as unavailable."""
return bool(self._connected)
@callback
def async_update_callback(self):
"""Update the entity's state."""
self._home_status = self.data_handler.data[self._home_status_class]
self._room_status = self._home_status.rooms.get(self._id)
self._room_data = self._data.rooms.get(self._home_id, {}).get(self._id)
if not self._room_status or not self._room_data:
if self._connected:
_LOGGER.info(
"The thermostat in room %s seems to be out of reach",
self._device_name,
)
self._connected = False
return
roomstatus = {"roomID": self._room_status.get("id", {})}
if self._room_status.get("reachable"):
roomstatus.update(self._build_room_status())
self._away_temperature = self._data.get_away_temp(self._home_id)
self._hg_temperature = self._data.get_hg_temp(self._home_id)
self._setpoint_duration = self._data.setpoint_duration[self._home_id]
if "current_temperature" not in roomstatus:
return
if self._model is None:
self._model = roomstatus["module_type"]
self._current_temperature = roomstatus["current_temperature"]
self._target_temperature = roomstatus["target_temperature"]
self._preset = NETATMO_MAP_PRESET[roomstatus["setpoint_mode"]]
self._hvac_mode = HVAC_MAP_NETATMO[self._preset]
self._battery_level = roomstatus.get("battery_level")
self._connected = True
self._away = self._hvac_mode == HVAC_MAP_NETATMO[STATE_NETATMO_AWAY]
def _build_room_status(self):
"""Construct room status."""
try:
roomstatus = {
"roomname": self._room_data["name"],
"target_temperature": self._room_status["therm_setpoint_temperature"],
"setpoint_mode": self._room_status["therm_setpoint_mode"],
"current_temperature": self._room_status["therm_measured_temperature"],
"module_type": self._data.get_thermostat_type(
home_id=self._home_id, room_id=self._id
),
"module_id": None,
"heating_status": None,
"heating_power_request": None,
}
batterylevel = None
for module_id in self._room_data["module_ids"]:
if (
self._data.modules[self._home_id][module_id]["type"] == NA_THERM
or roomstatus["module_id"] is None
):
roomstatus["module_id"] = module_id
if roomstatus["module_type"] == NA_THERM:
self._boilerstatus = self._home_status.boiler_status(
roomstatus["module_id"]
)
roomstatus["heating_status"] = self._boilerstatus
batterylevel = self._home_status.thermostats[
roomstatus["module_id"]
].get("battery_level")
elif roomstatus["module_type"] == NA_VALVE:
roomstatus["heating_power_request"] = self._room_status[
"heating_power_request"
]
roomstatus["heating_status"] = roomstatus["heating_power_request"] > 0
if self._boilerstatus is not None:
roomstatus["heating_status"] = (
self._boilerstatus and roomstatus["heating_status"]
)
batterylevel = self._home_status.valves[roomstatus["module_id"]].get(
"battery_level"
)
if batterylevel:
batterypct = interpolate(batterylevel, roomstatus["module_type"])
if (
not roomstatus.get("battery_level")
or batterypct < roomstatus["battery_level"]
):
roomstatus["battery_level"] = batterypct
return roomstatus
except KeyError as err:
_LOGGER.error("Update of room %s failed. Error: %s", self._id, err)
return {}
def _service_set_schedule(self, **kwargs):
schedule_name = kwargs.get(ATTR_SCHEDULE_NAME)
schedule_id = None
for sid, name in self.hass.data[DOMAIN][DATA_SCHEDULES][self._home_id].items():
if name == schedule_name:
schedule_id = sid
if not schedule_id:
_LOGGER.error("You passed an invalid schedule")
return
self._data.switch_home_schedule(home_id=self._home_id, schedule_id=schedule_id)
_LOGGER.debug(
"Setting %s schedule to %s (%s)",
self._home_id,
kwargs.get(ATTR_SCHEDULE_NAME),
schedule_id,
)
def interpolate(batterylevel, module_type):
"""Interpolate battery level depending on device type."""
na_battery_levels = {
NA_THERM: {
"full": 4100,
"high": 3600,
"medium": 3300,
"low": 3000,
"empty": 2800,
},
NA_VALVE: {
"full": 3200,
"high": 2700,
"medium": 2400,
"low": 2200,
"empty": 2200,
},
}
levels = sorted(na_battery_levels[module_type].values())
steps = [20, 50, 80, 100]
na_battery_level = na_battery_levels[module_type]
if batterylevel >= na_battery_level["full"]:
return 100
if batterylevel >= na_battery_level["high"]:
i = 3
elif batterylevel >= na_battery_level["medium"]:
i = 2
elif batterylevel >= na_battery_level["low"]:
i = 1
else:
return 0
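    # Linearly interpolate between the two surrounding voltage thresholds to get
    # a percentage inside the selected step range.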
pct = steps[i - 1] + (
(steps[i] - steps[i - 1])
* (batterylevel - levels[i])
/ (levels[i + 1] - levels[i])
)
return int(pct)
def get_all_home_ids(home_data):
"""Get all the home ids returned by NetAtmo API."""
if home_data is None:
return []
return [
home_data.homes[home_id]["id"]
for home_id in home_data.homes
if (
"therm_schedules" in home_data.homes[home_id]
and "modules" in home_data.homes[home_id]
)
]
|
import sys
from paasta_tools.cli.utils import lazy_choices_completer
from paasta_tools.cli.utils import list_deploy_groups
from paasta_tools.cli.utils import PaastaColors
from paasta_tools.cli.utils import validate_service_name
from paasta_tools.deployment_utils import load_v2_deployments_json
from paasta_tools.utils import DEFAULT_SOA_DIR
from paasta_tools.utils import get_service_docker_registry
from paasta_tools.utils import list_services
def add_subparser(subparsers):
list_parser = subparsers.add_parser(
"get-docker-image",
help="Gets the docker image URL for the deployment of a service",
)
list_parser.add_argument(
"-s",
"--service",
help="Name of the service which you want to get the docker image for.",
required=True,
).completer = lazy_choices_completer(list_services)
list_parser.add_argument(
"-i",
"-l",
"--deploy-group",
help='Name of the deploy group, like "prod".',
required=True,
).completer = lazy_choices_completer(list_deploy_groups)
list_parser.add_argument(
"-d",
"--soa-dir",
help="A directory from which soa-configs should be read from",
default=DEFAULT_SOA_DIR,
)
list_parser.set_defaults(command=paasta_get_docker_image)
def paasta_get_docker_image(args):
service = args.service
deploy_group = args.deploy_group
soa_dir = args.soa_dir
validate_service_name(service, soa_dir)
deployments = load_v2_deployments_json(service=service, soa_dir=soa_dir)
docker_image = deployments.get_docker_image_for_deploy_group(deploy_group)
if not docker_image:
print(
PaastaColors.red(
f"There is no {service} docker_image for {deploy_group}. Has it been deployed yet?"
),
file=sys.stderr,
)
return 1
else:
registry_uri = get_service_docker_registry(service=service, soa_dir=soa_dir)
docker_url = f"{registry_uri}/{docker_image}"
print(docker_url)
return 0
|
from unittest import TestCase
from scattertext.characteristic.DenseRankCharacteristicness import DenseRankCharacteristicness
from scattertext.test.test_TermDocMat import get_hamlet_term_doc_matrix
class TestDenseRankCharacteristicness(TestCase):
def test_get_scores(self):
c = get_hamlet_term_doc_matrix()
zero_point, scores = DenseRankCharacteristicness().get_scores(c)
self.assertGreater(zero_point, 0)
self.assertLessEqual(zero_point, 1)
self.assertGreater(len(scores), 100)
|
import pytest
@pytest.mark.parametrize("patterns,filename,expected_match", [
([r'*.csv'], 'foo.csv', True),
([r'*.cvs'], 'foo.csv', False),
([r'*.csv *.xml'], 'foo.csv', True),
([r'*.csv *.xml'], 'foo.xml', True),
([r'*.csv', r'*.xml'], 'foo.csv', True),
([r'*.csv', r'*.xml'], 'foo.xml', True),
([r'*.csv', r'*.xml'], 'dumbtest', False),
([r'thing*csv'], 'thingcsv', True),
([r'thing*csv'], 'thingwhatevercsv', True),
([r'thing*csv'], 'csvthing', False),
])
def test_file_filters(patterns, filename, expected_match):
from meld.filters import FilterEntry
filters = [
FilterEntry.new_from_gsetting(("name", True, p), FilterEntry.SHELL)
for p in patterns
]
# All of the dirdiff logic is "does this match any filter", so
# that's what we test here, even if it looks a bit weird.
match = any(f.filter.match(filename) for f in filters)
assert match == expected_match
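# A pattern that fails to compile must not raise; FilterEntry should expose a None filter instead.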
@pytest.mark.parametrize("pattern", [
r'*.foo*', # Trailing wildcard
r'\xfoo', # Invalid escape
])
def test_bad_regex_compilation(pattern):
from meld.filters import FilterEntry
f = FilterEntry.new_from_gsetting(
("name", True, pattern), FilterEntry.REGEX)
assert f.filter is None
|
import argparse
import matplotlib.pyplot as plt
import chainer
from chainercv.datasets import ade20k_semantic_segmentation_label_colors
from chainercv.datasets import ade20k_semantic_segmentation_label_names
from chainercv.datasets import cityscapes_semantic_segmentation_label_colors
from chainercv.datasets import cityscapes_semantic_segmentation_label_names
from chainercv.datasets import voc_semantic_segmentation_label_colors
from chainercv.datasets import voc_semantic_segmentation_label_names
from chainercv.links import DeepLabV3plusXception65
from chainercv import utils
from chainercv.visualizations import vis_image
from chainercv.visualizations import vis_semantic_segmentation
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--gpu', type=int, default=-1)
parser.add_argument('--pretrained-model')
parser.add_argument('--min-input-size', type=int, default=None)
parser.add_argument(
'--dataset', choices=('cityscapes', 'ade20k', 'voc'),
default='cityscapes')
parser.add_argument('image')
args = parser.parse_args()
if args.dataset == 'cityscapes':
if args.pretrained_model is None:
args.pretrained_model = 'cityscapes'
label_names = cityscapes_semantic_segmentation_label_names
colors = cityscapes_semantic_segmentation_label_colors
elif args.dataset == 'ade20k':
if args.pretrained_model is None:
args.pretrained_model = 'ade20k'
label_names = ade20k_semantic_segmentation_label_names
colors = ade20k_semantic_segmentation_label_colors
elif args.dataset == 'voc':
if args.pretrained_model is None:
args.pretrained_model = 'voc'
label_names = voc_semantic_segmentation_label_names
colors = voc_semantic_segmentation_label_colors
model = DeepLabV3plusXception65(
pretrained_model=args.pretrained_model,
min_input_size=args.min_input_size)
if args.gpu >= 0:
chainer.cuda.get_device_from_id(args.gpu).use()
model.to_gpu()
img = utils.read_image(args.image, color=True)
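    # predict() takes a batch of images and returns one label map per image.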
labels = model.predict([img])
label = labels[0]
fig = plt.figure()
ax1 = fig.add_subplot(1, 2, 1)
vis_image(img, ax=ax1)
ax2 = fig.add_subplot(1, 2, 2)
# Do not overlay the label image on the color image
vis_semantic_segmentation(
None, label, label_names, colors, ax=ax2)
plt.show()
if __name__ == '__main__':
main()
|
from __future__ import unicode_literals
from rules.BaseTrick import wordshaper
def SingleRule(cname, ename, sname, birth, usedpwd, phone, uphone, hphone, email, postcode, nickname, idcard, jobnum,
otherdate, usedchar):
for _ in wordshaper(cname, ename, sname, usedpwd, email, nickname, usedchar):
yield _
for bd in birth:
yield bd
yield bd[2:]
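        # e.g. '19900203' -> '199023': keep the year, drop zeros from the month/day digits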
yield bd[:4] + bd[4:].replace('0', '')
for ph in phone:
yield ph
for uph in uphone:
yield uph
for hp in hphone:
yield hp
for em in email:
yield em
# {@xxx.xxx}
yield '@' + em.split('@')[1]
for pc in postcode:
yield pc
for ic in idcard:
yield ic[:6]
yield ic[-4:]
yield ic[-6:]
yield ic[-8:]
for jn in jobnum:
yield jn
for od in otherdate:
yield od
yield od[2:]
yield od[:4] + od[4:].replace('0', '')
# You can continue to add new and useful rules
#
|
import argparse
import json
from pathlib import Path
import re
from shutil import rmtree
import sys
from . import download, upload
from .const import INTEGRATIONS_DIR
from .util import get_base_arg_parser
def valid_integration(integration):
"""Test if it's a valid integration."""
if not (INTEGRATIONS_DIR / integration).is_dir():
raise argparse.ArgumentTypeError(
f"The integration {integration} does not exist."
)
return integration
def get_arguments() -> argparse.Namespace:
"""Get parsed passed in arguments."""
parser = get_base_arg_parser()
parser.add_argument(
"--integration", type=valid_integration, help="Integration to process."
)
return parser.parse_args()
def flatten_translations(translations):
"""Flatten all translations."""
stack = [iter(translations.items())]
key_stack = []
flattened_translations = {}
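    # Iterative depth-first traversal of the nested dict: key_stack holds the current
    # path, and leaf strings are stored under their '::'-joined key.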
while stack:
for k, v in stack[-1]:
key_stack.append(k)
if isinstance(v, dict):
stack.append(iter(v.items()))
break
elif isinstance(v, str):
common_key = "::".join(key_stack)
flattened_translations[common_key] = v
key_stack.pop()
else:
stack.pop()
if len(key_stack) > 0:
key_stack.pop()
return flattened_translations
def substitute_translation_references(integration_strings, flattened_translations):
"""Recursively processes all translation strings for the integration."""
result = {}
for key, value in integration_strings.items():
if isinstance(value, dict):
sub_dict = substitute_translation_references(value, flattened_translations)
result[key] = sub_dict
elif isinstance(value, str):
result[key] = substitute_reference(value, flattened_translations)
return result
def substitute_reference(value, flattened_translations):
"""Substitute localization key references in a translation string."""
matches = re.findall(r"\[\%key:((?:[\w]+|[:]{2})*)\%\]", value)
if not matches:
return value
new = value
for key in matches:
if key in flattened_translations:
new = new.replace(f"[%key:{key}%]", flattened_translations[key])
else:
print(f"Invalid substitution key '{key}' found in string '{value}'")
sys.exit(1)
return new
def run():
"""Run the script."""
args = get_arguments()
if args.integration:
integration = args.integration
else:
integration = None
while (
integration is None
or not Path(f"homeassistant/components/{integration}").exists()
):
if integration is not None:
print(f"Integration {integration} doesn't exist!")
print()
integration = input("Integration to process: ")
translations = upload.generate_upload_data()
if integration not in translations["component"]:
print("Integration has no strings.json")
sys.exit(1)
flattened_translations = flatten_translations(translations)
integration_strings = translations["component"][integration]
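    # Resolve [%key:...%] references so the integration's strings are self-contained.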
translations["component"][integration] = substitute_translation_references(
integration_strings, flattened_translations
)
if download.DOWNLOAD_DIR.is_dir():
rmtree(str(download.DOWNLOAD_DIR))
download.DOWNLOAD_DIR.mkdir(parents=True)
(download.DOWNLOAD_DIR / "en.json").write_text(
json.dumps({"component": {integration: translations["component"][integration]}})
)
download.write_integration_translations()
return 0
|
import aiopulse
import pytest
from homeassistant import data_entry_flow
from homeassistant.components.acmeda.const import DOMAIN
from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import CONF_HOST
from tests.async_mock import patch
from tests.common import MockConfigEntry
DUMMY_HOST1 = "127.0.0.1"
DUMMY_HOST2 = "127.0.0.2"
CONFIG = {
CONF_HOST: DUMMY_HOST1,
}
@pytest.fixture
def mock_hub_discover():
"""Mock the hub discover method."""
with patch("aiopulse.Hub.discover") as mock_discover:
yield mock_discover
@pytest.fixture
def mock_hub_run():
"""Mock the hub run method."""
with patch("aiopulse.Hub.run") as mock_run:
yield mock_run
async def async_generator(items):
"""Async yields items provided in a list."""
for item in items:
yield item
async def test_show_form_no_hubs(hass, mock_hub_discover):
"""Test that flow aborts if no hubs are discovered."""
mock_hub_discover.return_value = async_generator([])
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "no_devices_found"
# Check we performed the discovery
assert len(mock_hub_discover.mock_calls) == 1
async def test_show_form_one_hub(hass, mock_hub_discover, mock_hub_run):
"""Test that a config is created when one hub discovered."""
dummy_hub_1 = aiopulse.Hub(DUMMY_HOST1)
dummy_hub_1.id = "ABC123"
mock_hub_discover.return_value = async_generator([dummy_hub_1])
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == dummy_hub_1.id
assert result["result"].data == {
"host": DUMMY_HOST1,
}
# Check we performed the discovery
assert len(mock_hub_discover.mock_calls) == 1
async def test_show_form_two_hubs(hass, mock_hub_discover):
"""Test that the form is served when more than one hub discovered."""
dummy_hub_1 = aiopulse.Hub(DUMMY_HOST1)
dummy_hub_1.id = "ABC123"
dummy_hub_2 = aiopulse.Hub(DUMMY_HOST1)
dummy_hub_2.id = "DEF456"
mock_hub_discover.return_value = async_generator([dummy_hub_1, dummy_hub_2])
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
# Check we performed the discovery
assert len(mock_hub_discover.mock_calls) == 1
async def test_create_second_entry(hass, mock_hub_run, mock_hub_discover):
"""Test that a config is created when a second hub is discovered."""
dummy_hub_1 = aiopulse.Hub(DUMMY_HOST1)
dummy_hub_1.id = "ABC123"
dummy_hub_2 = aiopulse.Hub(DUMMY_HOST2)
dummy_hub_2.id = "DEF456"
mock_hub_discover.return_value = async_generator([dummy_hub_1, dummy_hub_2])
MockConfigEntry(domain=DOMAIN, unique_id=dummy_hub_1.id, data=CONFIG).add_to_hass(
hass
)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == dummy_hub_2.id
assert result["result"].data == {
"host": DUMMY_HOST2,
}
async def test_already_configured(hass, mock_hub_discover):
"""Test that flow aborts when all hubs are configured."""
dummy_hub_1 = aiopulse.Hub(DUMMY_HOST1)
dummy_hub_1.id = "ABC123"
mock_hub_discover.return_value = async_generator([dummy_hub_1])
MockConfigEntry(domain=DOMAIN, unique_id=dummy_hub_1.id, data=CONFIG).add_to_hass(
hass
)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == "abort"
assert result["reason"] == "no_devices_found"
|
from .base import Variable
import itertools
class InteractionType(Variable):
type = "Interaction"
def __init__(self, definition):
self.interactions = definition["interaction variables"]
self.name = "(Interaction: %s)" % str(self.interactions)
self.interaction_fields = self.interactions
super(InteractionType, self).__init__(definition)
def expandInteractions(self, field_model):
self.interaction_fields = self.atomicInteractions(self.interactions,
field_model)
for field in self.interaction_fields:
if field_model[field].has_missing:
self.has_missing = True
self.categorical(field_model)
def categorical(self, field_model):
categoricals = [field for field in self.interaction_fields
if hasattr(field_model[field], "higher_vars")]
noncategoricals = [field for field in self.interaction_fields
if not hasattr(field_model[field], "higher_vars")]
dummies = [field_model[field].higher_vars
for field in categoricals]
self.higher_vars = []
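        # Build one expanded interaction per combination of the categorical fields' dummy variables.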
for combo in itertools.product(*dummies):
var_names = [field.name for field in combo] + noncategoricals
higher_var = InteractionType({'has missing': self.has_missing,
'interaction variables': var_names})
self.higher_vars.append(higher_var)
def atomicInteractions(self, interactions, field_model):
atomic_interactions = []
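        # Recursively expand nested interaction variables down to plain field names.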
for field in interactions:
try:
field_model[field]
except KeyError:
raise KeyError("The interaction variable %s is "
"not a named variable in the variable "
"definition" % field)
if hasattr(field_model[field], 'interaction_fields'):
sub_interactions = field_model[field].interaction_fields
atoms = self.atomicInteractions(sub_interactions, field_model)
atomic_interactions.extend(atoms)
else:
atomic_interactions.append(field)
return atomic_interactions
|
from ...utils import verbose
from ..utils import (_data_path, _data_path_doc,
_get_version, _version_doc)
@verbose
def data_path(path=None, force_update=False, update_path=False,
download=True, verbose=None): # noqa: D103
return _data_path(path=path, force_update=force_update,
update_path=update_path, name='fake',
download=download)
data_path.__doc__ = _data_path_doc.format(name='fake',
conf='MNE_DATASETS_FAKE_PATH')
def get_version(): # noqa: D103
return _get_version('fake')
get_version.__doc__ = _version_doc.format(name='fake')
|
from homeassistant.components.binary_sensor import BinarySensorEntity
from . import VelbusEntity
from .const import DOMAIN
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up Velbus binary sensor based on config_entry."""
cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"]
modules_data = hass.data[DOMAIN][entry.entry_id]["binary_sensor"]
entities = []
for address, channel in modules_data:
module = cntrl.get_module(address)
entities.append(VelbusBinarySensor(module, channel))
async_add_entities(entities)
class VelbusBinarySensor(VelbusEntity, BinarySensorEntity):
"""Representation of a Velbus Binary Sensor."""
@property
def is_on(self):
"""Return true if the sensor is on."""
return self._module.is_closed(self._channel)
|
from homeassistant.components.cloud.const import DISPATCHER_REMOTE_UPDATE
from homeassistant.setup import async_setup_component
from tests.async_mock import Mock, patch
async def test_remote_connection_sensor(hass):
"""Test the remote connection sensor."""
assert await async_setup_component(hass, "cloud", {"cloud": {}})
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.remote_ui") is None
# Fake connection/discovery
await hass.helpers.discovery.async_load_platform(
"binary_sensor", "cloud", {}, {"cloud": {}}
)
# Mock test env
cloud = hass.data["cloud"] = Mock()
cloud.remote.certificate = None
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.remote_ui")
assert state is not None
assert state.state == "unavailable"
with patch("homeassistant.components.cloud.binary_sensor.WAIT_UNTIL_CHANGE", 0):
cloud.remote.is_connected = False
cloud.remote.certificate = object()
hass.helpers.dispatcher.async_dispatcher_send(DISPATCHER_REMOTE_UPDATE, {})
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.remote_ui")
assert state.state == "off"
cloud.remote.is_connected = True
hass.helpers.dispatcher.async_dispatcher_send(DISPATCHER_REMOTE_UPDATE, {})
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.remote_ui")
assert state.state == "on"
|
from homeassistant.components.awair.const import (
API_CO2,
API_HUMID,
API_LUX,
API_PM10,
API_PM25,
API_SCORE,
API_SPL_A,
API_TEMP,
API_VOC,
ATTR_UNIQUE_ID,
DOMAIN,
SENSOR_TYPES,
)
from homeassistant.const import (
ATTR_ICON,
ATTR_UNIT_OF_MEASUREMENT,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_PARTS_PER_MILLION,
LIGHT_LUX,
PERCENTAGE,
STATE_UNAVAILABLE,
TEMP_CELSIUS,
)
from .const import (
AWAIR_UUID,
CONFIG,
DEVICES_FIXTURE,
GEN1_DATA_FIXTURE,
GEN2_DATA_FIXTURE,
GLOW_DATA_FIXTURE,
MINT_DATA_FIXTURE,
OFFLINE_FIXTURE,
OMNI_DATA_FIXTURE,
UNIQUE_ID,
USER_FIXTURE,
)
from tests.async_mock import patch
from tests.common import MockConfigEntry
async def setup_awair(hass, fixtures):
"""Add Awair devices to hass, using specified fixtures for data."""
entry = MockConfigEntry(domain=DOMAIN, unique_id=UNIQUE_ID, data=CONFIG)
with patch("python_awair.AwairClient.query", side_effect=fixtures):
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
def assert_expected_properties(
hass, registry, name, unique_id, state_value, attributes
):
"""Assert expected properties from a dict."""
entry = registry.async_get(name)
assert entry.unique_id == unique_id
state = hass.states.get(name)
assert state
assert state.state == state_value
for attr, value in attributes.items():
assert state.attributes.get(attr) == value
async def test_awair_gen1_sensors(hass):
"""Test expected sensors on a 1st gen Awair."""
fixtures = [USER_FIXTURE, DEVICES_FIXTURE, GEN1_DATA_FIXTURE]
await setup_awair(hass, fixtures)
registry = await hass.helpers.entity_registry.async_get_registry()
assert_expected_properties(
hass,
registry,
"sensor.living_room_awair_score",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_SCORE][ATTR_UNIQUE_ID]}",
"88",
{ATTR_ICON: "mdi:blur"},
)
assert_expected_properties(
hass,
registry,
"sensor.living_room_temperature",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_TEMP][ATTR_UNIQUE_ID]}",
"21.8",
{ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS, "awair_index": 1.0},
)
assert_expected_properties(
hass,
registry,
"sensor.living_room_humidity",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_HUMID][ATTR_UNIQUE_ID]}",
"41.59",
{ATTR_UNIT_OF_MEASUREMENT: PERCENTAGE, "awair_index": 0.0},
)
assert_expected_properties(
hass,
registry,
"sensor.living_room_carbon_dioxide",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_CO2][ATTR_UNIQUE_ID]}",
"654.0",
{
ATTR_ICON: "mdi:cloud",
ATTR_UNIT_OF_MEASUREMENT: CONCENTRATION_PARTS_PER_MILLION,
"awair_index": 0.0,
},
)
assert_expected_properties(
hass,
registry,
"sensor.living_room_volatile_organic_compounds",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_VOC][ATTR_UNIQUE_ID]}",
"366",
{
ATTR_ICON: "mdi:cloud",
ATTR_UNIT_OF_MEASUREMENT: CONCENTRATION_PARTS_PER_BILLION,
"awair_index": 1.0,
},
)
assert_expected_properties(
hass,
registry,
"sensor.living_room_pm2_5",
# gen1 unique_id should be awair_12345-DUST, which matches old integration behavior
f"{AWAIR_UUID}_DUST",
"14.3",
{
ATTR_ICON: "mdi:blur",
ATTR_UNIT_OF_MEASUREMENT: CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
"awair_index": 1.0,
},
)
assert_expected_properties(
hass,
registry,
"sensor.living_room_pm10",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_PM10][ATTR_UNIQUE_ID]}",
"14.3",
{
ATTR_ICON: "mdi:blur",
ATTR_UNIT_OF_MEASUREMENT: CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
"awair_index": 1.0,
},
)
# We should not have a dust sensor; it's aliased as pm2.5
# and pm10 sensors.
assert hass.states.get("sensor.living_room_dust") is None
# We should not have sound or lux sensors.
assert hass.states.get("sensor.living_room_sound_level") is None
assert hass.states.get("sensor.living_room_illuminance") is None
async def test_awair_gen2_sensors(hass):
"""Test expected sensors on a 2nd gen Awair."""
fixtures = [USER_FIXTURE, DEVICES_FIXTURE, GEN2_DATA_FIXTURE]
await setup_awair(hass, fixtures)
registry = await hass.helpers.entity_registry.async_get_registry()
assert_expected_properties(
hass,
registry,
"sensor.living_room_awair_score",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_SCORE][ATTR_UNIQUE_ID]}",
"97",
{ATTR_ICON: "mdi:blur"},
)
assert_expected_properties(
hass,
registry,
"sensor.living_room_pm2_5",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_PM25][ATTR_UNIQUE_ID]}",
"2.0",
{
ATTR_ICON: "mdi:blur",
ATTR_UNIT_OF_MEASUREMENT: CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
"awair_index": 0.0,
},
)
# The Awair 2nd gen reports specifically a pm2.5 sensor,
# and so we don't alias anything. Make sure we didn't do that.
assert hass.states.get("sensor.living_room_pm10") is None
async def test_awair_mint_sensors(hass):
"""Test expected sensors on an Awair mint."""
fixtures = [USER_FIXTURE, DEVICES_FIXTURE, MINT_DATA_FIXTURE]
await setup_awair(hass, fixtures)
registry = await hass.helpers.entity_registry.async_get_registry()
assert_expected_properties(
hass,
registry,
"sensor.living_room_awair_score",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_SCORE][ATTR_UNIQUE_ID]}",
"98",
{ATTR_ICON: "mdi:blur"},
)
assert_expected_properties(
hass,
registry,
"sensor.living_room_pm2_5",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_PM25][ATTR_UNIQUE_ID]}",
"1.0",
{
ATTR_ICON: "mdi:blur",
ATTR_UNIT_OF_MEASUREMENT: CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
"awair_index": 0.0,
},
)
assert_expected_properties(
hass,
registry,
"sensor.living_room_illuminance",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_LUX][ATTR_UNIQUE_ID]}",
"441.7",
{ATTR_UNIT_OF_MEASUREMENT: LIGHT_LUX},
)
# The Mint does not have a CO2 sensor.
assert hass.states.get("sensor.living_room_carbon_dioxide") is None
async def test_awair_glow_sensors(hass):
"""Test expected sensors on an Awair glow."""
fixtures = [USER_FIXTURE, DEVICES_FIXTURE, GLOW_DATA_FIXTURE]
await setup_awair(hass, fixtures)
registry = await hass.helpers.entity_registry.async_get_registry()
assert_expected_properties(
hass,
registry,
"sensor.living_room_awair_score",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_SCORE][ATTR_UNIQUE_ID]}",
"93",
{ATTR_ICON: "mdi:blur"},
)
# The glow does not have a particle sensor
assert hass.states.get("sensor.living_room_pm2_5") is None
async def test_awair_omni_sensors(hass):
"""Test expected sensors on an Awair omni."""
fixtures = [USER_FIXTURE, DEVICES_FIXTURE, OMNI_DATA_FIXTURE]
await setup_awair(hass, fixtures)
registry = await hass.helpers.entity_registry.async_get_registry()
assert_expected_properties(
hass,
registry,
"sensor.living_room_awair_score",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_SCORE][ATTR_UNIQUE_ID]}",
"99",
{ATTR_ICON: "mdi:blur"},
)
assert_expected_properties(
hass,
registry,
"sensor.living_room_sound_level",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_SPL_A][ATTR_UNIQUE_ID]}",
"47.0",
{ATTR_ICON: "mdi:ear-hearing", ATTR_UNIT_OF_MEASUREMENT: "dBa"},
)
assert_expected_properties(
hass,
registry,
"sensor.living_room_illuminance",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_LUX][ATTR_UNIQUE_ID]}",
"804.9",
{ATTR_UNIT_OF_MEASUREMENT: LIGHT_LUX},
)
async def test_awair_offline(hass):
"""Test expected behavior when an Awair is offline."""
fixtures = [USER_FIXTURE, DEVICES_FIXTURE, OFFLINE_FIXTURE]
await setup_awair(hass, fixtures)
# The expected behavior is that we won't have any sensors
# if the device is not online when we set it up. python_awair
# does not make any assumptions about what sensors a device
# might have - they are created dynamically.
# We check for the absence of the "awair score", which every
# device *should* have if it's online. If we don't see it,
# then we probably didn't set anything up. Which is correct,
# in this case.
assert hass.states.get("sensor.living_room_awair_score") is None
async def test_awair_unavailable(hass):
"""Test expected behavior when an Awair becomes offline later."""
fixtures = [USER_FIXTURE, DEVICES_FIXTURE, GEN1_DATA_FIXTURE]
await setup_awair(hass, fixtures)
registry = await hass.helpers.entity_registry.async_get_registry()
assert_expected_properties(
hass,
registry,
"sensor.living_room_awair_score",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_SCORE][ATTR_UNIQUE_ID]}",
"88",
{ATTR_ICON: "mdi:blur"},
)
with patch("python_awair.AwairClient.query", side_effect=OFFLINE_FIXTURE):
await hass.helpers.entity_component.async_update_entity(
"sensor.living_room_awair_score"
)
assert_expected_properties(
hass,
registry,
"sensor.living_room_awair_score",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_SCORE][ATTR_UNIQUE_ID]}",
STATE_UNAVAILABLE,
{ATTR_ICON: "mdi:blur"},
)
|
from Handler import Handler
import logging
try:
from riemann_client.transport import TCPTransport, UDPTransport
from riemann_client.client import Client
riemann_client = True
except ImportError:
riemann_client = None
class RiemannHandler(Handler):
def __init__(self, config=None):
# Initialize Handler
Handler.__init__(self, config)
if riemann_client is None:
logging.error("Failed to load riemann_client module")
return
# Initialize options
self.host = self.config['host']
self.port = int(self.config['port'])
self.transport = self.config['transport']
# Initialize client
if self.transport == 'tcp':
self.transport = TCPTransport(self.host, self.port)
else:
self.transport = UDPTransport(self.host, self.port)
self.client = Client(self.transport)
self._connect()
def get_default_config_help(self):
"""
Returns the help text for the configuration options for this handler
"""
config = super(RiemannHandler, self).get_default_config_help()
config.update({
'host': '',
'port': '',
'transport': 'tcp or udp',
})
return config
def get_default_config(self):
"""
Return the default config for the handler
"""
config = super(RiemannHandler, self).get_default_config()
config.update({
'host': '',
'port': 123,
'transport': 'tcp',
})
return config
def process(self, metric):
"""
Send a metric to Riemann.
"""
event = self._metric_to_riemann_event(metric)
try:
self.client.send_event(event)
except Exception as e:
self.log.error(
"RiemannHandler: Error sending event to Riemann: %s", e)
def _metric_to_riemann_event(self, metric):
"""
Convert a metric to a dictionary representing a Riemann event.
"""
# Riemann has a separate "host" field, so remove from the path.
path = '%s.%s.%s' % (
metric.getPathPrefix(),
metric.getCollectorPath(),
metric.getMetricPath()
)
return self.client.create_event({
'host': metric.host,
'service': path,
'time': metric.timestamp,
'metric_f': float(metric.value),
'ttl': metric.ttl,
})
def _connect(self):
self.transport.connect()
def _close(self):
"""
Disconnect from Riemann.
"""
if hasattr(self, 'transport'):
self.transport.disconnect()
def __del__(self):
self._close()
|
import mock
import pytest
from addict import Dict
from paasta_tools import utils
from paasta_tools.frameworks import adhoc_scheduler
from paasta_tools.frameworks import native_scheduler
from paasta_tools.frameworks.native_service_config import NativeServiceConfig
from paasta_tools.frameworks.native_service_config import UnknownNativeServiceError
from paasta_tools.frameworks.task_store import DictTaskStore
@pytest.fixture
def system_paasta_config():
return utils.SystemPaastaConfig(
{"docker_registry": "fake", "volumes": []}, "/fake/system/configs"
)
def make_fake_offer(
cpu=50000, mem=50000, port_begin=31000, port_end=32000, pool="default"
):
offer = Dict(
agent_id=Dict(value="super_big_slave"),
resources=[
Dict(name="cpus", scalar=Dict(value=cpu)),
Dict(name="mem", scalar=Dict(value=mem)),
Dict(
name="ports", ranges=Dict(range=[Dict(begin=port_begin, end=port_end)])
),
],
attributes=[],
)
if pool is not None:
offer.attributes = [Dict(name="pool", text=Dict(value=pool))]
return offer
class TestAdhocScheduler:
def test_raise_error_when_cmd_missing(self, system_paasta_config):
service_name = "service_name"
instance_name = "instance_name"
cluster = "cluster"
service_configs = [
NativeServiceConfig(
service=service_name,
instance=instance_name,
cluster=cluster,
config_dict={
"cpus": 0.1,
"mem": 50,
"instances": 3,
"drain_method": "test",
},
branch_dict={"docker_image": "busybox", "desired_state": "start"},
soa_dir="/nail/etc/services",
)
]
with pytest.raises(UnknownNativeServiceError):
adhoc_scheduler.AdhocScheduler(
service_name=service_name,
instance_name=instance_name,
cluster=cluster,
system_paasta_config=system_paasta_config,
service_config=service_configs[0],
dry_run=False,
reconcile_start_time=0,
staging_timeout=30,
task_store_type=DictTaskStore,
)
@mock.patch("paasta_tools.frameworks.native_scheduler._log", autospec=True)
def test_can_only_launch_task_once(self, mock_log, system_paasta_config):
service_name = "service_name"
instance_name = "instance_name"
cluster = "cluster"
service_configs = [
NativeServiceConfig(
service=service_name,
instance=instance_name,
cluster=cluster,
config_dict={
"cpus": 0.1,
"mem": 50,
"instances": 3,
"cmd": "sleep 50",
"drain_method": "test",
},
branch_dict={
"docker_image": "busybox",
"desired_state": "start",
"force_bounce": None,
},
soa_dir="/nail/etc/services",
)
]
scheduler = adhoc_scheduler.AdhocScheduler(
service_name=service_name,
instance_name=instance_name,
cluster=cluster,
system_paasta_config=system_paasta_config,
service_config=service_configs[0],
dry_run=False,
reconcile_start_time=0,
staging_timeout=30,
task_store_type=DictTaskStore,
)
fake_driver = mock.Mock()
scheduler.registered(
driver=fake_driver, frameworkId={"value": "foo"}, masterInfo=mock.Mock()
)
with mock.patch(
"paasta_tools.utils.load_system_paasta_config",
autospec=True,
return_value=system_paasta_config,
):
# Check that offers with invalid pool don't get accepted
tasks, _ = scheduler.tasks_and_state_for_offer(
fake_driver, make_fake_offer(pool="notdefault"), {}
)
assert len(tasks) == 0
tasks, _ = scheduler.tasks_and_state_for_offer(
fake_driver, make_fake_offer(pool=None), {}
)
assert len(tasks) == 0
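            # A matching offer should result in exactly one launched task.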
tasks = scheduler.launch_tasks_for_offers(fake_driver, [make_fake_offer()])
task_id = tasks[0]["task_id"]["value"]
task_name = tasks[0]["name"]
assert len(scheduler.task_store.get_all_tasks()) == 1
assert len(tasks) == 1
assert (
scheduler.need_more_tasks(
task_name, scheduler.task_store.get_all_tasks(), []
)
is False
)
assert scheduler.need_to_stop() is False
no_tasks = scheduler.launch_tasks_for_offers(
fake_driver, [make_fake_offer()]
)
assert len(scheduler.task_store.get_all_tasks()) == 1
assert len(no_tasks) == 0
assert scheduler.need_to_stop() is False
scheduler.statusUpdate(
fake_driver,
{
"task_id": {"value": task_id},
"state": native_scheduler.TASK_FINISHED,
},
)
assert len(scheduler.task_store.get_all_tasks()) == 1
assert scheduler.need_to_stop() is True
@mock.patch("paasta_tools.frameworks.native_scheduler._log", autospec=True)
def test_can_run_multiple_copies(self, mock_log, system_paasta_config):
service_name = "service_name"
instance_name = "instance_name"
cluster = "cluster"
service_configs = [
NativeServiceConfig(
service=service_name,
instance=instance_name,
cluster=cluster,
config_dict={
"cpus": 0.1,
"mem": 50,
"instances": 3,
"cmd": "sleep 50",
"drain_method": "test",
},
branch_dict={
"docker_image": "busybox",
"desired_state": "start",
"force_bounce": None,
},
soa_dir="/nail/etc/services",
)
]
scheduler = adhoc_scheduler.AdhocScheduler(
service_name=service_name,
instance_name=instance_name,
cluster=cluster,
system_paasta_config=system_paasta_config,
service_config=service_configs[0],
dry_run=False,
reconcile_start_time=0,
staging_timeout=30,
service_config_overrides={"instances": 5},
task_store_type=DictTaskStore,
)
fake_driver = mock.Mock()
scheduler.registered(
driver=fake_driver, frameworkId={"value": "foo"}, masterInfo=mock.Mock()
)
with mock.patch(
"paasta_tools.utils.load_system_paasta_config",
autospec=True,
return_value=system_paasta_config,
):
tasks = scheduler.launch_tasks_for_offers(fake_driver, [make_fake_offer()])
task_name = tasks[0]["name"]
task_ids = [t["task_id"]["value"] for t in tasks]
assert len(scheduler.task_store.get_all_tasks()) == 5
assert len(tasks) == 5
assert (
scheduler.need_more_tasks(
task_name, scheduler.task_store.get_all_tasks(), []
)
is False
)
assert scheduler.need_to_stop() is False
no_tasks = scheduler.launch_tasks_for_offers(
fake_driver, [make_fake_offer()]
)
assert len(scheduler.task_store.get_all_tasks()) == 5
assert len(no_tasks) == 0
assert scheduler.need_to_stop() is False
for idx, task_id in enumerate(task_ids):
scheduler.statusUpdate(
fake_driver,
{
"task_id": {"value": task_id},
"state": native_scheduler.TASK_FINISHED,
},
)
assert scheduler.need_to_stop() is (idx == 4)
|
from plumbum import cli
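# plumbum uses the annotations on main() to coerce and validate positional arguments.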
class Main3Validator(cli.Application):
def main(self, myint:int, myint2:int, *mylist:int):
print(myint, myint2, mylist)
class TestProg3:
def test_prog(self, capsys):
_, rc = Main3Validator.run(["prog", "1", "2", '3', '4', '5'], exit = False)
assert rc == 0
assert "1 2 (3, 4, 5)" in capsys.readouterr()[0]
class Main4Validator(cli.Application):
def main(self, myint:int, myint2:int, *mylist:int) -> None:
print(myint, myint2, mylist)
class TestProg4:
def test_prog(self, capsys):
_, rc = Main4Validator.run(["prog", "1", "2", '3', '4', '5'], exit = False)
assert rc == 0
assert "1 2 (3, 4, 5)" in capsys.readouterr()[0]
|
import os.path
import pytest
pytest.importorskip('PyQt5.QtWebEngineWidgets')
from qutebrowser.browser.webengine import webenginedownloads
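# _get_suggested_filename should strip subfolders and duplicate-download suffixes such as
# "(1)" or a trailing timestamp, while leaving percent-encoded names untouched.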
@pytest.mark.parametrize('path, expected', [
(os.path.join('subfolder', 'foo'), 'foo'),
('foo(1)', 'foo'),
('foo (1)', 'foo'),
('foo - 1970-01-01T00:00:00.000Z', 'foo'),
('foo(a)', 'foo(a)'),
('foo1', 'foo1'),
('foo%20bar', 'foo%20bar'),
('foo%2Fbar', 'foo%2Fbar'),
])
def test_get_suggested_filename(path, expected):
assert webenginedownloads._get_suggested_filename(path) == expected
|
from __future__ import print_function
__docformat__ = "restructuredtext en"
# modified copy of some functions from test/regrtest.py from PyXml
# disable camel case warning
# pylint: disable=C0103
from contextlib import contextmanager
import sys
import os, os.path as osp
import re
import difflib
import tempfile
import math
import warnings
from shutil import rmtree
from operator import itemgetter
from inspect import isgeneratorfunction
from six import PY2, add_metaclass, string_types
from six.moves import builtins, range, configparser, input
from logilab.common.deprecation import class_deprecated, deprecated
import unittest as unittest_legacy
if not getattr(unittest_legacy, "__package__", None):
try:
import unittest2 as unittest
from unittest2 import SkipTest
except ImportError:
raise ImportError("You have to install python-unittest2 to use %s" % __name__)
else:
import unittest as unittest
from unittest import SkipTest
from functools import wraps
from logilab.common.debugger import Debugger
from logilab.common.decorators import cached, classproperty
from logilab.common import textutils
__all__ = ['unittest_main', 'find_tests', 'nocoverage', 'pause_trace']
DEFAULT_PREFIXES = ('test', 'regrtest', 'smoketest', 'unittest',
'func', 'validation')
is_generator = deprecated('[lgc 0.63] use inspect.isgeneratorfunction')(isgeneratorfunction)
# used by unittest to count the number of relevant levels in the traceback
__unittest = 1
@deprecated('with_tempdir is deprecated, use {0}.TemporaryDirectory.'.format(
'tempfile' if not PY2 else 'backports.tempfile'))
def with_tempdir(callable):
"""A decorator ensuring no temporary file left when the function return
Work only for temporary file created with the tempfile module"""
if isgeneratorfunction(callable):
def proxy(*args, **kwargs):
old_tmpdir = tempfile.gettempdir()
new_tmpdir = tempfile.mkdtemp(prefix="temp-lgc-")
tempfile.tempdir = new_tmpdir
try:
for x in callable(*args, **kwargs):
yield x
finally:
try:
rmtree(new_tmpdir, ignore_errors=True)
finally:
tempfile.tempdir = old_tmpdir
return proxy
@wraps(callable)
def proxy(*args, **kargs):
old_tmpdir = tempfile.gettempdir()
new_tmpdir = tempfile.mkdtemp(prefix="temp-lgc-")
tempfile.tempdir = new_tmpdir
try:
return callable(*args, **kargs)
finally:
try:
rmtree(new_tmpdir, ignore_errors=True)
finally:
tempfile.tempdir = old_tmpdir
return proxy
def in_tempdir(callable):
"""A decorator moving the enclosed function inside the tempfile.tempfdir
"""
@wraps(callable)
def proxy(*args, **kargs):
old_cwd = os.getcwd()
os.chdir(tempfile.tempdir)
try:
return callable(*args, **kargs)
finally:
os.chdir(old_cwd)
return proxy
def within_tempdir(callable):
"""A decorator run the enclosed function inside a tmpdir removed after execution
"""
proxy = with_tempdir(in_tempdir(callable))
proxy.__name__ = callable.__name__
return proxy
def find_tests(testdir,
prefixes=DEFAULT_PREFIXES, suffix=".py",
excludes=(),
remove_suffix=True):
"""
Return a list of all applicable test modules.
"""
tests = []
for name in os.listdir(testdir):
if not suffix or name.endswith(suffix):
for prefix in prefixes:
if name.startswith(prefix):
if remove_suffix and name.endswith(suffix):
name = name[:-len(suffix)]
if name not in excludes:
tests.append(name)
tests.sort()
return tests
## PostMortem Debug facilities #####
def start_interactive_mode(result):
"""starts an interactive shell so that the user can inspect errors
"""
debuggers = result.debuggers
descrs = result.error_descrs + result.fail_descrs
if len(debuggers) == 1:
# don't ask for test name if there's only one failure
debuggers[0].start()
else:
while True:
testindex = 0
print("Choose a test to debug:")
            # order debuggers in the same way as errors were printed
print("\n".join(['\t%s : %s' % (i, descr) for i, (_, descr)
in enumerate(descrs)]))
print("Type 'exit' (or ^D) to quit")
print()
try:
todebug = input('Enter a test name: ')
if todebug.strip().lower() == 'exit':
print()
break
else:
try:
testindex = int(todebug)
debugger = debuggers[descrs[testindex][0]]
except (ValueError, IndexError):
print("ERROR: invalid test number %r" % (todebug, ))
else:
debugger.start()
except (EOFError, KeyboardInterrupt):
print()
break
# coverage pausing tools #####################################################
@contextmanager
def replace_trace(trace=None):
"""A context manager that temporary replaces the trace function"""
oldtrace = sys.gettrace()
sys.settrace(trace)
try:
yield
finally:
# specific hack to work around a bug in pycoverage, see
# https://bitbucket.org/ned/coveragepy/issue/123
if (oldtrace is not None and not callable(oldtrace) and
hasattr(oldtrace, 'pytrace')):
oldtrace = oldtrace.pytrace
sys.settrace(oldtrace)
pause_trace = replace_trace
def nocoverage(func):
"""Function decorator that pauses tracing functions"""
if hasattr(func, 'uncovered'):
return func
func.uncovered = True
def not_covered(*args, **kwargs):
with pause_trace():
return func(*args, **kwargs)
not_covered.uncovered = True
return not_covered
# test utils ##################################################################
# Add deprecation warnings about new api used by module level fixtures in unittest2
# http://www.voidspace.org.uk/python/articles/unittest2.shtml#setupmodule-and-teardownmodule
class _DebugResult(object): # simplify import statement among unittest flavors..
"Used by the TestSuite to hold previous class when running in debug."
_previousTestClass = None
_moduleSetUpFailed = False
shouldStop = False
# backward compatibility: TestSuite might be imported from lgc.testlib
TestSuite = unittest.TestSuite
class keywords(dict):
"""Keyword args (**kwargs) support for generative tests."""
class starargs(tuple):
"""Variable arguments (*args) for generative tests."""
def __new__(cls, *args):
return tuple.__new__(cls, args)
unittest_main = unittest.main
class InnerTestSkipped(SkipTest):
"""raised when a test is skipped"""
pass
def parse_generative_args(params):
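    # Split generative-test parameters into positional args, starargs and keyword
    # arguments, enforcing that starargs/keywords come after plain positionals.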
args = []
varargs = ()
kwargs = {}
flags = 0 # 2 <=> starargs, 4 <=> kwargs
for param in params:
if isinstance(param, starargs):
varargs = param
if flags:
raise TypeError('found starargs after keywords !')
flags |= 2
args += list(varargs)
elif isinstance(param, keywords):
kwargs = param
if flags & 4:
raise TypeError('got multiple keywords parameters')
flags |= 4
elif flags & 2 or flags & 4:
raise TypeError('found parameters after kwargs or args')
else:
args.append(param)
return args, kwargs
class InnerTest(tuple):
def __new__(cls, name, *data):
instance = tuple.__new__(cls, data)
instance.name = name
return instance
class Tags(set):
"""A set of tag able validate an expression"""
def __init__(self, *tags, **kwargs):
self.inherit = kwargs.pop('inherit', True)
if kwargs:
raise TypeError("%s are an invalid keyword argument for this function" % kwargs.keys())
if len(tags) == 1 and not isinstance(tags[0], string_types):
tags = tags[0]
super(Tags, self).__init__(tags, **kwargs)
def __getitem__(self, key):
return key in self
def match(self, exp):
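        # The tag set itself serves as the evaluation namespace; unknown tags resolve to False via __getitem__.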
return eval(exp, {}, self)
def __or__(self, other):
return Tags(*super(Tags, self).__or__(other))
# duplicate definition from unittest2 of the _deprecate decorator
def _deprecate(original_func):
def deprecated_func(*args, **kwargs):
warnings.warn(
('Please use %s instead.' % original_func.__name__),
DeprecationWarning, 2)
return original_func(*args, **kwargs)
return deprecated_func
class TestCase(unittest.TestCase):
"""A unittest.TestCase extension with some additional methods."""
maxDiff = None
tags = Tags()
def __init__(self, methodName='runTest'):
super(TestCase, self).__init__(methodName)
self.__exc_info = sys.exc_info
self.__testMethodName = self._testMethodName
self._current_test_descr = None
self._options_ = None
@classproperty
@cached
def datadir(cls): # pylint: disable=E0213
"""helper attribute holding the standard test's data directory
NOTE: this is a logilab's standard
"""
mod = sys.modules[cls.__module__]
return osp.join(osp.dirname(osp.abspath(mod.__file__)), 'data')
# cache it (use a class method to cache on class since TestCase is
# instantiated for each test run)
@classmethod
def datapath(cls, *fname):
"""joins the object's datadir and `fname`"""
return osp.join(cls.datadir, *fname)
def set_description(self, descr):
"""sets the current test's description.
        This can be useful for generative tests because it allows specifying
        a description per yield
"""
self._current_test_descr = descr
# override default's unittest.py feature
def shortDescription(self):
"""override default unittest shortDescription to handle correctly
generative tests
"""
if self._current_test_descr is not None:
return self._current_test_descr
return super(TestCase, self).shortDescription()
def quiet_run(self, result, func, *args, **kwargs):
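        # Run func, recording skips and errors on the result; return True only if it completed cleanly.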
try:
func(*args, **kwargs)
except (KeyboardInterrupt, SystemExit):
raise
except unittest.SkipTest as e:
if hasattr(result, 'addSkip'):
result.addSkip(self, str(e))
else:
warnings.warn("TestResult has no addSkip method, skips not reported",
RuntimeWarning, 2)
result.addSuccess(self)
return False
except:
result.addError(self, self.__exc_info())
return False
return True
def _get_test_method(self):
"""return the test method"""
return getattr(self, self._testMethodName)
def optval(self, option, default=None):
"""return the option value or default if the option is not define"""
return getattr(self._options_, option, default)
def __call__(self, result=None, runcondition=None, options=None):
"""rewrite TestCase.__call__ to support generative tests
        This is mostly a copy/paste from unittest.py (i.e. same
variable names, same logic, except for the generative tests part)
"""
if result is None:
result = self.defaultTestResult()
self._options_ = options
# if result.cvg:
# result.cvg.start()
testMethod = self._get_test_method()
if (getattr(self.__class__, "__unittest_skip__", False) or
getattr(testMethod, "__unittest_skip__", False)):
# If the class or method was skipped.
try:
skip_why = (getattr(self.__class__, '__unittest_skip_why__', '')
or getattr(testMethod, '__unittest_skip_why__', ''))
if hasattr(result, 'addSkip'):
result.addSkip(self, skip_why)
else:
warnings.warn("TestResult has no addSkip method, skips not reported",
RuntimeWarning, 2)
result.addSuccess(self)
finally:
result.stopTest(self)
return
if runcondition and not runcondition(testMethod):
return # test is skipped
result.startTest(self)
try:
if not self.quiet_run(result, self.setUp):
return
generative = isgeneratorfunction(testMethod)
# generative tests
if generative:
self._proceed_generative(result, testMethod,
runcondition)
else:
status = self._proceed(result, testMethod)
success = (status == 0)
if not self.quiet_run(result, self.tearDown):
return
if not generative and success:
result.addSuccess(self)
finally:
# if result.cvg:
# result.cvg.stop()
result.stopTest(self)
def _proceed_generative(self, result, testfunc, runcondition=None):
# cancel startTest()'s increment
result.testsRun -= 1
success = True
try:
for params in testfunc():
if runcondition and not runcondition(testfunc,
skipgenerator=False):
if not (isinstance(params, InnerTest)
and runcondition(params)):
continue
if not isinstance(params, (tuple, list)):
params = (params, )
func = params[0]
args, kwargs = parse_generative_args(params[1:])
# increment test counter manually
result.testsRun += 1
status = self._proceed(result, func, args, kwargs)
if status == 0:
result.addSuccess(self)
success = True
else:
success = False
                    # XXX Don't stop anymore if an error occurred
#if status == 2:
# result.shouldStop = True
if result.shouldStop: # either on error or on exitfirst + error
break
except self.failureException:
result.addFailure(self, self.__exc_info())
success = False
except SkipTest as e:
result.addSkip(self, e)
except:
            # if an error occurs between two yields
result.addError(self, self.__exc_info())
success = False
return success
def _proceed(self, result, testfunc, args=(), kwargs=None):
"""proceed the actual test
returns 0 on success, 1 on failure, 2 on error
Note: addSuccess can't be called here because we have to wait
for tearDown to be successfully executed to declare the test as
successful
"""
kwargs = kwargs or {}
try:
testfunc(*args, **kwargs)
except self.failureException:
result.addFailure(self, self.__exc_info())
return 1
except KeyboardInterrupt:
raise
except InnerTestSkipped as e:
result.addSkip(self, e)
return 1
except SkipTest as e:
result.addSkip(self, e)
return 0
except:
result.addError(self, self.__exc_info())
return 2
return 0
def innerSkip(self, msg=None):
"""mark a generative test as skipped for the <msg> reason"""
msg = msg or 'test was skipped'
raise InnerTestSkipped(msg)
if sys.version_info >= (3,2):
assertItemsEqual = unittest.TestCase.assertCountEqual
else:
assertCountEqual = unittest.TestCase.assertItemsEqual
TestCase.assertItemsEqual = deprecated('assertItemsEqual is deprecated, use assertCountEqual')(
TestCase.assertItemsEqual)
import doctest
class SkippedSuite(unittest.TestSuite):
def test(self):
"""just there to trigger test execution"""
self.skipped_test('doctest module has no DocTestSuite class')
class DocTestFinder(doctest.DocTestFinder):
def __init__(self, *args, **kwargs):
self.skipped = kwargs.pop('skipped', ())
doctest.DocTestFinder.__init__(self, *args, **kwargs)
def _get_test(self, obj, name, module, globs, source_lines):
"""override default _get_test method to be able to skip tests
according to skipped attribute's value
"""
if getattr(obj, '__name__', '') in self.skipped:
return None
return doctest.DocTestFinder._get_test(self, obj, name, module,
globs, source_lines)
@add_metaclass(class_deprecated)
class DocTest(TestCase):
"""trigger module doctest
I don't know how to make unittest.main consider the DocTestSuite instance
without this hack
"""
__deprecation_warning__ = 'use stdlib doctest module with unittest API directly'
skipped = ()
    def __call__(self, result=None, runcondition=None, options=None):
        # pylint: disable=W0613
try:
finder = DocTestFinder(skipped=self.skipped)
suite = doctest.DocTestSuite(self.module, test_finder=finder)
# XXX iirk
doctest.DocTestCase._TestCase__exc_info = sys.exc_info
except AttributeError:
suite = SkippedSuite()
        # doctest may clobber the builtins dictionary
        # This happens to the "_" entry used by gettext
old_builtins = builtins.__dict__.copy()
try:
return suite.run(result)
finally:
builtins.__dict__.clear()
builtins.__dict__.update(old_builtins)
run = __call__
def test(self):
"""just there to trigger test execution"""
class MockConnection:
"""fake DB-API 2.0 connexion AND cursor (i.e. cursor() return self)"""
def __init__(self, results):
self.received = []
self.states = []
self.results = results
def cursor(self):
"""Mock cursor method"""
return self
def execute(self, query, args=None):
"""Mock execute method"""
self.received.append( (query, args) )
def fetchone(self):
"""Mock fetchone method"""
return self.results[0]
def fetchall(self):
"""Mock fetchall method"""
return self.results
def commit(self):
"""Mock commiy method"""
self.states.append( ('commit', len(self.received)) )
def rollback(self):
"""Mock rollback method"""
self.states.append( ('rollback', len(self.received)) )
def close(self):
"""Mock close method"""
pass
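# Illustrative sketch (not part of the original module): how MockConnection is
# typically driven; the query and result values below are made up.
def _mock_connection_example():
    cnx = MockConnection(results=[(1, 'one'), (2, 'two')])
    cursor = cnx.cursor()  # cursor() returns the connection itself
    cursor.execute("SELECT id, label FROM demo WHERE id=%s", (1,))
    row = cursor.fetchone()  # -> (1, 'one')
    cnx.commit()  # states -> [('commit', 1)]
    return cnx.received, cnx.states, row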
def mock_object(**params):
"""creates an object using params to set attributes
    >>> option = mock_object(verbose=False, index=list(range(5)))
>>> option.verbose
False
>>> option.index
[0, 1, 2, 3, 4]
"""
return type('Mock', (), params)()
def create_files(paths, chroot):
"""Creates directories and files found in <path>.
:param paths: list of relative paths to files or directories
:param chroot: the root directory in which paths will be created
>>> from os.path import isdir, isfile
>>> isdir('/tmp/a')
False
>>> create_files(['a/b/foo.py', 'a/b/c/', 'a/b/c/d/e.py'], '/tmp')
>>> isdir('/tmp/a')
True
>>> isdir('/tmp/a/b/c')
True
>>> isfile('/tmp/a/b/c/d/e.py')
True
>>> isfile('/tmp/a/b/foo.py')
True
"""
dirs, files = set(), set()
for path in paths:
path = osp.join(chroot, path)
filename = osp.basename(path)
# path is a directory path
if filename == '':
dirs.add(path)
# path is a filename path
else:
dirs.add(osp.dirname(path))
files.add(path)
for dirpath in dirs:
if not osp.isdir(dirpath):
os.makedirs(dirpath)
for filepath in files:
open(filepath, 'w').close()
class AttrObject: # XXX cf mock_object
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
def tag(*args, **kwargs):
"""descriptor adding tag to a function"""
def desc(func):
assert not hasattr(func, 'tags')
func.tags = Tags(*args, **kwargs)
return func
return desc
def require_version(version):
""" Compare version of python interpreter to the given one. Skip the test
if older.
"""
def check_require_version(f):
version_elements = version.split('.')
try:
compare = tuple([int(v) for v in version_elements])
except ValueError:
            raise ValueError('%s is not a valid version: should be X.Y[.Z].' % version)
current = sys.version_info[:3]
if current < compare:
def new_f(self, *args, **kwargs):
self.skipTest('Need at least %s version of python. Current version is %s.' % (version, '.'.join([str(element) for element in current])))
new_f.__name__ = f.__name__
return new_f
else:
return f
return check_require_version
def require_module(module):
""" Check if the given module is loaded. Skip the test if not.
"""
def check_require_module(f):
try:
__import__(module)
return f
except ImportError:
def new_f(self, *args, **kwargs):
                self.skipTest('%s cannot be imported.' % module)
new_f.__name__ = f.__name__
return new_f
return check_require_module
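# Illustrative sketch (not part of the original module): combining the helpers
# above on a TestCase subclass. The class name, tags and option values are
# hypothetical.
class _ExampleTC(TestCase):
    @tag('unit', 'fast')
    @require_version('2.6')
    @require_module('sys')
    def test_options(self):
        options = mock_object(verbose=False, retries=3)
        self.assertFalse(options.verbose)
        self.assertEqual(options.retries, 3)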
|
import abc
import logging
import uuid
from threading import RLock, Event
from concurrent.futures import ThreadPoolExecutor, wait, ALL_COMPLETED, FIRST_EXCEPTION
from six import iteritems, itervalues
from arctic._config import ARCTIC_ASYNC_NWORKERS
from arctic.exceptions import AsyncArcticException
ABC = abc.ABCMeta('ABC', (object,), {})
def _looping_task(shutdown_flag, fun, *args, **kwargs):
while not shutdown_flag.is_set():
try:
fun(*args, **kwargs)
except Exception as e:
logging.exception("Task failed {}".format(fun))
raise e
def _exec_task(fun, *args, **kwargs):
try:
fun(*args, **kwargs)
except Exception as e:
logging.exception("Task failed {}".format(fun))
raise e
class LazySingletonTasksCoordinator(ABC):
"""
    A thread-safe, lazily initialized singleton thread pool class (encapsulating concurrent.futures.ThreadPoolExecutor)
"""
_instance = None
_SINGLETON_LOCK = RLock()
_POOL_LOCK = RLock()
@classmethod
def is_initialized(cls):
with cls._POOL_LOCK:
is_init = cls._instance is not None and cls._instance._pool is not None
return is_init
@classmethod
def get_instance(cls, pool_size=None):
if cls._instance is not None:
return cls._instance
# Lazy init
with cls._SINGLETON_LOCK:
if cls._instance is None:
cls._instance = cls(ARCTIC_ASYNC_NWORKERS if pool_size is None else pool_size)
return cls._instance
@property
def _workers_pool(self):
if self._pool is not None:
return self._pool
# lazy init the workers pool
got_initialized = False
with type(self)._POOL_LOCK:
if self._pool is None:
self._pool = ThreadPoolExecutor(max_workers=self._pool_size,
thread_name_prefix='AsyncArcticWorker')
got_initialized = True
# Call hooks outside the lock, to minimize time-under-lock
if got_initialized:
for hook in self._pool_update_hooks:
hook(self._pool_size)
return self._pool
def __init__(self, pool_size):
# Only allow creation via get_instance
if not type(self)._SINGLETON_LOCK._is_owned():
raise AsyncArcticException("{} is a singleton, can't create a new instance".format(type(self)))
pool_size = int(pool_size)
if pool_size < 1:
raise ValueError("{} can't be instantiated with a pool_size of {}".format(type(self), pool_size))
# Enforce the singleton pattern
with type(self)._SINGLETON_LOCK:
if type(self)._instance is not None:
raise AsyncArcticException("LazySingletonTasksCoordinator is a singleton, can't create a new instance")
self._lock = RLock()
self._pool = None
self._pool_size = int(pool_size)
self._pool_update_hooks = []
self.alive_tasks = {}
self.is_shutdown = False
def reset(self, pool_size=None, timeout=None):
pool_size = ARCTIC_ASYNC_NWORKERS if pool_size is None else int(pool_size)
with type(self)._POOL_LOCK:
self.shutdown(timeout=timeout)
pool_size = max(pool_size, 1)
self._pool = None
self._pool_size = pool_size
self.is_shutdown = False
# pool will be lazily initialized with pool_size on next request submission
def stop_all_running_tasks(self):
with type(self)._POOL_LOCK:
for fut, ev in (v for v in itervalues(self.alive_tasks) if not v[0].done()):
if ev:
ev.set()
fut.cancel()
@staticmethod
def wait_tasks(futures, timeout=None, return_when=ALL_COMPLETED, raise_exceptions=True):
running_futures = [fut for fut in futures if not fut.done()]
done, _ = wait(running_futures, timeout=timeout, return_when=return_when)
if raise_exceptions:
[f.result() for f in done if not f.cancelled() and f.exception() is not None] # raises the exception
@staticmethod
def wait_tasks_or_abort(futures, timeout=60, kill_switch_ev=None):
try:
LazySingletonTasksCoordinator.wait_tasks(futures, return_when=FIRST_EXCEPTION, raise_exceptions=True)
except Exception as e:
if kill_switch_ev is not None:
                # Used when we want to both raise the exception and wait for all tasks to finish
kill_switch_ev.set()
LazySingletonTasksCoordinator.wait_tasks(futures, return_when=ALL_COMPLETED,
raise_exceptions=False, timeout=timeout)
raise e
def register_update_hook(self, fun):
with type(self)._POOL_LOCK:
self._pool_update_hooks.append(fun)
def submit_task(self, is_looping, fun, *args, **kwargs):
new_id = uuid.uuid4()
shutdown_flag = Event() if is_looping else None
with type(self)._POOL_LOCK:
if self.is_shutdown:
raise AsyncArcticException("The worker pool has been shutdown and can no longer accept new requests.")
if is_looping:
new_future = self._workers_pool.submit(_looping_task, shutdown_flag, fun, *args, **kwargs)
else:
new_future = self._workers_pool.submit(_exec_task, fun, *args, **kwargs)
self.alive_tasks = {k: v for k, v in iteritems(self.alive_tasks) if not v[0].done()}
self.alive_tasks[new_id] = (new_future, shutdown_flag)
return new_id, new_future
def total_alive_tasks(self):
with type(self)._POOL_LOCK:
self.alive_tasks = {k: v for k, v in iteritems(self.alive_tasks) if not v[0].done()}
total = len(self.alive_tasks)
return total
def shutdown(self, timeout=None):
if self.is_shutdown:
return
with type(self)._POOL_LOCK:
self.is_shutdown = True
if timeout is not None:
self.await_termination(timeout=timeout)
self._workers_pool.shutdown(wait=timeout is not None)
def await_termination(self, timeout=None):
with type(self)._POOL_LOCK:
if not self.is_shutdown:
raise AsyncArcticException("The workers pool has not been shutdown, please call shutdown() first.")
LazySingletonTasksCoordinator.wait_tasks(
[v[0] for v in itervalues(self.alive_tasks)],
timeout=timeout, return_when=ALL_COMPLETED, raise_exceptions=False)
with type(self)._POOL_LOCK:
self.alive_tasks = {}
@property
def actual_pool_size(self):
return self._workers_pool._max_workers
@abc.abstractmethod
def __reduce__(self):
pass
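# Illustrative sketch (not part of the original module): a minimal concrete
# coordinator and how it might be driven. The class and helper names are
# hypothetical; real subclasses may differ.
class _ExampleCoordinator(LazySingletonTasksCoordinator):
    # dedicated singleton slot for this concrete class
    _instance = None
    def __reduce__(self):
        # rebuild through the singleton factory instead of pickling state
        return _get_example_coordinator, ()
def _get_example_coordinator():
    return _ExampleCoordinator.get_instance()
def _example_submit_and_shutdown():
    coordinator = _ExampleCoordinator.get_instance(pool_size=2)
    # _exec_task discards fun's return value; the future only conveys
    # completion and exceptions, so submit callables for their side effects.
    task_id, future = coordinator.submit_task(False, logging.info, "hello from the pool")
    LazySingletonTasksCoordinator.wait_tasks([future], timeout=10)
    coordinator.shutdown(timeout=10)
    return task_id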
|
import numpy as np
from numpy.testing import assert_allclose
import pytest
from mne._ola import _COLA, _Interp2, _Storer
def test_interp_2pt():
"""Test our two-point interpolator."""
n_pts = 200
assert n_pts % 50 == 0
feeds = [ # test a bunch of feeds to make sure they don't break things
[n_pts],
[50] * (n_pts // 50),
[10] * (n_pts // 10),
[5] * (n_pts // 5),
[2] * (n_pts // 2),
[1] * n_pts,
]
# ZOH
values = np.array([10, -10])
expected = np.full(n_pts, 10)
for feed in feeds:
expected[-1] = 10
interp = _Interp2([0, n_pts], values, 'zero')
out = np.concatenate([interp.feed(f)[0] for f in feed])
assert_allclose(out, expected)
interp = _Interp2([0, n_pts - 1], values, 'zero')
expected[-1] = -10
out = np.concatenate([interp.feed(f)[0] for f in feed])
assert_allclose(out, expected)
# linear and inputs of different sizes
values = [np.arange(2)[:, np.newaxis, np.newaxis], np.array([20, 10])]
expected = [
np.linspace(0, 1, n_pts, endpoint=False)[np.newaxis, np.newaxis, :],
np.linspace(20, 10, n_pts, endpoint=False)]
for feed in feeds:
interp = _Interp2([0, n_pts], values, 'linear')
outs = [interp.feed(f) for f in feed]
outs = [np.concatenate([o[0] for o in outs], axis=-1),
np.concatenate([o[1] for o in outs], axis=-1)]
assert_allclose(outs[0], expected[0], atol=1e-7)
assert_allclose(outs[1], expected[1], atol=1e-7)
# cos**2 and more interesting bounds
values = np.array([10, -10])
expected = np.full(n_pts, 10.)
expected[-5:] = -10
cos = np.cos(np.linspace(0, np.pi / 2., n_pts - 9,
endpoint=False))
expected[4:-5] = cos ** 2 * 20 - 10
for feed in feeds:
interp = _Interp2([4, n_pts - 5], values, 'cos2')
out = np.concatenate([interp.feed(f)[0] for f in feed])
assert_allclose(out, expected, atol=1e-7)
out = interp.feed(10)[0]
assert_allclose(out, [values[-1]] * 10, atol=1e-7)
# hann and broadcasting
n_hann = n_pts - 9
expected[4:-5] = np.hanning(2 * n_hann + 1)[n_hann:-1] * 20 - 10
expected = np.array([expected, expected[::-1] * 0.5])
values = np.array([values, values[::-1] * 0.5]).T
for feed in feeds:
interp = _Interp2([4, n_pts - 5], values, 'hann')
out = np.concatenate([interp.feed(f)[0] for f in feed], axis=-1)
assert_allclose(out, expected, atol=1e-7)
# one control point and None support
values = [np.array([10]), None]
for start in [0, 50, 99, 100, 1000]:
interp = _Interp2([start], values, 'zero')
out, none = interp.feed(n_pts)
assert none is None
expected = np.full(n_pts, 10.)
assert_allclose(out, expected)
@pytest.mark.parametrize('ndim', (1, 2, 3))
def test_cola(ndim):
"""Test COLA processing."""
sfreq = 1000.
rng = np.random.RandomState(0)
def processor(x):
return (x / 2.,) # halve the signal
for n_total in (999, 1000, 1001):
signal = rng.randn(n_total)
out = rng.randn(n_total) # shouldn't matter
for _ in range(ndim - 1):
signal = signal[np.newaxis]
out = out[np.newaxis]
for n_samples in (99, 100, 101, 102,
n_total - n_total // 2 + 1, n_total):
for window in ('hann', 'bartlett', 'boxcar', 'triang'):
# A few example COLA possibilities
n_overlaps = ()
if window in ('hann', 'bartlett') or n_samples % 2 == 0:
n_overlaps += ((n_samples + 1) // 2,)
if window == 'boxcar':
n_overlaps += (0,)
for n_overlap in n_overlaps:
# can pass callable or ndarray
for storer in (out, _Storer(out)):
cola = _COLA(processor, storer, n_total, n_samples,
n_overlap, sfreq, window)
n_input = 0
# feed data in an annoying way
while n_input < n_total:
next_len = min(rng.randint(1, 30),
n_total - n_input)
cola.feed(signal[..., n_input:n_input + next_len])
n_input += next_len
assert_allclose(out, signal / 2., atol=1e-7)
|
import datetime as dt
import logging
import re
from typing import Optional, Tuple
from homeassistant.components.media_player.const import (
MEDIA_CLASS_DIRECTORY,
MEDIA_CLASS_VIDEO,
MEDIA_TYPE_VIDEO,
)
from homeassistant.components.media_player.errors import BrowseError
from homeassistant.components.media_source.const import MEDIA_MIME_TYPES
from homeassistant.components.media_source.error import MediaSourceError, Unresolvable
from homeassistant.components.media_source.models import (
BrowseMediaSource,
MediaSource,
MediaSourceItem,
PlayMedia,
)
from homeassistant.core import HomeAssistant, callback
from .const import DATA_CAMERAS, DATA_EVENTS, DOMAIN, MANUFACTURER
_LOGGER = logging.getLogger(__name__)
MIME_TYPE = "application/x-mpegURL"
class IncompatibleMediaSource(MediaSourceError):
"""Incompatible media source attributes."""
async def async_get_media_source(hass: HomeAssistant):
"""Set up Netatmo media source."""
return NetatmoSource(hass)
class NetatmoSource(MediaSource):
"""Provide Netatmo camera recordings as media sources."""
name: str = MANUFACTURER
def __init__(self, hass: HomeAssistant):
"""Initialize Netatmo source."""
super().__init__(DOMAIN)
self.hass = hass
self.events = self.hass.data[DOMAIN][DATA_EVENTS]
async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia:
"""Resolve media to a url."""
_, camera_id, event_id = async_parse_identifier(item)
url = self.events[camera_id][event_id]["media_url"]
return PlayMedia(url, MIME_TYPE)
async def async_browse_media(
self, item: MediaSourceItem, media_types: Tuple[str] = MEDIA_MIME_TYPES
) -> BrowseMediaSource:
"""Return media."""
try:
source, camera_id, event_id = async_parse_identifier(item)
except Unresolvable as err:
raise BrowseError(str(err)) from err
return self._browse_media(source, camera_id, event_id)
def _browse_media(
self, source: str, camera_id: str, event_id: int
) -> BrowseMediaSource:
"""Browse media."""
if camera_id and camera_id not in self.events:
raise BrowseError("Camera does not exist.")
if event_id and event_id not in self.events[camera_id]:
raise BrowseError("Event does not exist.")
return self._build_item_response(source, camera_id, event_id)
def _build_item_response(
self, source: str, camera_id: str, event_id: int = None
) -> BrowseMediaSource:
if event_id and event_id in self.events[camera_id]:
created = dt.datetime.fromtimestamp(event_id)
if self.events[camera_id][event_id]["type"] == "outdoor":
thumbnail = (
self.events[camera_id][event_id]["event_list"][0]
.get("snapshot", {})
.get("url")
)
message = remove_html_tags(
self.events[camera_id][event_id]["event_list"][0]["message"]
)
else:
thumbnail = (
self.events[camera_id][event_id].get("snapshot", {}).get("url")
)
message = remove_html_tags(self.events[camera_id][event_id]["message"])
title = f"{created} - {message}"
else:
title = self.hass.data[DOMAIN][DATA_CAMERAS].get(camera_id, MANUFACTURER)
thumbnail = None
if event_id:
path = f"{source}/{camera_id}/{event_id}"
else:
path = f"{source}/{camera_id}"
media_class = MEDIA_CLASS_DIRECTORY if event_id is None else MEDIA_CLASS_VIDEO
media = BrowseMediaSource(
domain=DOMAIN,
identifier=path,
media_class=media_class,
media_content_type=MEDIA_TYPE_VIDEO,
title=title,
can_play=bool(
event_id and self.events[camera_id][event_id].get("media_url")
),
can_expand=event_id is None,
thumbnail=thumbnail,
)
if not media.can_play and not media.can_expand:
_LOGGER.debug(
"Camera %s with event %s without media url found", camera_id, event_id
)
raise IncompatibleMediaSource
if not media.can_expand:
return media
media.children = []
# Append first level children
if not camera_id:
for cid in self.events:
child = self._build_item_response(source, cid)
if child:
media.children.append(child)
else:
for eid in self.events[camera_id]:
try:
child = self._build_item_response(source, camera_id, eid)
except IncompatibleMediaSource:
continue
if child:
media.children.append(child)
return media
def remove_html_tags(text):
"""Remove html tags from string."""
clean = re.compile("<.*?>")
return re.sub(clean, "", text)
@callback
def async_parse_identifier(
item: MediaSourceItem,
) -> Tuple[str, str, Optional[int]]:
"""Parse identifier."""
if not item.identifier:
return "events", "", None
source, path = item.identifier.lstrip("/").split("/", 1)
if source != "events":
raise Unresolvable("Unknown source directory.")
if "/" in path:
camera_id, event_id = path.split("/", 1)
return source, camera_id, int(event_id)
return source, path, None
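# Illustrative note (not part of the integration): identifiers handled above
# follow "events/{camera_id}/{event_id}". A hypothetical
# "events/cam-1/1577836800" parses to ("events", "cam-1", 1577836800), while
# an empty identifier yields the root listing ("events", "", None).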
|
from datetime import timedelta
import logging
import defusedxml.ElementTree as ET
import voluptuous as vol
from homeassistant.const import CONF_DOMAIN, CONF_HOST, CONF_PASSWORD
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_track_time_interval
_LOGGER = logging.getLogger(__name__)
DOMAIN = "namecheapdns"
INTERVAL = timedelta(minutes=5)
UPDATE_URL = "https://dynamicdns.park-your-domain.com/update"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_DOMAIN): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_HOST, default="@"): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
"""Initialize the namecheap DNS component."""
host = config[DOMAIN][CONF_HOST]
domain = config[DOMAIN][CONF_DOMAIN]
password = config[DOMAIN][CONF_PASSWORD]
session = async_get_clientsession(hass)
result = await _update_namecheapdns(session, host, domain, password)
if not result:
return False
async def update_domain_interval(now):
"""Update the namecheap DNS entry."""
await _update_namecheapdns(session, host, domain, password)
async_track_time_interval(hass, update_domain_interval, INTERVAL)
return result
async def _update_namecheapdns(session, host, domain, password):
"""Update namecheap DNS entry."""
params = {"host": host, "domain": domain, "password": password}
resp = await session.get(UPDATE_URL, params=params)
xml_string = await resp.text()
root = ET.fromstring(xml_string)
err_count = root.find("ErrCount").text
if int(err_count) != 0:
_LOGGER.warning("Updating namecheap domain failed: %s", domain)
return False
return True
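# Illustrative sketch (not part of the integration): the update endpoint
# replies with an XML document whose <ErrCount> element drives the check in
# _update_namecheapdns. The sample payload below is hypothetical and trimmed
# to the single field that is parsed.
_SAMPLE_RESPONSE = """<?xml version="1.0"?>
<interface-response>
  <ErrCount>0</ErrCount>
</interface-response>"""
def _sample_response_ok(xml_string=_SAMPLE_RESPONSE):
    """Mirror the ErrCount check performed in _update_namecheapdns."""
    root = ET.fromstring(xml_string)
    return int(root.find("ErrCount").text) == 0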
|
import os
from tempfile import NamedTemporaryFile
import pytest
from PIL import Image, ImageDraw
from nikola.plugins.task import scale_images
# These tests don't require valid profiles. They need only to verify
# that profile data is/isn't saved with images.
# It would be nice to use PIL.ImageCms to create valid profiles, but
# in many Pillow distributions ImageCms is a stub.
# ICC file data format specification:
# http://www.color.org/icc32.pdf
PROFILE = b"invalid profile data"
def test_handling_icc_profiles(test_images, destination_dir):
filename, expected_profile = test_images
pathname = os.path.join(str(destination_dir), filename)
assert os.path.exists(pathname), pathname
img = Image.open(pathname)
actual_profile = img.info.get("icc_profile")
assert actual_profile == expected_profile
@pytest.fixture(
params=[
pytest.param(True, id="with icc filename"),
pytest.param(False, id="without icc filename"),
]
)
def test_images(request, preserve_icc_profiles, source_dir, site):
image_filename = create_src_image(str(source_dir), request.param)
run_task(site)
if request.param:
yield image_filename, PROFILE if preserve_icc_profiles else None
else:
yield image_filename, None
@pytest.fixture(
params=[
pytest.param(True, id="profiles preserved"),
pytest.param(False, id="profiles not preserved"),
]
)
def preserve_icc_profiles(request):
return request.param
@pytest.fixture
def source_dir(tmpdir_factory):
return tmpdir_factory.mktemp("image_source")
@pytest.fixture
def site(preserve_icc_profiles, source_dir, destination_dir):
config = {
"IMAGE_FOLDERS": {str(source_dir): ""},
"OUTPUT_FOLDER": str(destination_dir),
"IMAGE_THUMBNAIL_SIZE": 128,
"IMAGE_THUMBNAIL_FORMAT": "{name}.thumbnail{ext}",
"MAX_IMAGE_SIZE": 512,
"FILTERS": {},
"PRESERVE_EXIF_DATA": False,
"EXIF_WHITELIST": {},
"PRESERVE_ICC_PROFILES": preserve_icc_profiles,
}
return FakeSite(config)
class FakeSite:
def __init__(self, config):
self.config = config
self.debug = True
@pytest.fixture
def destination_dir(tmpdir_factory):
return tmpdir_factory.mktemp("image_output")
def run_task(site):
task_instance = get_task_instance(site)
for task in task_instance.gen_tasks():
for action, args in task.get("actions", []):
action(*args)
def get_task_instance(site):
result = scale_images.ScaleImage()
result.set_site(site)
return result
def create_src_image(testdir, use_icc_profile):
img = create_test_image()
pathname = tmp_img_name(testdir)
# Test two variants: with and without an associated icc_profile
if use_icc_profile:
img.save(pathname, icc_profile=PROFILE)
else:
img.save(pathname)
return os.path.basename(pathname)
def create_test_image():
    # Make a white image with red and blue diagonal stripes.
width = 64
height = 64
img = Image.new("RGB", (width, height), (255, 255, 255))
draw = ImageDraw.Draw(img)
draw.line((0, 0, width, height), fill=(255, 128, 128))
draw.line((width, 0, 0, height), fill=(128, 128, 255))
return img
def tmp_img_name(dirname):
pathname = NamedTemporaryFile(suffix=".jpg", dir=dirname, delete=False)
return pathname.name
|
from datetime import timedelta
import logging
import async_timeout
from pyipma.api import IPMA_API
from pyipma.location import Location
import voluptuous as vol
from homeassistant.components.weather import (
ATTR_FORECAST_CONDITION,
ATTR_FORECAST_PRECIPITATION_PROBABILITY,
ATTR_FORECAST_TEMP,
ATTR_FORECAST_TEMP_LOW,
ATTR_FORECAST_TIME,
ATTR_FORECAST_WIND_BEARING,
ATTR_FORECAST_WIND_SPEED,
PLATFORM_SCHEMA,
WeatherEntity,
)
from homeassistant.const import (
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_MODE,
CONF_NAME,
TEMP_CELSIUS,
)
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv, entity_registry
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.util import Throttle
from homeassistant.util.dt import now, parse_datetime
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Instituto Português do Mar e Atmosfera"
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=30)
CONDITION_CLASSES = {
"cloudy": [4, 5, 24, 25, 27],
"fog": [16, 17, 26],
"hail": [21, 22],
"lightning": [19],
"lightning-rainy": [20, 23],
"partlycloudy": [2, 3],
"pouring": [8, 11],
"rainy": [6, 7, 9, 10, 12, 13, 14, 15],
"snowy": [18],
"snowy-rainy": [],
"sunny": [1],
"windy": [],
"windy-variant": [],
"exceptional": [],
}
FORECAST_MODE = ["hourly", "daily"]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_LATITUDE): cv.latitude,
vol.Optional(CONF_LONGITUDE): cv.longitude,
vol.Optional(CONF_MODE, default="daily"): vol.In(FORECAST_MODE),
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the ipma platform.
Deprecated.
"""
_LOGGER.warning("Loading IPMA via platform config is deprecated")
latitude = config.get(CONF_LATITUDE, hass.config.latitude)
longitude = config.get(CONF_LONGITUDE, hass.config.longitude)
if None in (latitude, longitude):
_LOGGER.error("Latitude or longitude not set in Home Assistant config")
return
api = await async_get_api(hass)
location = await async_get_location(hass, api, latitude, longitude)
async_add_entities([IPMAWeather(location, api, config)], True)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Add a weather entity from a config_entry."""
latitude = config_entry.data[CONF_LATITUDE]
longitude = config_entry.data[CONF_LONGITUDE]
mode = config_entry.data[CONF_MODE]
api = await async_get_api(hass)
location = await async_get_location(hass, api, latitude, longitude)
# Migrate old unique_id
@callback
def _async_migrator(entity_entry: entity_registry.RegistryEntry):
# Reject if new unique_id
if entity_entry.unique_id.count(",") == 2:
return None
new_unique_id = (
f"{location.station_latitude}, {location.station_longitude}, {mode}"
)
_LOGGER.info(
"Migrating unique_id from [%s] to [%s]",
entity_entry.unique_id,
new_unique_id,
)
return {"new_unique_id": new_unique_id}
await entity_registry.async_migrate_entries(
hass, config_entry.entry_id, _async_migrator
)
async_add_entities([IPMAWeather(location, api, config_entry.data)], True)
async def async_get_api(hass):
"""Get the pyipma api object."""
websession = async_get_clientsession(hass)
return IPMA_API(websession)
async def async_get_location(hass, api, latitude, longitude):
"""Retrieve pyipma location, location name to be used as the entity name."""
with async_timeout.timeout(30):
location = await Location.get(api, float(latitude), float(longitude))
_LOGGER.debug(
"Initializing for coordinates %s, %s -> station %s (%d, %d)",
latitude,
longitude,
location.station,
location.id_station,
location.global_id_local,
)
return location
class IPMAWeather(WeatherEntity):
"""Representation of a weather condition."""
def __init__(self, location: Location, api: IPMA_API, config):
"""Initialise the platform with a data instance and station name."""
self._api = api
self._location_name = config.get(CONF_NAME, location.name)
self._mode = config.get(CONF_MODE)
self._location = location
self._observation = None
self._forecast = None
@Throttle(MIN_TIME_BETWEEN_UPDATES)
async def async_update(self):
"""Update Condition and Forecast."""
with async_timeout.timeout(10):
new_observation = await self._location.observation(self._api)
new_forecast = await self._location.forecast(self._api)
if new_observation:
self._observation = new_observation
else:
_LOGGER.warning("Could not update weather observation")
if new_forecast:
self._forecast = new_forecast
else:
_LOGGER.warning("Could not update weather forecast")
_LOGGER.debug(
"Updated location %s, observation %s",
self._location.name,
self._observation,
)
@property
def unique_id(self) -> str:
"""Return a unique id."""
return f"{self._location.station_latitude}, {self._location.station_longitude}, {self._mode}"
@property
def attribution(self):
"""Return the attribution."""
return ATTRIBUTION
@property
def name(self):
"""Return the name of the station."""
return self._location_name
@property
def condition(self):
"""Return the current condition."""
if not self._forecast:
return
return next(
(
k
for k, v in CONDITION_CLASSES.items()
if self._forecast[0].weather_type in v
),
None,
)
@property
def temperature(self):
"""Return the current temperature."""
if not self._observation:
return None
return self._observation.temperature
@property
def pressure(self):
"""Return the current pressure."""
if not self._observation:
return None
return self._observation.pressure
@property
def humidity(self):
"""Return the name of the sensor."""
if not self._observation:
return None
return self._observation.humidity
@property
def wind_speed(self):
"""Return the current windspeed."""
if not self._observation:
return None
return self._observation.wind_intensity_km
@property
def wind_bearing(self):
"""Return the current wind bearing (degrees)."""
if not self._observation:
return None
return self._observation.wind_direction
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return TEMP_CELSIUS
@property
def forecast(self):
"""Return the forecast array."""
if not self._forecast:
return []
if self._mode == "hourly":
forecast_filtered = [
x
for x in self._forecast
if x.forecasted_hours == 1
and parse_datetime(x.forecast_date)
> (now().utcnow() - timedelta(hours=1))
]
fcdata_out = [
{
ATTR_FORECAST_TIME: data_in.forecast_date,
ATTR_FORECAST_CONDITION: next(
(
k
for k, v in CONDITION_CLASSES.items()
if int(data_in.weather_type) in v
),
None,
),
ATTR_FORECAST_TEMP: float(data_in.feels_like_temperature),
ATTR_FORECAST_PRECIPITATION_PROBABILITY: (
int(float(data_in.precipitation_probability))
if int(float(data_in.precipitation_probability)) >= 0
else None
),
ATTR_FORECAST_WIND_SPEED: data_in.wind_strength,
ATTR_FORECAST_WIND_BEARING: data_in.wind_direction,
}
for data_in in forecast_filtered
]
else:
forecast_filtered = [f for f in self._forecast if f.forecasted_hours == 24]
fcdata_out = [
{
ATTR_FORECAST_TIME: data_in.forecast_date,
ATTR_FORECAST_CONDITION: next(
(
k
for k, v in CONDITION_CLASSES.items()
if int(data_in.weather_type) in v
),
None,
),
ATTR_FORECAST_TEMP_LOW: data_in.min_temperature,
ATTR_FORECAST_TEMP: data_in.max_temperature,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: data_in.precipitation_probability,
ATTR_FORECAST_WIND_SPEED: data_in.wind_strength,
ATTR_FORECAST_WIND_BEARING: data_in.wind_direction,
}
for data_in in forecast_filtered
]
return fcdata_out
|
import os
import unittest
import mock
from perfkitbenchmarker import test_util
from perfkitbenchmarker.linux_benchmarks import mnist_benchmark
from perfkitbenchmarker.linux_benchmarks import resnet_benchmark
from perfkitbenchmarker.sample import Sample
class ResNetBenchmarkTestCase(unittest.TestCase, test_util.SamplesTestMixin):
def setUp(self):
path = os.path.join(os.path.dirname(__file__), '..', 'data',
'resnet_output.txt')
with open(path) as fp:
self.contents = fp.read()
self.metadata_input = {'num_examples_per_epoch': 1251.1,
'train_batch_size': 1024}
self.metadata_output = {'epoch': 4.000479577971386, 'elapsed_seconds': 0,
'train_batch_size': 1024,
'num_examples_per_epoch': 1251.1, 'step': 5005}
@mock.patch('time.time', mock.MagicMock(return_value=0))
def testTrainResults(self):
samples = mnist_benchmark.MakeSamplesFromTrainOutput(
self.metadata_input, self.contents, 0, 5005)
golden = [
Sample('Loss', 3.6859958, '', self.metadata_output),
Sample('Global Steps Per Second', 3.6699466666666667,
'global_steps/sec', self.metadata_output),
Sample('Examples Per Second', 3758.023333333333,
'examples/sec', self.metadata_output)
]
self.assertEqual(samples, golden)
@mock.patch('time.time', mock.MagicMock(return_value=0))
def testEvalResults(self):
samples = resnet_benchmark.MakeSamplesFromEvalOutput(
self.metadata_input, self.contents, 0)
golden = [
Sample('Eval Loss', 3.86324, '', self.metadata_output),
Sample('Top 1 Accuracy', 32.751465, '%', self.metadata_output),
Sample('Top 5 Accuracy', 58.825684, '%', self.metadata_output)
]
self.assertEqual(samples, golden)
if __name__ == '__main__':
unittest.main()
|
from django.urls import reverse
from weblate.trans.models import Comment
from weblate.trans.tests.test_views import FixtureTestCase
class CommentViewTest(FixtureTestCase):
def setUp(self):
super().setUp()
self.translation = self.component.translation_set.get(language_code="cs")
def test_add_target_comment(self):
unit = self.get_unit()
# Add comment
response = self.client.post(
reverse("comment", kwargs={"pk": unit.id}),
{"comment": "New target testing comment", "scope": "translation"},
)
self.assertRedirects(response, unit.get_absolute_url())
# Check it is shown on page
response = self.client.get(unit.get_absolute_url())
self.assertContains(response, "New target testing comment")
# Reload from database
unit = self.get_unit()
translation = self.component.translation_set.get(language_code="cs")
# Check number of comments
self.assertTrue(unit.has_comment)
self.assertEqual(translation.stats.comments, 1)
def test_add_source_comment(self):
unit = self.get_unit()
# Add comment
response = self.client.post(
reverse("comment", kwargs={"pk": unit.id}),
{"comment": "New source testing comment", "scope": "global"},
)
self.assertRedirects(response, unit.get_absolute_url())
# Check it is shown on page
response = self.client.get(unit.get_absolute_url())
self.assertContains(response, "New source testing comment")
# Reload from database
unit = self.get_unit()
translation = self.component.translation_set.get(language_code="cs")
# Check number of comments
self.assertFalse(unit.has_comment)
self.assertEqual(translation.stats.comments, 0)
def test_add_source_report(self):
unit = self.get_unit()
# Add comment
response = self.client.post(
reverse("comment", kwargs={"pk": unit.id}),
{"comment": "New issue testing comment", "scope": "report"},
)
self.assertRedirects(response, unit.get_absolute_url())
        # Check it is not shown on page
        response = self.client.get(unit.get_absolute_url())
        self.assertNotContains(response, "New issue testing comment")
# Enable reviews
self.project.source_review = True
self.project.save(update_fields=["source_review"])
# Add comment
response = self.client.post(
reverse("comment", kwargs={"pk": unit.id}),
{"comment": "New issue testing comment", "scope": "report"},
)
self.assertRedirects(response, unit.get_absolute_url())
# Check it is shown on page
response = self.client.get(unit.get_absolute_url())
self.assertContains(response, "New issue testing comment")
self.assertContains(response, "Source needs review")
# Reload from database
unit = self.get_unit()
translation = self.component.translation_set.get(language_code="cs")
# Check number of comments
self.assertFalse(unit.has_comment)
self.assertEqual(translation.stats.comments, 0)
def test_delete_comment(self, **kwargs):
unit = self.get_unit()
self.make_manager()
# Add comment
response = self.client.post(
reverse("comment", kwargs={"pk": unit.id}),
{"comment": "New target testing comment", "scope": "translation"},
)
comment = Comment.objects.all()[0]
response = self.client.post(
reverse("delete-comment", kwargs={"pk": comment.pk}), kwargs
)
self.assertRedirects(response, unit.get_absolute_url())
def test_spam_comment(self):
self.test_delete_comment(spam=1)
def test_resolve_comment(self):
unit = self.get_unit()
self.make_manager()
# Add comment
response = self.client.post(
reverse("comment", kwargs={"pk": unit.id}),
{"comment": "New target testing comment", "scope": "translation"},
)
comment = Comment.objects.all()[0]
response = self.client.post(
reverse("resolve-comment", kwargs={"pk": comment.pk})
)
self.assertRedirects(response, unit.get_absolute_url())
comment.refresh_from_db()
self.assertTrue(comment.resolved)
self.assertFalse(comment.unit.has_comment)
|
import logging
import unittest
import numpy as np
from gensim.corpora.dictionary import Dictionary
from gensim.topic_coherence import indirect_confirmation_measure
from gensim.topic_coherence import text_analysis
class TestIndirectConfirmation(unittest.TestCase):
def setUp(self):
# Set up toy example for better understanding and testing
# of this module. See the modules for the mathematical formulas
self.topics = [np.array([1, 2])]
# Result from s_one_set segmentation:
self.segmentation = [[(1, np.array([1, 2])), (2, np.array([1, 2]))]]
self.gamma = 1
self.measure = 'nlr'
self.dictionary = Dictionary()
self.dictionary.id2token = {1: 'fake', 2: 'tokens'}
def testCosineSimilarity(self):
"""Test cosine_similarity()"""
accumulator = text_analysis.InvertedIndexAccumulator({1, 2}, self.dictionary)
accumulator._inverted_index = {0: {2, 3, 4}, 1: {3, 5}}
accumulator._num_docs = 5
obtained = indirect_confirmation_measure.cosine_similarity(
self.segmentation, accumulator, self.topics, self.measure, self.gamma)
# The steps involved in this calculation are as follows:
        # 1. Take (1, array([1, 2])). Take w' which is 1.
# 2. Calculate nlr(1, 1), nlr(1, 2). This is our first vector.
# 3. Take w* which is array([1, 2]).
# 4. Calculate nlr(1, 1) + nlr(2, 1). Calculate nlr(1, 2), nlr(2, 2). This is our second vector.
# 5. Find out cosine similarity between these two vectors.
# 6. Similarly for the second segmentation.
expected = (0.6230 + 0.6230) / 2. # To account for EPSILON approximation
self.assertAlmostEqual(expected, obtained[0], 4)
mean, std = indirect_confirmation_measure.cosine_similarity(
self.segmentation, accumulator, self.topics, self.measure, self.gamma,
with_std=True)[0]
self.assertAlmostEqual(expected, mean, 4)
self.assertAlmostEqual(0.0, std, 1)
def testWord2VecSimilarity(self):
"""Sanity check word2vec_similarity."""
accumulator = text_analysis.WordVectorsAccumulator({1, 2}, self.dictionary)
accumulator.accumulate([
['fake', 'tokens'],
['tokens', 'fake']
], 5)
mean, std = indirect_confirmation_measure.word2vec_similarity(
self.segmentation, accumulator, with_std=True)[0]
self.assertNotEqual(0.0, mean)
self.assertNotEqual(0.0, std)
if __name__ == '__main__':
logging.root.setLevel(logging.WARNING)
unittest.main()
|
import asyncio
from datetime import timedelta
import logging
from canary.api import Api
from requests import ConnectTimeout, HTTPError
import voluptuous as vol
from homeassistant.components.camera.const import DOMAIN as CAMERA_DOMAIN
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import CONF_PASSWORD, CONF_TIMEOUT, CONF_USERNAME
from homeassistant.exceptions import ConfigEntryNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.typing import HomeAssistantType
from .const import (
CONF_FFMPEG_ARGUMENTS,
DATA_COORDINATOR,
DATA_UNDO_UPDATE_LISTENER,
DEFAULT_FFMPEG_ARGUMENTS,
DEFAULT_TIMEOUT,
DOMAIN,
)
from .coordinator import CanaryDataUpdateCoordinator
_LOGGER = logging.getLogger(__name__)
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=30)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
}
)
},
extra=vol.ALLOW_EXTRA,
)
PLATFORMS = ["alarm_control_panel", "camera", "sensor"]
async def async_setup(hass: HomeAssistantType, config: dict) -> bool:
"""Set up the Canary integration."""
hass.data.setdefault(DOMAIN, {})
if hass.config_entries.async_entries(DOMAIN):
return True
ffmpeg_arguments = DEFAULT_FFMPEG_ARGUMENTS
if CAMERA_DOMAIN in config:
camera_config = next(
(item for item in config[CAMERA_DOMAIN] if item["platform"] == DOMAIN),
None,
)
if camera_config:
ffmpeg_arguments = camera_config.get(
CONF_FFMPEG_ARGUMENTS, DEFAULT_FFMPEG_ARGUMENTS
)
if DOMAIN in config:
if ffmpeg_arguments != DEFAULT_FFMPEG_ARGUMENTS:
config[DOMAIN][CONF_FFMPEG_ARGUMENTS] = ffmpeg_arguments
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data=config[DOMAIN],
)
)
return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool:
"""Set up Canary from a config entry."""
if not entry.options:
options = {
CONF_FFMPEG_ARGUMENTS: entry.data.get(
CONF_FFMPEG_ARGUMENTS, DEFAULT_FFMPEG_ARGUMENTS
),
CONF_TIMEOUT: entry.data.get(CONF_TIMEOUT, DEFAULT_TIMEOUT),
}
hass.config_entries.async_update_entry(entry, options=options)
try:
canary_api = await hass.async_add_executor_job(_get_canary_api_instance, entry)
except (ConnectTimeout, HTTPError) as error:
_LOGGER.error("Unable to connect to Canary service: %s", str(error))
raise ConfigEntryNotReady from error
coordinator = CanaryDataUpdateCoordinator(hass, api=canary_api)
await coordinator.async_refresh()
if not coordinator.last_update_success:
raise ConfigEntryNotReady
undo_listener = entry.add_update_listener(_async_update_listener)
hass.data[DOMAIN][entry.entry_id] = {
DATA_COORDINATOR: coordinator,
DATA_UNDO_UPDATE_LISTENER: undo_listener,
}
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_unload_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN][entry.entry_id][DATA_UNDO_UPDATE_LISTENER]()
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
async def _async_update_listener(hass: HomeAssistantType, entry: ConfigEntry) -> None:
"""Handle options update."""
await hass.config_entries.async_reload(entry.entry_id)
def _get_canary_api_instance(entry: ConfigEntry) -> Api:
"""Initialize a new instance of CanaryApi."""
canary = Api(
entry.data[CONF_USERNAME],
entry.data[CONF_PASSWORD],
entry.options.get(CONF_TIMEOUT, DEFAULT_TIMEOUT),
)
return canary
|
import argparse
import matplotlib.pyplot as plt
import chainer
from chainercv.datasets import voc_bbox_label_names
from chainercv.links import SSD300
from chainercv.links import SSD512
from chainercv import utils
from chainercv.visualizations import vis_bbox
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
'--model', choices=('ssd300', 'ssd512'), default='ssd300')
parser.add_argument('--gpu', type=int, default=-1)
parser.add_argument('--pretrained-model')
parser.add_argument(
'--dataset', choices=('voc',), default='voc')
parser.add_argument('image')
args = parser.parse_args()
if args.model == 'ssd300':
cls = SSD300
elif args.model == 'ssd512':
cls = SSD512
if args.dataset == 'voc':
if args.pretrained_model is None:
args.pretrained_model = 'voc0712'
label_names = voc_bbox_label_names
model = cls(n_fg_class=len(label_names),
pretrained_model=args.pretrained_model)
if args.gpu >= 0:
chainer.cuda.get_device_from_id(args.gpu).use()
model.to_gpu()
img = utils.read_image(args.image, color=True)
bboxes, labels, scores = model.predict([img])
bbox, label, score = bboxes[0], labels[0], scores[0]
vis_bbox(
img, bbox, label, score, label_names=label_names)
plt.show()
if __name__ == '__main__':
main()
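# Illustrative usage note (not part of the original script; the file name is
# hypothetical): detection on CPU with the pretrained VOC weights would look
# like
#   python demo_ssd.py --model ssd300 --gpu -1 path/to/image.jpg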
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from nets import nets_factory
slim = tf.contrib.slim
class NetworksTest(tf.test.TestCase):
def testGetNetworkFn(self):
batch_size = 5
num_classes = 1000
for net in nets_factory.networks_map:
with self.test_session():
net_fn = nets_factory.get_network_fn(net, num_classes)
# Most networks use 224 as their default_image_size
image_size = getattr(net_fn, 'default_image_size', 224)
inputs = tf.random_uniform((batch_size, image_size, image_size, 3))
logits, end_points = net_fn(inputs)
self.assertTrue(isinstance(logits, tf.Tensor))
self.assertTrue(isinstance(end_points, dict))
self.assertEqual(logits.get_shape().as_list()[0], batch_size)
self.assertEqual(logits.get_shape().as_list()[-1], num_classes)
def testGetNetworkFnArgScope(self):
batch_size = 5
num_classes = 10
net = 'cifarnet'
with self.test_session(use_gpu=True):
net_fn = nets_factory.get_network_fn(net, num_classes)
image_size = getattr(net_fn, 'default_image_size', 224)
with slim.arg_scope([slim.model_variable, slim.variable],
device='/CPU:0'):
inputs = tf.random_uniform((batch_size, image_size, image_size, 3))
net_fn(inputs)
weights = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, 'CifarNet/conv1')[0]
self.assertDeviceEqual('/CPU:0', weights.device)
if __name__ == '__main__':
tf.test.main()
|
from homeassistant.components.fan import (
ATTR_DIRECTION,
ATTR_OSCILLATING,
ATTR_SPEED,
DOMAIN,
SERVICE_OSCILLATE,
SERVICE_SET_DIRECTION,
SERVICE_SET_SPEED,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ENTITY_MATCH_ALL,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
)
async def async_turn_on(hass, entity_id=ENTITY_MATCH_ALL, speed: str = None) -> None:
"""Turn all or specified fan on."""
data = {
key: value
for key, value in [(ATTR_ENTITY_ID, entity_id), (ATTR_SPEED, speed)]
if value is not None
}
await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data, blocking=True)
async def async_turn_off(hass, entity_id=ENTITY_MATCH_ALL) -> None:
"""Turn all or specified fan off."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
await hass.services.async_call(DOMAIN, SERVICE_TURN_OFF, data, blocking=True)
async def async_oscillate(
hass, entity_id=ENTITY_MATCH_ALL, should_oscillate: bool = True
) -> None:
"""Set oscillation on all or specified fan."""
data = {
key: value
for key, value in [
(ATTR_ENTITY_ID, entity_id),
(ATTR_OSCILLATING, should_oscillate),
]
if value is not None
}
await hass.services.async_call(DOMAIN, SERVICE_OSCILLATE, data, blocking=True)
async def async_set_speed(hass, entity_id=ENTITY_MATCH_ALL, speed: str = None) -> None:
"""Set speed for all or specified fan."""
data = {
key: value
for key, value in [(ATTR_ENTITY_ID, entity_id), (ATTR_SPEED, speed)]
if value is not None
}
await hass.services.async_call(DOMAIN, SERVICE_SET_SPEED, data, blocking=True)
async def async_set_direction(
hass, entity_id=ENTITY_MATCH_ALL, direction: str = None
) -> None:
"""Set direction for all or specified fan."""
data = {
key: value
for key, value in [(ATTR_ENTITY_ID, entity_id), (ATTR_DIRECTION, direction)]
if value is not None
}
await hass.services.async_call(DOMAIN, SERVICE_SET_DIRECTION, data, blocking=True)
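# Illustrative sketch (not part of the original helpers): how a test might
# drive these wrappers; "fan.demo" is a hypothetical entity id and ``hass``
# is assumed to come from the test fixture in use.
async def _example_fan_flow(hass) -> None:
    await async_turn_on(hass, "fan.demo", speed="low")
    await async_oscillate(hass, "fan.demo", should_oscillate=True)
    await async_set_direction(hass, "fan.demo", direction="forward")
    await async_turn_off(hass, "fan.demo")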
|
import os
from contextlib import contextmanager
from yapsy.PluginManager import PluginManager
import nikola.utils
import nikola.shortcodes
from nikola.plugin_categories import (
Command,
Task,
LateTask,
TemplateSystem,
PageCompiler,
TaskMultiplier,
CompilerExtension,
MarkdownExtension,
RestExtension,
)
__all__ = ["cd", "FakeSite"]
@contextmanager
def cd(path):
old_dir = os.getcwd()
os.chdir(path)
yield
os.chdir(old_dir)
class FakeSite:
def __init__(self):
self.template_system = self
self.invariant = False
self.debug = True
self.config = {
"DISABLED_PLUGINS": [],
"EXTRA_PLUGINS": [],
"DEFAULT_LANG": "en",
"MARKDOWN_EXTENSIONS": [
"markdown.extensions.fenced_code",
"markdown.extensions.codehilite",
],
"TRANSLATIONS_PATTERN": "{path}.{lang}.{ext}",
"LISTINGS_FOLDERS": {"listings": "listings"},
"TRANSLATIONS": {"en": ""},
}
self.EXTRA_PLUGINS = self.config["EXTRA_PLUGINS"]
self.plugin_manager = PluginManager(
categories_filter={
"Command": Command,
"Task": Task,
"LateTask": LateTask,
"TemplateSystem": TemplateSystem,
"PageCompiler": PageCompiler,
"TaskMultiplier": TaskMultiplier,
"CompilerExtension": CompilerExtension,
"MarkdownExtension": MarkdownExtension,
"RestExtension": RestExtension,
}
)
self.shortcode_registry = {}
self.plugin_manager.setPluginInfoExtension("plugin")
places = [os.path.join(os.path.dirname(nikola.utils.__file__), "plugins")]
self.plugin_manager.setPluginPlaces(places)
self.plugin_manager.collectPlugins()
self.compiler_extensions = self._activate_plugins_of_category(
"CompilerExtension"
)
self.timeline = [FakePost(title="Fake post", slug="fake-post")]
self.rst_transforms = []
self.post_per_input_file = {}
# This is to make plugin initialization happy
self.template_system = self
self.name = "mako"
def _activate_plugins_of_category(self, category):
"""Activate all the plugins of a given category and return them."""
# this code duplicated in nikola/nikola.py
plugins = []
for plugin_info in self.plugin_manager.getPluginsOfCategory(category):
if plugin_info.name in self.config.get("DISABLED_PLUGINS"):
self.plugin_manager.removePluginFromCategory(plugin_info, category)
else:
self.plugin_manager.activatePluginByName(plugin_info.name)
plugin_info.plugin_object.set_site(self)
plugins.append(plugin_info)
return plugins
def render_template(self, name, _, context):
return '<img src="IMG.jpg">'
# this code duplicated in nikola/nikola.py
def register_shortcode(self, name, f):
"""Register function f to handle shortcode "name"."""
if name in self.shortcode_registry:
nikola.utils.LOGGER.warning('Shortcode name conflict: %s', name)
return
self.shortcode_registry[name] = f
def apply_shortcodes(self, data, *a, **kw):
"""Apply shortcodes from the registry on data."""
return nikola.shortcodes.apply_shortcodes(data, self.shortcode_registry, **kw)
def apply_shortcodes_uuid(self, data, shortcodes, *a, **kw):
"""Apply shortcodes from the registry on data."""
return nikola.shortcodes.apply_shortcodes(data, self.shortcode_registry, **kw)
class FakePost:
def __init__(self, title, slug):
self._title = title
self._slug = slug
self._meta = {"slug": slug}
def title(self):
return self._title
def meta(self, key):
return self._meta[key]
def permalink(self):
return "/posts/" + self._slug
|