"""Tools for multi-streams"""
from itertools import product
from typing import Mapping, Iterable, Any, Optional, Callable
import heapq
from dataclasses import dataclass
from functools import partial
from operator import itemgetter
from creek.util import Pipe, identity_func
StreamsMap = Mapping[Any, Iterable] # a map of {name: stream} pairs
@dataclass
class MergedStreams:
"""Creates an iterable of (stream_id, stream_item) pairs from a stream Mapping,
that is, {stream_id: stream, ...}.
The stream_item will be yield in sorted order.
Sort behavior can be modified by the ``sort_key`` argument which behaves like ``key``
arguments of built-in like ``sorted``, ``heapq.merge``, ``itertools.groupby``, etc.
If given, the `sort_key` function applies to ``stream_item`` (not to ``stream_id``).
Important: To function as expected, the streams should be already sorted (according
to the ``sort_key`` order).
The cannonical use case of this function is to "flatten", or "weave together"
multiple streams of timestamped data. We're given several streams that provide
``(timestamp, data)`` items (where timestamps arrive in order within each stream)
and we get a single stream of ``(stream_id, (timestamp, data))`` items where
the ``timestamp``s are yield in sorted order.
The following example uses a dict whose values are fixed-size lists as the
``streams_map``, but in general the ``streams_map`` will be a ``Mapping`` (not
necessarily a dict) whose values are potentially boundless streams.
>>> streams_map = {
... 'hello': [(2, 'two'), (3, 'three'), (5, 'five')],
... 'world': [(0, 'zero'), (1, 'one'), (3, 'three'), (6, 'six')]
... }
>>> streams_items = MergedStreams(streams_map)
>>> it = iter(streams_items)
>>> list(it) # doctest: +NORMALIZE_WHITESPACE
[('world', (0, 'zero')),
('world', (1, 'one')),
('hello', (2, 'two')),
('hello', (3, 'three')),
('world', (3, 'three')),
('hello', (5, 'five')),
('world', (6, 'six'))]
"""
streams_map: StreamsMap
sort_key: Optional[Callable] = None
def __post_init__(self):
if self.sort_key is None:
self.effective_sort_key = itemgetter(1)
else:
self.effective_sort_key = Pipe(itemgetter(1), self.sort_key)
def __iter__(self):
for item in heapq.merge(
*multi_stream_items(self.streams_map), key=self.effective_sort_key
):
yield item
def multi_stream_items(streams_map: StreamsMap):
"""Provides a iterable of (k1, v1_1), (k1, v1_2), ...
>>> streams_map = {'hello': 'abc', 'world': [1, 2]}
>>> hello_items, world_items = multi_stream_items(streams_map)
>>> list(hello_items)
[('hello', 'a'), ('hello', 'b'), ('hello', 'c')]
>>> list(world_items)
[('world', 1), ('world', 2)]
"""
for stream_id, stream in streams_map.items():
yield product([stream_id], stream)
def transform_methods(cls, method_trans=staticmethod):
"""Applies method_trans to all the methods of `cls`
>>> from functools import partial
>>> staticmethods = partial(transform_methods, method_trans=staticmethod)
Now ``staticmethods`` is a class decorator that can be used to define all
methods as staticmethods in bulk:
>>> @staticmethods
... class C:
... foo = lambda x: x + 1
... bar = lambda y: y * 2
>>> c = C()
>>> c.foo(6)
7
>>> c.bar(6)
12
"""
for attr_name in vars(cls):
attr = getattr(cls, attr_name)
if callable(attr):
setattr(cls, attr_name, method_trans(attr))
return cls
staticmethods = partial(transform_methods, method_trans=staticmethod)
@staticmethods
class SortKeys:
last_item = itemgetter(-1)
second_item = itemgetter(1)
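# A minimal usage sketch (hypothetical data): merge two pre-sorted streams by the
# second element of each item, using one of the bulk-defined sort keys above.
# >>> streams = {'a': [('x', 1), ('y', 3)], 'b': [('z', 2)]}
# >>> list(MergedStreams(streams, sort_key=SortKeys.second_item))
# [('a', ('x', 1)), ('b', ('z', 2)), ('a', ('y', 3))]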
|
# coding: utf-8
"""
蓝鲸用户管理 API
蓝鲸用户管理后台服务 API # noqa: E501
OpenAPI spec version: v2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class CreateCategory(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'id': 'int',
'configured': 'bool',
'syncing': 'bool',
'unfilled_namespaces': 'list[str]',
'display_name': 'str',
'domain': 'str',
'create_time': 'datetime',
'update_time': 'datetime',
'type': 'str',
'description': 'str',
'default': 'bool',
'enabled': 'bool',
'status': 'str',
'order': 'int',
'last_synced_time': 'datetime'
}
attribute_map = {
'id': 'id',
'configured': 'configured',
'syncing': 'syncing',
'unfilled_namespaces': 'unfilled_namespaces',
'display_name': 'display_name',
'domain': 'domain',
'create_time': 'create_time',
'update_time': 'update_time',
'type': 'type',
'description': 'description',
'default': 'default',
'enabled': 'enabled',
'status': 'status',
'order': 'order',
'last_synced_time': 'last_synced_time'
}
def __init__(self, id=None, configured=None, syncing=None, unfilled_namespaces=None, display_name=None, domain=None, create_time=None, update_time=None, type=None, description=None, default=None, enabled=None, status=None, order=None, last_synced_time=None): # noqa: E501
"""CreateCategory - a model defined in Swagger""" # noqa: E501
self._id = None
self._configured = None
self._syncing = None
self._unfilled_namespaces = None
self._display_name = None
self._domain = None
self._create_time = None
self._update_time = None
self._type = None
self._description = None
self._default = None
self._enabled = None
self._status = None
self._order = None
self._last_synced_time = None
self.discriminator = None
if id is not None:
self.id = id
if configured is not None:
self.configured = configured
if syncing is not None:
self.syncing = syncing
if unfilled_namespaces is not None:
self.unfilled_namespaces = unfilled_namespaces
self.display_name = display_name
self.domain = domain
if create_time is not None:
self.create_time = create_time
if update_time is not None:
self.update_time = update_time
self.type = type
if description is not None:
self.description = description
if default is not None:
self.default = default
if enabled is not None:
self.enabled = enabled
if status is not None:
self.status = status
if order is not None:
self.order = order
if last_synced_time is not None:
self.last_synced_time = last_synced_time
@property
def id(self):
"""Gets the id of this CreateCategory. # noqa: E501
:return: The id of this CreateCategory. # noqa: E501
:rtype: int
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this CreateCategory.
:param id: The id of this CreateCategory. # noqa: E501
:type: int
"""
self._id = id
@property
def configured(self):
"""Gets the configured of this CreateCategory. # noqa: E501
:return: The configured of this CreateCategory. # noqa: E501
:rtype: bool
"""
return self._configured
@configured.setter
def configured(self, configured):
"""Sets the configured of this CreateCategory.
:param configured: The configured of this CreateCategory. # noqa: E501
:type: bool
"""
self._configured = configured
@property
def syncing(self):
"""Gets the syncing of this CreateCategory. # noqa: E501
:return: The syncing of this CreateCategory. # noqa: E501
:rtype: bool
"""
return self._syncing
@syncing.setter
def syncing(self, syncing):
"""Sets the syncing of this CreateCategory.
:param syncing: The syncing of this CreateCategory. # noqa: E501
:type: bool
"""
self._syncing = syncing
@property
def unfilled_namespaces(self):
"""Gets the unfilled_namespaces of this CreateCategory. # noqa: E501
:return: The unfilled_namespaces of this CreateCategory. # noqa: E501
:rtype: list[str]
"""
return self._unfilled_namespaces
@unfilled_namespaces.setter
def unfilled_namespaces(self, unfilled_namespaces):
"""Sets the unfilled_namespaces of this CreateCategory.
:param unfilled_namespaces: The unfilled_namespaces of this CreateCategory. # noqa: E501
:type: list[str]
"""
self._unfilled_namespaces = unfilled_namespaces
@property
def display_name(self):
"""Gets the display_name of this CreateCategory. # noqa: E501
:return: The display_name of this CreateCategory. # noqa: E501
:rtype: str
"""
return self._display_name
@display_name.setter
def display_name(self, display_name):
"""Sets the display_name of this CreateCategory.
:param display_name: The display_name of this CreateCategory. # noqa: E501
:type: str
"""
if display_name is None:
raise ValueError("Invalid value for `display_name`, must not be `None`") # noqa: E501
self._display_name = display_name
@property
def domain(self):
"""Gets the domain of this CreateCategory. # noqa: E501
:return: The domain of this CreateCategory. # noqa: E501
:rtype: str
"""
return self._domain
@domain.setter
def domain(self, domain):
"""Sets the domain of this CreateCategory.
:param domain: The domain of this CreateCategory. # noqa: E501
:type: str
"""
if domain is None:
raise ValueError("Invalid value for `domain`, must not be `None`") # noqa: E501
self._domain = domain
@property
def create_time(self):
"""Gets the create_time of this CreateCategory. # noqa: E501
:return: The create_time of this CreateCategory. # noqa: E501
:rtype: datetime
"""
return self._create_time
@create_time.setter
def create_time(self, create_time):
"""Sets the create_time of this CreateCategory.
:param create_time: The create_time of this CreateCategory. # noqa: E501
:type: datetime
"""
self._create_time = create_time
@property
def update_time(self):
"""Gets the update_time of this CreateCategory. # noqa: E501
:return: The update_time of this CreateCategory. # noqa: E501
:rtype: datetime
"""
return self._update_time
@update_time.setter
def update_time(self, update_time):
"""Sets the update_time of this CreateCategory.
:param update_time: The update_time of this CreateCategory. # noqa: E501
:type: datetime
"""
self._update_time = update_time
@property
def type(self):
"""Gets the type of this CreateCategory. # noqa: E501
:return: The type of this CreateCategory. # noqa: E501
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this CreateCategory.
:param type: The type of this CreateCategory. # noqa: E501
:type: str
"""
if type is None:
raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501
allowed_values = ["local", "mad", "ldap", "tof", "custom", "pluggable"] # noqa: E501
if type not in allowed_values:
raise ValueError(
"Invalid value for `type` ({0}), must be one of {1}" # noqa: E501
.format(type, allowed_values)
)
self._type = type
@property
def description(self):
"""Gets the description of this CreateCategory. # noqa: E501
:return: The description of this CreateCategory. # noqa: E501
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""Sets the description of this CreateCategory.
:param description: The description of this CreateCategory. # noqa: E501
:type: str
"""
self._description = description
@property
def default(self):
"""Gets the default of this CreateCategory. # noqa: E501
:return: The default of this CreateCategory. # noqa: E501
:rtype: bool
"""
return self._default
@default.setter
def default(self, default):
"""Sets the default of this CreateCategory.
:param default: The default of this CreateCategory. # noqa: E501
:type: bool
"""
self._default = default
@property
def enabled(self):
"""Gets the enabled of this CreateCategory. # noqa: E501
:return: The enabled of this CreateCategory. # noqa: E501
:rtype: bool
"""
return self._enabled
@enabled.setter
def enabled(self, enabled):
"""Sets the enabled of this CreateCategory.
:param enabled: The enabled of this CreateCategory. # noqa: E501
:type: bool
"""
self._enabled = enabled
@property
def status(self):
"""Gets the status of this CreateCategory. # noqa: E501
:return: The status of this CreateCategory. # noqa: E501
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this CreateCategory.
:param status: The status of this CreateCategory. # noqa: E501
:type: str
"""
allowed_values = ["normal", "inactive"] # noqa: E501
if status not in allowed_values:
raise ValueError(
"Invalid value for `status` ({0}), must be one of {1}" # noqa: E501
.format(status, allowed_values)
)
self._status = status
@property
def order(self):
"""Gets the order of this CreateCategory. # noqa: E501
:return: The order of this CreateCategory. # noqa: E501
:rtype: int
"""
return self._order
@order.setter
def order(self, order):
"""Sets the order of this CreateCategory.
:param order: The order of this CreateCategory. # noqa: E501
:type: int
"""
self._order = order
@property
def last_synced_time(self):
"""Gets the last_synced_time of this CreateCategory. # noqa: E501
:return: The last_synced_time of this CreateCategory. # noqa: E501
:rtype: datetime
"""
return self._last_synced_time
@last_synced_time.setter
def last_synced_time(self, last_synced_time):
"""Sets the last_synced_time of this CreateCategory.
:param last_synced_time: The last_synced_time of this CreateCategory. # noqa: E501
:type: datetime
"""
self._last_synced_time = last_synced_time
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(CreateCategory, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, CreateCategory):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
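# Hedged usage sketch (the values are made up; 'local' is one of the allowed
# `type` values): construct the model with its required fields and serialize it.
# >>> category = CreateCategory(display_name='Staff', domain='staff.example.com', type='local')
# >>> isinstance(category.to_dict(), dict)
# True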
|
#!/usr/bin/env python
# encoding: utf-8
"""
dataloader.py
Created by Shuailong on 2016-10-12.
Dataset for Blame Analysis project.
"""
import os
import csv
import re
import datetime
import string
# absolute import statement (changed from a relative import)
from blamepipeline import DATA_DIR
# sets the folder under data
BLAME_DATA = os.path.join(DATA_DIR, 'Jan2013-2017')
# sets subfolders; be sure to include any nested subfolders needed to access the data
FOX_PATH = os.path.join(BLAME_DATA, 'Hannity (opinion)/datasets')
# Article structure for USA Today, New York Times, and Wall Street Journal <- comment by author
# Patterns for USA and NYT are doc file formats from NexisUni (formerly LexisNexis)
# I changed the USA Today structure to match the Fox News structure
FOX_PATTERN = r'Fox News Network\s*(?P<date>\w+ \d+, \d+ \w+).*'\
r'\s*(?P<title>(?!^BYLINE.*$)^[^\n]*$)'\
r'((\s|\u2022)Section:(\s|\u2022)((?P<section>[\w]+);(\s|\u2022))?(\s|\u2022)?\w+)?'\
r'\s*LENGTH: (?P<length>\d+ words)'\
r'(\s*BYLINE: (?P<author>[^\n]*))?'\
r'(\s*GUESTS: (?P<guests>[^\n]*))?'\
r'\s*(?P<content>.*)LOAD-DATE'
# compile regex
PATTERNS = {'FOX': FOX_PATTERN}
REGEX = {source: re.compile(PATTERNS[source], re.DOTALL | re.MULTILINE) for source in PATTERNS}
# June 29, 2010 Tuesday
FOX_DATE_FORMAT = '%B %d, %Y %A'
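# Hedged sanity check of the date format above (not part of the pipeline):
# >>> datetime.datetime.strptime('June 29, 2010 Tuesday', FOX_DATE_FORMAT)
# datetime.datetime(2010, 6, 29, 0, 0)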
class DBReader():
'''
A class to read from csv file.
Input: filename
Output: List[{Entry}]
Entry: {title, date, section, source, s_title, s_category, target, t_title,
t_category, claim}
'''
def __init__(self, filename, source):
print('Reading from {}...'.format(filename))
self.source = source
self.entries = []
with open(filename) as csv_file:
fieldnames = ['valid', 'title', 'date', 'source', 'target', 'claim']
reader = csv.DictReader(csv_file, fieldnames=fieldnames, dialect="excel")
next(reader)
for row in reader:
if row['valid'] == '0':
continue
r = {}
r['title'] = row['title'].strip().lstrip(', ')
r['date'] = row['date'].strip()
r['source'] = row['source'].strip()
r['target'] = row['target'].strip()
r['claim'] = row['claim'].strip().lstrip(string.punctuation).replace("''", '"')
if r not in self.entries:
self.entries.append(r)
def get_entries(self):
'''
Return all the entries in the dataset.
'''
return self.entries
class Articles():
'''
A list of news articles.
Input: filename
Output: List[{Article}]
Article: {date, title, subtitle, author, section, length, guests, content}
'''
def __init__(self, filename, source):
# read articles and match
self.source = source
self.articles = []
with open(filename) as article_file:
raw_articles = article_file.read().strip().split('\f')
for raw_article in raw_articles:
raw_article = raw_article.strip()
article = self.match(raw_article)
if article and (article['title'] or article['subtitle']):
if article not in self.articles:
self.articles.append(article)
def match(self, raw_article):
'''
Using regex to extract information
raw_article: str
return: {date, title, subtitle, author, section, length, guests, content}
'''
regex = REGEX[self.source]
search_res = regex.search(raw_article)
article = {}
if search_res:
date = search_res.group('date')
if date:
date = self.reformat_date(date)
title = search_res.group('title')
if title:
title = title.strip()
if title.endswith(';'):
title = title[:-1]
if self.source == 'FOX':
subtitle = '' # no such data in NYT database
else:
subtitle = search_res.group('subtitle')
if subtitle:
subtitle = subtitle.strip()
author = search_res.group('author')
if author:
author = author.strip()
section = search_res.group('section')
if section:
section = section.strip()
guests = search_res.group('guests')
if guests:
guests = guests.strip()
length = search_res.group('length')
content = search_res.group('content')
if content:
content = content.strip().replace("''", '"')
article = {'date': date, 'title': title, 'subtitle': subtitle,
'author': author, 'section': section, 'length': length, 'guests': guests,
'content': content}
else:
article = None
return article
def reformat_date(self, date):
'''
format date string to 'YYYYmmdd'
'''
if self.source == 'FOX':
old_format = FOX_DATE_FORMAT
else:
raise ValueError('Unsupported source: {}'.format(self.source))
reformat = '%Y%m%d'
date = datetime.datetime.strptime(date, old_format).strftime(reformat)
return date
def get_articles(self):
'''
Return all the articles in the dataset.
'''
return self.articles
class Dataset():
'''A class interface to access datasets'''
def __init__(self, source):
dataset_dir = {'FOX': FOX_PATH}
if source not in dataset_dir:
raise ValueError(
'{} is not supported yet. Consider {} instead'.format(
source,
dataset_dir.keys()))
self.source = source
self.dirname = dataset_dir[source]
self.files = os.listdir(self.dirname)
self.files.sort()
def get_entries(self):
'''
Return entries of the dataset.
'''
filename = self.source + '_blame_relation_database.csv'
entries = DBReader(os.path.join(self.dirname, filename), self.source).get_entries()
return entries
def get_articles(self):
'''
Return articles of the dataset.
'''
article_data = []
for filename in self.files:
if filename.endswith('.txt'):
article_data += Articles(
os.path.join(self.dirname, filename),
self.source).get_articles()
return article_data
def main():
'''
Entry of the program.
'''
print('Dataset statistics:')
datasetnames = ['FOX']
for datasetname in datasetnames:
print('\n{}:'.format(datasetname))
dataset = Dataset(datasetname)
entries = dataset.get_entries()
articles = dataset.get_articles()
print('{} articles. {} entries.'.format(len(articles), len(entries)))
if __name__ == '__main__':
main()
|
import urllib.request
from dataclasses import dataclass, field
from pathlib import Path
from typing import Tuple
import numpy as np
import pandas as pd
@dataclass
class BaseDataset:
name: str = field(init=False)
num_samples: int = field(init=False)
num_features: int = field(init=False)
num_outlier: int = field(init=False)
num_duplicates: int = field(init=False)
pct_outlier: float = field(init=False)
_derived_datasets = []
def __init_subclass__(cls, **kwargs):
"""
class method that registers every derived subclass. This method is needed to load all datasets in a
DatasetColelction.
:param kwargs: passed
:return: None
"""
super().__init_subclass__(**kwargs)
cls._derived_datasets.append(cls)
@property
def path(self) -> Path:
from outlier_benchmark.config import DATA_HOME
cls_name = self.__class__.__name__
return DATA_HOME / cls_name / (cls_name + '.csv')
def __post_init__(self):
self.pct_outlier = round((self.num_outlier / self.num_samples) * 100, 2)
def load(self, download: bool = True) -> Tuple[np.ndarray, np.ndarray]:
"""
Loads the data X and y, both numpy arrays. If the dataset has not previously been downloaded
and ``download=True``, it will be downloaded to your local machine before loading.
:param download: bool, if download is allowed
:return: X and y, both numpy arrays.
"""
if not self.path.exists():
if download:
self._download()
else:
raise ValueError(
f'{self.name} data has not yet been downloaded to your machine and you passed download=False. '
f'Pass download=True to download and load data.')
df = pd.read_csv(self.path)
X = df.drop('outlier', axis=1).values
y = df['outlier'].values.astype(int)
return X, y
def _download(self):
from outlier_benchmark.config import DOWNLOAD_BASE_URL, DATA_HOME
download_from = self.path.relative_to(DATA_HOME)
url = DOWNLOAD_BASE_URL + '/' + download_from.as_posix()
print('download from', url, '...')
response = urllib.request.urlopen(url)
data = response.read() # a `bytes` object
text = data.decode('utf-8')
# make path
self.path.parent.mkdir(parents=True, exist_ok=True)
self.path.write_text(text, encoding='utf-8')
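# Hedged sketch of a derived dataset (the numbers are made up): subclasses are
# auto-registered via __init_subclass__, and load() would fetch
# <DATA_HOME>/Wine/Wine.csv on demand.
# @dataclass
# class Wine(BaseDataset):
#     name: str = 'Wine'
#     num_samples: int = 129
#     num_features: int = 13
#     num_outlier: int = 10
#     num_duplicates: int = 0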
|
#!/usr/bin/env python3
# Copyright (c) 2019 The Zcash developers
# Copyright (c) 2020 The PIVX developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or https://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import SurgeTestFramework
from test_framework.util import *
from decimal import Decimal
# Test wallet change address behaviour
class WalletChangeAddressesTest(SurgeTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
saplingUpgrade = ['-nuparams=v5_shield:1']
self.extra_args = [saplingUpgrade, saplingUpgrade]
def run_test(self):
self.nodes[0].generate(110)
# Obtain some transparent funds
midAddr = self.nodes[0].getnewshieldaddress()
# Shield almost all the balance
txid = self.nodes[0].shieldsendmany(get_coinstake_address(self.nodes[0]), [{"address": midAddr, "amount": Decimal(2400)}])
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
taddrSource = self.nodes[0].getnewaddress()
for _ in range(6):
recipients = [{"address": taddrSource, "amount": Decimal('3')}]
txid = self.nodes[0].shieldsendmany(midAddr, recipients, 1)
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
def check_change_taddr_reuse(target, isTargetShielded):
recipients = [{"address": target, "amount": Decimal('1')}]
# Send funds to recipient address twice
txid1 = self.nodes[0].shieldsendmany(taddrSource, recipients, 1)
self.nodes[1].generate(1)
self.sync_all()
txid2 = self.nodes[0].shieldsendmany(taddrSource, recipients, 1)
self.nodes[1].generate(1)
self.sync_all()
# Verify that the two transactions used different change addresses
tx1 = self.nodes[0].getrawtransaction(txid1, 1)
tx2 = self.nodes[0].getrawtransaction(txid2, 1)
assert_true(len(tx1['vout']) >= 1) # at least one output
assert_true(len(tx2['vout']) >= 1)
for i in range(len(tx1['vout'])):
tx1OutAddrs = tx1['vout'][i]['scriptPubKey']['addresses']
tx2OutAddrs = tx2['vout'][i]['scriptPubKey']['addresses']
if tx1OutAddrs != [target]:
print('Source address: %s' % taddrSource)
print('TX1 change address: %s' % tx1OutAddrs[0])
print('TX2 change address: %s' % tx2OutAddrs[0])
assert(tx1OutAddrs != tx2OutAddrs)
taddr = self.nodes[0].getnewaddress()
saplingAddr = self.nodes[0].getnewshieldaddress()
print()
print('Checking shieldsendmany(taddr->Sapling)')
check_change_taddr_reuse(saplingAddr, True)
print()
print('Checking shieldsendmany(taddr->taddr)')
check_change_taddr_reuse(taddr, False)
if __name__ == '__main__':
WalletChangeAddressesTest().main()
|
from django.contrib.gis import admin
from .models import WorldBorder, Forest, Lake, Province, POI, River
admin.site.register(WorldBorder, admin.GeoModelAdmin)
admin.site.register(Forest, admin.GeoModelAdmin)
admin.site.register(Lake, admin.GeoModelAdmin)
admin.site.register(Province, admin.GeoModelAdmin)
admin.site.register(POI, admin.GeoModelAdmin)
admin.site.register(River, admin.GeoModelAdmin)
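# Hedged alternative sketch: a customized GeoModelAdmin subclass could be
# registered instead of the stock one (the field name is an assumption):
# class WorldBorderAdmin(admin.GeoModelAdmin):
#     list_display = ('name',)
# admin.site.register(WorldBorder, WorldBorderAdmin)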
|
# coding=utf8
# Copyright 2018 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.
import unittest
import os
import json
class KmsTest(unittest.TestCase):
def test_describe_key_list(self):
cmd = """python ../../main.py kms describe-key-list """
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_create_key(self):
cmd = """python ../../main.py kms create-key --key-cfg '{"":""}'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_describe_key(self):
cmd = """python ../../main.py kms describe-key --key-id 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_update_key_description(self):
cmd = """python ../../main.py kms update-key-description --key-id 'xxx' --key-cfg '{"":""}'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_enable_key(self):
cmd = """python ../../main.py kms enable-key --key-id 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_disable_key(self):
cmd = """python ../../main.py kms disable-key --key-id 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_schedule_key_deletion(self):
cmd = """python ../../main.py kms schedule-key-deletion --key-id 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_cancel_key_deletion(self):
cmd = """python ../../main.py kms cancel-key-deletion --key-id 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_key_rotation(self):
cmd = """python ../../main.py kms key-rotation --key-id 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_encrypt(self):
cmd = """python ../../main.py kms encrypt --key-id 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_decrypt(self):
cmd = """python ../../main.py kms decrypt --key-id 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_get_public_key(self):
cmd = """python ../../main.py kms get-public-key --key-id 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_sign(self):
cmd = """python ../../main.py kms sign --key-id 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_validate(self):
cmd = """python ../../main.py kms validate --key-id 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_generate_data_key(self):
cmd = """python ../../main.py kms generate-data-key --key-id 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_describe_key_detail(self):
cmd = """python ../../main.py kms describe-key-detail --key-id 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_enable_key_version(self):
cmd = """python ../../main.py kms enable-key-version --key-id 'xxx' --version 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_disable_key_version(self):
cmd = """python ../../main.py kms disable-key-version --key-id 'xxx' --version 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_schedule_key_version_deletion(self):
cmd = """python ../../main.py kms schedule-key-version-deletion --key-id 'xxx' --version 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_cancel_key_version_deletion(self):
cmd = """python ../../main.py kms cancel-key-version-deletion --key-id 'xxx' --version 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_describe_secret_list(self):
cmd = """python ../../main.py kms describe-secret-list """
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_create_secret(self):
cmd = """python ../../main.py kms create-secret --secret-cfg '{"":""}'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_import_secret(self):
cmd = """python ../../main.py kms import-secret """
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_describe_secret_version_list(self):
cmd = """python ../../main.py kms describe-secret-version-list --secret-id 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_update_secret(self):
cmd = """python ../../main.py kms update-secret --secret-id 'xxx' --secret-desc-cfg '{"":""}'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_enable_secret(self):
cmd = """python ../../main.py kms enable-secret --secret-id 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_disable_secret(self):
cmd = """python ../../main.py kms disable-secret --secret-id 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_delete_secret(self):
cmd = """python ../../main.py kms delete-secret --secret-id 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_create_secret_version(self):
cmd = """python ../../main.py kms create-secret-version --secret-id 'xxx' --secret-version-cfg '{"":""}'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_export_secret(self):
cmd = """python ../../main.py kms export-secret --secret-id 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_describe_secret_version_info(self):
cmd = """python ../../main.py kms describe-secret-version-info --secret-id 'xxx' --version 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_update_secret_version(self):
cmd = """python ../../main.py kms update-secret-version --secret-id 'xxx' --version 'xxx' --secret-time-cfg '{"":""}'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_enable_secret_version(self):
cmd = """python ../../main.py kms enable-secret-version --secret-id 'xxx' --version 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_disable_secret_version(self):
cmd = """python ../../main.py kms disable-secret-version --secret-id 'xxx' --version 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_delete_secret_version(self):
cmd = """python ../../main.py kms delete-secret-version --secret-id 'xxx' --version 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
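# Hedged refactoring sketch (illustration only, not generated code): the
# boilerplate repeated in every test above could be factored into one helper:
# def _assert_cli_returns_dict(self, cmd):
#     with os.popen(cmd) as f:
#         content = f.read()
#     print(content)
#     result = json.loads(content)
#     self.assertIsInstance(result, dict)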
|
class Solution:
def lengthOfLastWord(self, s: str) -> int:
# Approach 2
# Check only the tail of the string:
# skip any trailing spaces, then
# count characters until the next space
space = chr(32)
i = len(s)-1
count = 0
while i>=0 and s[i] == space:
i -= 1
while i>=0 and s[i] != space:
count += 1
i -= 1
return count
# Approach 1 (kept for reference; unreachable after the return above):
# Time complexity: O(n)
# Space complexity: O(1)
right = len(s)-1
while right >= 0:
if s[right] != chr(32):
break
right -= 1
alen = 0
while right >= 0 and s[right] != chr(32):
alen += 1
right -= 1
return alen
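# Quick check (hedged example): trailing spaces are skipped before counting.
# Solution().lengthOfLastWord('Hello World  ')  # -> 5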
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Hive Colony Framework
# Copyright (c) 2008-2020 Hive Solutions Lda.
#
# This file is part of Hive Colony Framework.
#
# Hive Colony Framework is free software: you can redistribute it and/or modify
# it under the terms of the Apache License as published by the Apache
# Foundation, either version 2.0 of the License, or (at your option) any
# later version.
#
# Hive Colony Framework is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Apache License for more details.
#
# You should have received a copy of the Apache License along with
# Hive Colony Framework. If not, see <http://www.apache.org/licenses/>.
__author__ = "João Magalhães <[email protected]>"
""" The author(s) of the module """
__version__ = "1.0.0"
""" The version of the module """
__revision__ = "$LastChangedRevision$"
""" The revision number of the module """
__date__ = "$LastChangedDate$"
""" The last change date of the module """
__copyright__ = "Copyright (c) 2008-2020 Hive Solutions Lda."
""" The copyright for the module """
__license__ = "Apache License, Version 2.0"
""" The license for the module """
import colony
CONSOLE_EXTENSION_NAME = "printing"
""" The console extension name """
class ConsolePrintingManager(colony.System):
"""
The console printing manager class, responsible
for the handling of the printing commands.
"""
def __init__(self, plugin):
colony.System.__init__(self, plugin)
self.commands_map = self.__generate_commands_map()
def get_console_extension_name(self):
return CONSOLE_EXTENSION_NAME
def get_commands_map(self):
return self.commands_map
def process_print_test(
self,
arguments,
arguments_map,
output_method,
console_context
):
printing_manager = self.plugin.system
printing_manager.print_test()
def process_print_test_image(
self,
arguments,
arguments_map,
output_method,
console_context
):
printing_manager = self.plugin.system
printing_manager.print_test_image()
def process_print_printing_language(
self,
arguments,
arguments_map,
output_method,
console_context,
file_path
):
# retrieves the provided file path value and reads its contents,
# then closes the file; these contents are the ones that are going
# to be used for the printing process of the file
file_path = arguments_map.get("file_path", file_path)
file = open(file_path, "r")
try: contents = file.read()
finally: file.close()
# retrieves the reference to the printing manager instance
# and runs the printing process for the provided contents
printing_manager = self.plugin.system
printing_manager.print_printing_language(contents)
def __generate_commands_map(self):
return {
"print_test" : {
"handler" : self.process_print_test,
"description" : "prints a test page"
},
"print_image" : {
"handler" : self.process_print_test_image,
"description" : "prints a test page with an image"
},
"print_language" : {
"handler" : self.process_print_test_image,
"description" : "prints the page described in the file of the given file path",
"arguments" : [
{
"name" : "file_path",
"description" : "path to the file name to be printed",
"values" : str,
"mandatory" : False
}
]
}
}
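# Hedged usage sketch: how a console could dispatch the generated commands map
# (the plugin and console objects are assumed to exist at runtime):
# manager = ConsolePrintingManager(plugin)
# handler = manager.get_commands_map()["print_test"]["handler"]
# handler([], {}, print, None)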
|
import numpy as np
import pandas as pd
def calculate_bounds(df, modes, maximum_bin_count, distance = "euclidean_distance", minimum_distance = 0.0, maximum_distance = np.inf):
bounds = {}
df = df.copy()
df = df[df[distance] >= minimum_distance]
df = df[df[distance] < maximum_distance]
for mode in modes:
values = df[distance].values
cdf = df["weight"].values
sorter = np.argsort(values)
values = values[sorter]
cdf = np.cumsum(cdf[sorter])
cdf = cdf / cdf[-1]
probabilities = np.linspace(0.0, 1.0, maximum_bin_count + 1)
quantiles = np.unique([
values[np.argmin(cdf <= p)] for p in probabilities
])
bounds[mode] = list(zip(np.arange(len(quantiles)), quantiles[:-1], quantiles[1:]))
return bounds, distance
def calculate_shares(df_trips, bounds):
bounds, distance = bounds
df_shares = []
if distance not in df_trips:
raise RuntimeError("Reference has been calculated with %s, but column is not present" % distance)
# Filter relevant mode
df_trips = df_trips[df_trips["mode"].isin(list(bounds.keys()))]
for mode in bounds.keys():
df_mode = df_trips[df_trips["mode"] == mode]
for index, lower, upper in bounds[mode]:
value = df_mode[df_mode[distance].between(lower, upper, inclusive = "left")]["weight"].sum()
value /= df_trips[df_trips[distance].between(lower, upper, inclusive = "left")]["weight"].sum()
df_shares.append({
"mode": mode,
"bin_index": index,
"lower_bound": lower,
"upper_bound": upper,
"share": value
})
return pd.DataFrame.from_records(df_shares)
def calculate_difference(df_reference, df_simulation, bounds):
df_reference = calculate_shares(df_reference, bounds)
df_simulation = df_simulation.copy()
df_simulation["weight"] = 1.0
df_simulation = calculate_shares(df_simulation, bounds)
df_difference = pd.merge(
df_reference.rename(columns = { "share": "reference_share" }),
df_simulation.rename(columns = { "share": "simulation_share" })[[
"simulation_share", "mode", "bin_index"
]],
on = ["mode", "bin_index"],
how = "left"
).fillna(0.0)
df_difference["difference"] = df_difference["simulation_share"] - df_difference["reference_share"]
return df_difference
if __name__ == "__main__":
import matplotlib.pyplot as plt
df_reference = pd.read_csv("data/egt_trips.csv", sep = ";")
bounds = calculate_bounds(
df = df_reference,
modes = ["car", "pt", "bike", "walk"],
maximum_bin_count = 20,
minimum_distance = 250.0,
maximum_distance = 40 * 1e3
)
df_simulation = pd.read_csv("data/test_output/eqasim_trips.csv", sep = ";")
df_difference = calculate_difference(df_reference, df_simulation, bounds)
print(df_difference)
df_mode = df_difference[df_difference["mode"] == "pt"]
midpoint = 0.5 * (df_mode["upper_bound"] + df_mode["lower_bound"])
plt.figure()
plt.plot(midpoint, df_mode["reference_share"], "--")
plt.plot(midpoint, df_mode["simulation_share"])
plt.show()
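# Hedged, self-contained sanity check on synthetic data (column names follow
# the code above; the CSV paths in __main__ are assumed to exist in a real run):
# df = pd.DataFrame({
#     "euclidean_distance": np.arange(1.0, 101.0),
#     "weight": np.ones(100),
#     "mode": ["car"] * 100,
# })
# bounds = calculate_bounds(df, ["car"], maximum_bin_count=4)
# df_shares = calculate_shares(df, bounds)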
|
import cv2
import numpy as np
from math import exp
# object label list
LABELS=['person', 'bicycle', 'car', 'motorbike', 'aeroplane', 'bus', 'train', 'truck', 'boat', 'traffic light', 'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', 'skis', 'snowboard', 'sports ball', 'kite', 'baseball bat', 'baseball glove', 'skateboard', 'surfboard', 'tennis racket', 'bottle', 'wine glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake', 'chair', 'sofa', 'pottedplant', 'bed', 'diningtable', 'toilet', 'tvmonitor', 'laptop', 'mouse', 'remote', 'keyboard', 'cell phone', 'microwave', 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase', 'scissors', 'teddy bear', 'hair drier', 'toothbrush']
# parameter list
params={'anchors': [10.0, 13.0, 16.0, 30.0, 33.0, 23.0, 30.0, 61.0, 62.0, 45.0, 59.0, 119.0, 116.0, 90.0, 156.0, 198.0, 373.0, 326.0], 'axis': 1, 'coords': 4, 'classes': 80, 'end_axis': 3, 'num': 9, 'do_softmax': False, 'mask': [3, 4, 5]}
class YoloParams_v3:
# ------------------------------------------- extract layer parameters ------------------------------------------
def __init__(self, param, side):
self.num = 3 if 'num' not in param else int(param['num'])
self.coords = 4 if 'coords' not in param else int(param['coords'])
self.classes = 80 if 'classes' not in param else int(param['classes'])
self.side = side
self.anchors = [10.0, 13.0, 16.0, 30.0, 33.0, 23.0, 30.0, 61.0, 62.0, 45.0, 59.0, 119.0, 116.0, 90.0, 156.0,
198.0, 373.0, 326.0] if 'anchors' not in param else param['anchors']
self.isYoloV3 = False
if param.get('mask'):
mask = param['mask']
self.num = len(mask)
maskedAnchors = []
for idx in mask:
maskedAnchors += [self.anchors[idx * 2], self.anchors[idx * 2 + 1]]
self.anchors = maskedAnchors
self.isYoloV3 = True # Weak way to determine but the only one.
class Yolo_v3:
@staticmethod
def entry_index(side, coord, classes, location, entry):
side_power_2 = side ** 2
n = location // side_power_2
loc = location % side_power_2
return int(side_power_2 * (n * (coord + classes + 1) + entry) + loc)
@staticmethod
def intersection_over_union(box_1, box_2):
width_of_overlap_area = min(box_1['xmax'], box_2['xmax']) - max(box_1['xmin'], box_2['xmin'])
height_of_overlap_area = min(box_1['ymax'], box_2['ymax']) - max(box_1['ymin'], box_2['ymin'])
if width_of_overlap_area < 0 or height_of_overlap_area < 0: area_of_overlap = 0
else: area_of_overlap = width_of_overlap_area * height_of_overlap_area
box_1_area = (box_1['ymax'] - box_1['ymin']) * (box_1['xmax'] - box_1['xmin'])
box_2_area = (box_2['ymax'] - box_2['ymin']) * (box_2['xmax'] - box_2['xmin'])
area_of_union = box_1_area + box_2_area - area_of_overlap
if area_of_union == 0: return 0
return area_of_overlap / area_of_union
@staticmethod
def scale_bbox(x, y, height, width, class_id, confidence, im_h, im_w, is_proportional):
if is_proportional:
scale = np.array([min(im_w/im_h, 1), min(im_h/im_w, 1)])
offset = 0.5*(np.ones(2) - scale)
x, y = (np.array([x, y]) - offset) / scale
width, height = np.array([width, height]) / scale
xmin = int((x - width / 2) * im_w)
ymin = int((y - height / 2) * im_h)
xmax = int(xmin + width * im_w)
ymax = int(ymin + height * im_h)
return dict(xmin=xmin, xmax=xmax, ymin=ymin, ymax=ymax, class_id=class_id.item(), confidence=confidence.item())
@staticmethod
def parse_yolo_region(blob, resized_image_shape, original_im_shape, params, threshold):
# ------------------------------------------ output parameters ------------------------------------------
_, _, out_blob_h, out_blob_w = blob.shape
assert out_blob_w == out_blob_h, "Invalid size of output blob. It should be in NCHW layout and height should " \
"be equal to width. Current height = {}, current width = {}" \
"".format(out_blob_h, out_blob_w)
# ------------------------------------------ extract layer parameters -------------------------------------------
orig_im_h, orig_im_w = original_im_shape
resized_image_h, resized_image_w = resized_image_shape
objects = list()
size_normalizer = (resized_image_w, resized_image_h) if params.isYoloV3 else (params.side, params.side)
bbox_size = params.coords + 1 + params.classes
# ------------------------------------------- analyze YOLO Region output -------------------------------------------
for row, col, n in np.ndindex(params.side, params.side, params.num):
# Getting raw values for each detection bounding box
bbox = blob[0, n*bbox_size:(n+1)*bbox_size, row, col]
x, y, width, height, object_probability = bbox[:5]
class_probabilities = bbox[5:]
if object_probability < threshold: continue
# Process raw value
x = (col + x) / params.side
y = (row + y) / params.side
# The value passed to exp can be a very big number in some cases, so the following construction is used here
try: width, height = exp(width), exp(height)
except OverflowError: continue
# Depending on the topology, we need to normalize sizes by feature maps (up to YOLOv3) or by the input shape (YOLOv3)
width = width * params.anchors[2 * n] / size_normalizer[0]
height = height * params.anchors[2 * n + 1] / size_normalizer[1]
class_id = np.argmax(class_probabilities)
confidence = class_probabilities[class_id]*object_probability
if confidence < threshold: continue
objects.append(Yolo_v3.scale_bbox(x=x, y=y, height=height, width=width, class_id=class_id, confidence=confidence,
im_h=orig_im_h, im_w=orig_im_w, is_proportional=True))
return objects
def object_detection(frame, img_np, outputs):
COLORS = np.random.uniform(0, 255, size=(len(LABELS), 3))
prob_threshold, iou_threshold = 0.5, 0.15
objects = []
masks=[[6, 7, 8], [3, 4, 5], [0, 1, 2]]
# output object's coordinates
for i, outBlob in enumerate(outputs):
outBlob = outBlob.reshape(outputs[i].shape)
params['mask']=masks[i]
layerParams = YoloParams_v3(params, outBlob.shape[2])
objects += Yolo_v3.parse_yolo_region(outBlob, img_np.shape[2:], \
frame.shape[:-1], layerParams, prob_threshold)
# measure the object's confidence
for i in range(len(objects)):
if objects[i]["confidence"] == 0: continue
for j in range(i + 1, len(objects)):
if Yolo_v3.intersection_over_union(objects[i], objects[j]) > iou_threshold:
objects[j]["confidence"] = 0
objects = [obj for obj in objects if obj['confidence'] >= prob_threshold]
endY, endX = frame.shape[:-1]
for obj in objects:
# clipping
if obj["xmax"] > endX or obj["ymax"] > endY or obj["xmin"] < 0 or obj["ymin"] < 0: continue
label=f'{LABELS[obj["class_id"]]};{obj["confidence"] * 100:.2f}%'
y = obj["ymin"] - 10 if obj["ymin"] - 10 > 10 else obj["ymin"] + 10
# measure the size of the label text ---------------
# size of text
size=(obj["xmax"]-obj["xmin"])/125
length=cv2.getTextSize(label,
cv2.FONT_HERSHEY_SIMPLEX,
size,
1)[1]
thickness_text=int(length/3.5)
# draw a rectangle surrounding the object, and text showing what it is
cv2.rectangle(frame, (obj["xmin"], obj["ymin"]), (obj["xmax"], obj["ymax"]), COLORS[obj["class_id"]], 2)
cv2.putText(frame, label, (obj["xmin"], y), cv2.FONT_HERSHEY_SIMPLEX, size, COLORS[obj["class_id"]], thickness_text)
return frame
|
#
# PySNMP MIB module SLP1-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/SLP1-MIB
# Produced by pysmi-0.3.4 at Wed May 1 15:07:31 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ConstraintsUnion, SingleValueConstraint, ConstraintsIntersection, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsUnion", "SingleValueConstraint", "ConstraintsIntersection", "ValueRangeConstraint")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
MibScalar, MibTable, MibTableRow, MibTableColumn, Gauge32, Integer32, Unsigned32, NotificationType, MibIdentifier, iso, TimeTicks, IpAddress, ModuleIdentity, enterprises, Bits, Counter64, ObjectIdentity, Counter32 = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Gauge32", "Integer32", "Unsigned32", "NotificationType", "MibIdentifier", "iso", "TimeTicks", "IpAddress", "ModuleIdentity", "enterprises", "Bits", "Counter64", "ObjectIdentity", "Counter32")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
slp1 = ModuleIdentity((1, 3, 6, 1, 4, 1, 244, 1, 9, 1))
if mibBuilder.loadTexts: slp1.setLastUpdated('200412091740Z')
if mibBuilder.loadTexts: slp1.setOrganization('Lantronix')
if mibBuilder.loadTexts: slp1.setContactInfo('Postal: Lantronix 15353 Barranca Parkway Irvine, CA 92618 Tel: (949) 453-7198 Email: [email protected]')
if mibBuilder.loadTexts: slp1.setDescription('This is the MIB module for the SecureLinx Power Manager.')
lantronix = MibIdentifier((1, 3, 6, 1, 4, 1, 244))
products = MibIdentifier((1, 3, 6, 1, 4, 1, 244, 1))
slp = MibIdentifier((1, 3, 6, 1, 4, 1, 244, 1, 9))
slp1SystemGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 1))
slp1SystemVersion = MibScalar((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1SystemVersion.setStatus('current')
if mibBuilder.loadTexts: slp1SystemVersion.setDescription('The firmware version of the system.')
slp1SystemNICSerialNumber = MibScalar((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1SystemNICSerialNumber.setStatus('current')
if mibBuilder.loadTexts: slp1SystemNICSerialNumber.setDescription('The serial number of the network interface card in the system.')
slp1SystemLocation = MibScalar((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: slp1SystemLocation.setStatus('current')
if mibBuilder.loadTexts: slp1SystemLocation.setDescription('The location of the system.')
slp1SystemTowerCount = MibScalar((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2))).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1SystemTowerCount.setStatus('current')
if mibBuilder.loadTexts: slp1SystemTowerCount.setDescription('The number of towers in the system.')
slp1SystemEnvMonCount = MibScalar((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4))).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1SystemEnvMonCount.setStatus('current')
if mibBuilder.loadTexts: slp1SystemEnvMonCount.setDescription('The number of environmental monitors in the system.')
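# Hedged usage sketch (assumes this module has been compiled and loaded by a
# pysnmp MibBuilder, which injects `mibBuilder` at load time):
# slp1SystemVersion, = mibBuilder.importSymbols("SLP1-MIB", "slp1SystemVersion")
# slp1SystemVersion.getName()  # -> (1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 1, 1)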
slp1SystemTables = MibIdentifier((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2))
slp1TowerTable = MibTable((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 1), )
if mibBuilder.loadTexts: slp1TowerTable.setStatus('current')
if mibBuilder.loadTexts: slp1TowerTable.setDescription('A table of towers.')
slp1TowerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 1, 1), ).setIndexNames((0, "SLP1-MIB", "slp1TowerIndex"))
if mibBuilder.loadTexts: slp1TowerEntry.setStatus('current')
if mibBuilder.loadTexts: slp1TowerEntry.setDescription('Row definition for the tower table.')
slp1TowerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2)))
if mibBuilder.loadTexts: slp1TowerIndex.setStatus('current')
if mibBuilder.loadTexts: slp1TowerIndex.setDescription('Index for the tower table.')
slp1TowerID = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 1, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1TowerID.setStatus('current')
if mibBuilder.loadTexts: slp1TowerID.setDescription('The ID of the tower.')
slp1TowerName = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 1, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 24))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: slp1TowerName.setStatus('current')
if mibBuilder.loadTexts: slp1TowerName.setDescription('The name of the tower.')
slp1TowerStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("normal", 0), ("noComm", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1TowerStatus.setStatus('current')
if mibBuilder.loadTexts: slp1TowerStatus.setDescription('The operational status of the tower.')
slp1TowerInfeedCount = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4))).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1TowerInfeedCount.setStatus('current')
if mibBuilder.loadTexts: slp1TowerInfeedCount.setDescription('The number of input feeds on the tower.')
slp1InfeedTable = MibTable((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 2), )
if mibBuilder.loadTexts: slp1InfeedTable.setStatus('current')
if mibBuilder.loadTexts: slp1InfeedTable.setDescription('A table of input feeds.')
slp1InfeedEntry = MibTableRow((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 2, 1), ).setIndexNames((0, "SLP1-MIB", "slp1TowerIndex"), (0, "SLP1-MIB", "slp1InfeedIndex"))
if mibBuilder.loadTexts: slp1InfeedEntry.setStatus('current')
if mibBuilder.loadTexts: slp1InfeedEntry.setDescription('Row definition for the input feed table.')
slp1InfeedIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4)))
if mibBuilder.loadTexts: slp1InfeedIndex.setStatus('current')
if mibBuilder.loadTexts: slp1InfeedIndex.setDescription('Index for the input feed table.')
slp1InfeedID = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(2, 2)).setFixedLength(2)).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1InfeedID.setStatus('current')
if mibBuilder.loadTexts: slp1InfeedID.setDescription('The ID of the input feed.')
slp1InfeedName = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 2, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 24))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: slp1InfeedName.setStatus('current')
if mibBuilder.loadTexts: slp1InfeedName.setDescription('The name of the input feed.')
slp1InfeedCapabilities = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 2, 1, 4), Bits().clone(namedValues=NamedValues(("onSense", 0), ("loadSense", 1), ("powerControl", 2), ("failSafe", 3), ("defaultOff", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1InfeedCapabilities.setStatus('current')
if mibBuilder.loadTexts: slp1InfeedCapabilities.setDescription('The capabilities of the input feed.')
slp1InfeedStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("off", 0), ("on", 1), ("offWait", 2), ("onWait", 3), ("offError", 4), ("onError", 5), ("noComm", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1InfeedStatus.setStatus('current')
if mibBuilder.loadTexts: slp1InfeedStatus.setDescription("The status of the input feed. If the slp1InfeedCapabilities 'onSense' bit is TRUE, then all of the states are supported and indicate the sensed state of the input feed. If the slp1InfeedCapabilities 'onSense' bit is FALSE, then only the 'on' and 'noComm' states are supported, and 'on' indicates a derived state, not a sensed state.")
slp1InfeedLoadStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("normal", 0), ("notOn", 1), ("reading", 2), ("loadLow", 3), ("loadHigh", 4), ("overLoad", 5), ("readError", 6), ("noComm", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1InfeedLoadStatus.setStatus('current')
if mibBuilder.loadTexts: slp1InfeedLoadStatus.setDescription("The status of the load measured on the input feed. If the slp1InfeedCapabilities 'loadSense' bit is TRUE, then all of the states are supported. If the slp1InfeedCapabilities 'loadSense' bit is FALSE, then only the 'normal' and 'noComm' states are supported.")
slp1InfeedLoadValue = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 2, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1, 25500))).setUnits('hundredth Amps').setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1InfeedLoadValue.setStatus('current')
if mibBuilder.loadTexts: slp1InfeedLoadValue.setDescription('The load measured on the input feed. A non-negative value indicates the measured load in hundredths of Amps. A negative value indicates that a load value was not able to be measured.')
slp1InfeedLoadHighThresh = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 2, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setUnits('Amps').setMaxAccess("readwrite")
if mibBuilder.loadTexts: slp1InfeedLoadHighThresh.setStatus('current')
if mibBuilder.loadTexts: slp1InfeedLoadHighThresh.setDescription('The load high threshold value of the input feed in Amps.')
slp1InfeedOutletCount = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 2, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1InfeedOutletCount.setStatus('current')
if mibBuilder.loadTexts: slp1InfeedOutletCount.setDescription('The number of outlets on the input feed.')
slp1OutletTable = MibTable((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 3), )
if mibBuilder.loadTexts: slp1OutletTable.setStatus('current')
if mibBuilder.loadTexts: slp1OutletTable.setDescription('A table of outlets.')
slp1OutletEntry = MibTableRow((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 3, 1), ).setIndexNames((0, "SLP1-MIB", "slp1TowerIndex"), (0, "SLP1-MIB", "slp1InfeedIndex"), (0, "SLP1-MIB", "slp1OutletIndex"))
if mibBuilder.loadTexts: slp1OutletEntry.setStatus('current')
if mibBuilder.loadTexts: slp1OutletEntry.setDescription('Row definition for the outlet table.')
slp1OutletIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 16)))
if mibBuilder.loadTexts: slp1OutletIndex.setStatus('current')
if mibBuilder.loadTexts: slp1OutletIndex.setDescription('Index for the outlet table.')
slp1OutletID = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 3, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(4, 4)).setFixedLength(4)).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1OutletID.setStatus('current')
if mibBuilder.loadTexts: slp1OutletID.setDescription('The ID of the outlet.')
slp1OutletName = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 3, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 24))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: slp1OutletName.setStatus('current')
if mibBuilder.loadTexts: slp1OutletName.setDescription('The name of the outlet.')
slp1OutletCapabilities = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 3, 1, 4), Bits().clone(namedValues=NamedValues(("onSense", 0), ("loadSense", 1), ("powerControl", 2), ("shutdown", 3), ("defaultOn", 4), ("ownInfeed", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1OutletCapabilities.setStatus('current')
if mibBuilder.loadTexts: slp1OutletCapabilities.setDescription('The capabilities of the outlet.')
slp1OutletStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 3, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("off", 0), ("on", 1), ("offWait", 2), ("onWait", 3), ("offError", 4), ("onError", 5), ("noComm", 6), ("reading", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1OutletStatus.setStatus('current')
if mibBuilder.loadTexts: slp1OutletStatus.setDescription("The status of the outlet. If the slp1OutletCapabilities 'onSense' bit is TRUE, then all of the states are supported and indicate the sensed state of the outlet. If the slp1OutletCapabilities 'onSense' bit is FALSE, then only the 'off', 'on', and 'noComm' states are supported, and 'off' and 'on' indicate a derived state, not a sensed state.")
slp1OutletLoadStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 3, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("normal", 0), ("notOn", 1), ("reading", 2), ("loadLow", 3), ("loadHigh", 4), ("overLoad", 5), ("readError", 6), ("noComm", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1OutletLoadStatus.setStatus('current')
if mibBuilder.loadTexts: slp1OutletLoadStatus.setDescription("The status of the load measured on the outlet. If the slp1OutletCapabilities 'loadSense' bit is TRUE, then all of the states are supported. If the slp1OutletCapabilities 'loadSense' bit is FALSE, then only the 'normal' and 'noComm' states are supported.")
slp1OutletLoadValue = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 3, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1, 25500))).setUnits('hundredth Amps').setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1OutletLoadValue.setStatus('current')
if mibBuilder.loadTexts: slp1OutletLoadValue.setDescription('The load measured on the outlet. A non-negative value indicates the measured load in hundredths of Amps. A negative value indicates that a load value was not able to be measured.')
slp1OutletLoadLowThresh = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 3, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setUnits('Amps').setMaxAccess("readwrite")
if mibBuilder.loadTexts: slp1OutletLoadLowThresh.setStatus('current')
if mibBuilder.loadTexts: slp1OutletLoadLowThresh.setDescription('The load low threshold value of the outlet in Amps.')
slp1OutletLoadHighThresh = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 3, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setUnits('Amps').setMaxAccess("readwrite")
if mibBuilder.loadTexts: slp1OutletLoadHighThresh.setStatus('current')
if mibBuilder.loadTexts: slp1OutletLoadHighThresh.setDescription('The load high threshold value of the outlet in Amps.')
slp1OutletControlState = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 3, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("idleOff", 0), ("idleOn", 1), ("wakeOff", 2), ("wakeOn", 3), ("off", 4), ("on", 5), ("lockedOff", 6), ("lockedOn", 7), ("reboot", 8), ("shutdown", 9), ("pendOn", 10), ("pendOff", 11)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1OutletControlState.setStatus('current')
if mibBuilder.loadTexts: slp1OutletControlState.setDescription("The control state of the outlet. The supported states are dependent upon the slp1OutletCapabilities 'powerControl' and 'shutdown' bits, as well as the supported features of the firmware version.")
slp1OutletControlAction = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 3, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("none", 0), ("on", 1), ("off", 2), ("reboot", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: slp1OutletControlAction.setStatus('current')
if mibBuilder.loadTexts: slp1OutletControlAction.setDescription("An action to change the control state of the outlet. If the slp1OutletCapabilities 'powerControl' bit is TRUE, then the 'on', 'off', and 'reboot' actions can be used to change the control state of the outlet. If the slp1OutletCapabilities 'powerControl' bit is FALSE, then the actions have no effect.")
slp1EnvMonTable = MibTable((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 4), )
if mibBuilder.loadTexts: slp1EnvMonTable.setStatus('current')
if mibBuilder.loadTexts: slp1EnvMonTable.setDescription('A table of environmental monitors.')
slp1EnvMonEntry = MibTableRow((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 4, 1), ).setIndexNames((0, "SLP1-MIB", "slp1EnvMonIndex"))
if mibBuilder.loadTexts: slp1EnvMonEntry.setStatus('current')
if mibBuilder.loadTexts: slp1EnvMonEntry.setDescription('Row definition for the environmental monitor table.')
slp1EnvMonIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4)))
if mibBuilder.loadTexts: slp1EnvMonIndex.setStatus('current')
if mibBuilder.loadTexts: slp1EnvMonIndex.setDescription('Index for the environmental monitor table.')
slp1EnvMonID = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 4, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1EnvMonID.setStatus('current')
if mibBuilder.loadTexts: slp1EnvMonID.setDescription('The ID of the environmental monitor.')
slp1EnvMonName = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 4, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 24))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: slp1EnvMonName.setStatus('current')
if mibBuilder.loadTexts: slp1EnvMonName.setDescription('The name of the environmental monitor.')
slp1EnvMonStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 4, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("normal", 0), ("noComm", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1EnvMonStatus.setStatus('current')
if mibBuilder.loadTexts: slp1EnvMonStatus.setDescription('The operational status of the environmental monitor.')
slp1EnvMonWaterSensorName = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 4, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 24))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: slp1EnvMonWaterSensorName.setStatus('current')
if mibBuilder.loadTexts: slp1EnvMonWaterSensorName.setDescription('The name of the water sensor.')
slp1EnvMonWaterSensorStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 4, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("normal", 0), ("alarm", 1), ("noComm", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1EnvMonWaterSensorStatus.setStatus('current')
if mibBuilder.loadTexts: slp1EnvMonWaterSensorStatus.setDescription('The status of the water sensor.')
slp1EnvMonADCName = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 4, 1, 7), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 24))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: slp1EnvMonADCName.setStatus('current')
if mibBuilder.loadTexts: slp1EnvMonADCName.setDescription('The name of the analog-to-digital converter.')
slp1EnvMonADCStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 4, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("normal", 0), ("reading", 1), ("countLow", 2), ("countHigh", 3), ("readError", 4), ("noComm", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1EnvMonADCStatus.setStatus('current')
if mibBuilder.loadTexts: slp1EnvMonADCStatus.setDescription('The status of the analog-to-digital converter.')
slp1EnvMonADCCount = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 4, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1EnvMonADCCount.setStatus('current')
if mibBuilder.loadTexts: slp1EnvMonADCCount.setDescription('The 8-bit count value from the analog-to-digital converter. A non-negative value indicates the digital value retrieved from the ADC. A negative value indicates that a digital value was not able to be retrieved.')
slp1EnvMonADCLowThresh = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 4, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: slp1EnvMonADCLowThresh.setStatus('current')
if mibBuilder.loadTexts: slp1EnvMonADCLowThresh.setDescription('The 8-bit count low threshold value of the analog-to-digital converter.')
slp1EnvMonADCHighThresh = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 4, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: slp1EnvMonADCHighThresh.setStatus('current')
if mibBuilder.loadTexts: slp1EnvMonADCHighThresh.setDescription('The 8-bit count high threshold value of the analog-to-digital converter.')
slp1EnvMonTempHumidSensorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 4, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2))).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1EnvMonTempHumidSensorCount.setStatus('current')
if mibBuilder.loadTexts: slp1EnvMonTempHumidSensorCount.setDescription('The number of temperature/humidity sensors on the environmental monitor.')
slp1EnvMonContactClosureCount = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 4, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4))).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1EnvMonContactClosureCount.setStatus('current')
if mibBuilder.loadTexts: slp1EnvMonContactClosureCount.setDescription('The number of contact closures on the environmental monitor.')
slp1TempHumidSensorTable = MibTable((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 5), )
if mibBuilder.loadTexts: slp1TempHumidSensorTable.setStatus('current')
if mibBuilder.loadTexts: slp1TempHumidSensorTable.setDescription('A table of temperature/humidity sensors.')
slp1TempHumidSensorEntry = MibTableRow((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 5, 1), ).setIndexNames((0, "SLP1-MIB", "slp1EnvMonIndex"), (0, "SLP1-MIB", "slp1TempHumidSensorIndex"))
if mibBuilder.loadTexts: slp1TempHumidSensorEntry.setStatus('current')
if mibBuilder.loadTexts: slp1TempHumidSensorEntry.setDescription('Row definition for the temperature/humidity sensor table.')
slp1TempHumidSensorIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 5, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2)))
if mibBuilder.loadTexts: slp1TempHumidSensorIndex.setStatus('current')
if mibBuilder.loadTexts: slp1TempHumidSensorIndex.setDescription('Index for the temperature/humidity sensor table.')
slp1TempHumidSensorID = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 5, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(2, 2)).setFixedLength(2)).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1TempHumidSensorID.setStatus('current')
if mibBuilder.loadTexts: slp1TempHumidSensorID.setDescription('The ID of the temperature/humidity sensor.')
slp1TempHumidSensorName = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 5, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 24))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: slp1TempHumidSensorName.setStatus('current')
if mibBuilder.loadTexts: slp1TempHumidSensorName.setDescription('The name of the temperature/humidity sensor.')
slp1TempHumidSensorStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 5, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("found", 0), ("notFound", 1), ("lost", 2), ("noComm", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1TempHumidSensorStatus.setStatus('current')
if mibBuilder.loadTexts: slp1TempHumidSensorStatus.setDescription('The operational status of the temperature/humidity sensor.')
slp1TempHumidSensorTempStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 5, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("normal", 0), ("notFound", 1), ("reading", 2), ("tempLow", 3), ("tempHigh", 4), ("readError", 5), ("lost", 6), ("noComm", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1TempHumidSensorTempStatus.setStatus('current')
if mibBuilder.loadTexts: slp1TempHumidSensorTempStatus.setDescription('The status of the temperature sensor.')
slp1TempHumidSensorTempValue = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 5, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1, 1235))).setUnits('tenth degrees Celsius').setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1TempHumidSensorTempValue.setStatus('current')
if mibBuilder.loadTexts: slp1TempHumidSensorTempValue.setDescription('The temperature measured by the sensor. A non-negative value indicates the measured temperature in tenths of degrees Celsius. A negative value indicates that a temperature value was not able to be measured.')
slp1TempHumidSensorTempLowThresh = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 5, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 123))).setUnits('degrees Celsius').setMaxAccess("readwrite")
if mibBuilder.loadTexts: slp1TempHumidSensorTempLowThresh.setStatus('current')
if mibBuilder.loadTexts: slp1TempHumidSensorTempLowThresh.setDescription('The temperature low threshold value of the sensor in degrees Celsius.')
slp1TempHumidSensorTempHighThresh = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 5, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 123))).setUnits('degrees Celsius').setMaxAccess("readwrite")
if mibBuilder.loadTexts: slp1TempHumidSensorTempHighThresh.setStatus('current')
if mibBuilder.loadTexts: slp1TempHumidSensorTempHighThresh.setDescription('The temperature high threshold value of the sensor in degrees Celsius.')
slp1TempHumidSensorHumidStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 5, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("normal", 0), ("notFound", 1), ("reading", 2), ("humidLow", 3), ("humidHigh", 4), ("readError", 5), ("lost", 6), ("noComm", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1TempHumidSensorHumidStatus.setStatus('current')
if mibBuilder.loadTexts: slp1TempHumidSensorHumidStatus.setDescription('The status of the humidity sensor.')
slp1TempHumidSensorHumidValue = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 5, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1, 100))).setUnits('percentage relative humidity').setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1TempHumidSensorHumidValue.setStatus('current')
if mibBuilder.loadTexts: slp1TempHumidSensorHumidValue.setDescription('The humidity measured by the sensor. A non-negative value indicates the measured humidity in percentage relative humidity. A negative value indicates that a humidity value was not able to be measured.')
slp1TempHumidSensorHumidLowThresh = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 5, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setUnits('percentage relative humidity').setMaxAccess("readwrite")
if mibBuilder.loadTexts: slp1TempHumidSensorHumidLowThresh.setStatus('current')
if mibBuilder.loadTexts: slp1TempHumidSensorHumidLowThresh.setDescription('The humidity low threshold value of the sensor in percentage relative humidity.')
slp1TempHumidSensorHumidHighThresh = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 5, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setUnits('percentage relative humidity').setMaxAccess("readwrite")
if mibBuilder.loadTexts: slp1TempHumidSensorHumidHighThresh.setStatus('current')
if mibBuilder.loadTexts: slp1TempHumidSensorHumidHighThresh.setDescription('The humidity high threshold value of the sensor in percentage relative humidity.')
slp1ContactClosureTable = MibTable((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 6), )
if mibBuilder.loadTexts: slp1ContactClosureTable.setStatus('current')
if mibBuilder.loadTexts: slp1ContactClosureTable.setDescription('A table of contact closures.')
slp1ContactClosureEntry = MibTableRow((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 6, 1), ).setIndexNames((0, "SLP1-MIB", "slp1EnvMonIndex"), (0, "SLP1-MIB", "slp1ContactClosureIndex"))
if mibBuilder.loadTexts: slp1ContactClosureEntry.setStatus('current')
if mibBuilder.loadTexts: slp1ContactClosureEntry.setDescription('Row definition for the contact closure table.')
slp1ContactClosureIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 6, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4)))
if mibBuilder.loadTexts: slp1ContactClosureIndex.setStatus('current')
if mibBuilder.loadTexts: slp1ContactClosureIndex.setDescription('Index for the contact closure table.')
slp1ContactClosureID = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 6, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(2, 2)).setFixedLength(2)).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1ContactClosureID.setStatus('current')
if mibBuilder.loadTexts: slp1ContactClosureID.setDescription('The ID of the contact closure.')
slp1ContactClosureName = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 6, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 24))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: slp1ContactClosureName.setStatus('current')
if mibBuilder.loadTexts: slp1ContactClosureName.setDescription('The name of the contact closure.')
slp1ContactClosureStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 2, 6, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("normal", 0), ("alarm", 1), ("noComm", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: slp1ContactClosureStatus.setStatus('current')
if mibBuilder.loadTexts: slp1ContactClosureStatus.setDescription('The status of the contact closure.')
slp1Traps = MibIdentifier((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 100))
slp1Events = MibIdentifier((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 100, 0))
slp1TowerStatusEvent = NotificationType((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 100, 0, 1)).setObjects(("SLP1-MIB", "slp1SystemLocation"), ("SLP1-MIB", "slp1TowerID"), ("SLP1-MIB", "slp1TowerName"), ("SLP1-MIB", "slp1TowerStatus"))
if mibBuilder.loadTexts: slp1TowerStatusEvent.setStatus('current')
if mibBuilder.loadTexts: slp1TowerStatusEvent.setDescription("Tower status event. If enabled, this trap is sent when the slp1TowerStatus indicates an error state ('noComm'). This trap is repeated periodically while the slp1TowerStatus remains in an error state. If the slp1TowerStatus returns to a non-error state ('normal'), this trap is sent once more with the non-error slp1TowerStatus, and then stops being repeated. While the slp1TowerStatus indicates an error state, all status and load traps are suppressed for input feeds and outlets on the tower.")
slp1InfeedStatusEvent = NotificationType((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 100, 0, 2)).setObjects(("SLP1-MIB", "slp1SystemLocation"), ("SLP1-MIB", "slp1InfeedID"), ("SLP1-MIB", "slp1InfeedName"), ("SLP1-MIB", "slp1InfeedStatus"))
if mibBuilder.loadTexts: slp1InfeedStatusEvent.setStatus('current')
if mibBuilder.loadTexts: slp1InfeedStatusEvent.setDescription("Input feed status event. If enabled, this trap is sent when the slp1InfeedStatus indicates an error state ('offError', 'onError', or 'noComm'). This trap is repeated periodically while the slp1InfeedStatus remains in an error state. If the slp1InfeedStatus returns to a non-error state ('off' or 'on'), this trap is sent once more with the non-error slp1InfeedStatus, and then stops being repeated. While the slp1InfeedStatus indicates an error state, load traps are suppressed for the input feed, and, if the slp1InfeedCapabilities 'failSafe' bit is FALSE, all status and load traps are suppressed for outlets on the input feed.")
slp1InfeedLoadEvent = NotificationType((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 100, 0, 3)).setObjects(("SLP1-MIB", "slp1SystemLocation"), ("SLP1-MIB", "slp1InfeedID"), ("SLP1-MIB", "slp1InfeedName"), ("SLP1-MIB", "slp1InfeedLoadStatus"), ("SLP1-MIB", "slp1InfeedLoadValue"), ("SLP1-MIB", "slp1InfeedLoadHighThresh"))
if mibBuilder.loadTexts: slp1InfeedLoadEvent.setStatus('current')
if mibBuilder.loadTexts: slp1InfeedLoadEvent.setDescription("Input feed load event. If enabled, this trap is sent when the slp1InfeedLoadStatus indicates an error state ('loadLow', 'loadHigh', 'overLoad', 'readError', or 'noComm'). This trap is repeated periodically while the slp1InfeedLoadStatus remains in an error state. If the slp1InfeedLoadStatus returns to a non-error state ('normal' or 'notOn'), this trap is sent once more with the non-error slp1InfeedLoadStatus, and then stops being repeated.")
slp1OutletStatusEvent = NotificationType((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 100, 0, 4)).setObjects(("SLP1-MIB", "slp1SystemLocation"), ("SLP1-MIB", "slp1OutletID"), ("SLP1-MIB", "slp1OutletName"), ("SLP1-MIB", "slp1OutletStatus"))
if mibBuilder.loadTexts: slp1OutletStatusEvent.setStatus('current')
if mibBuilder.loadTexts: slp1OutletStatusEvent.setDescription("Outlet status event. If enabled, this trap is sent when the slp1OutletStatus indicates an error state ('offError', 'onError', or 'noComm'). This trap is repeated periodically while the slp1OutletStatus remains in an error state. If the slp1OutletStatus returns to a non-error state ('off' or 'on'), this trap is sent once more with the non-error slp1OutletStatus, and then stops being repeated. While the slp1OutletStatus indicates an error state, load traps are suppressed for the outlet.")
slp1OutletLoadEvent = NotificationType((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 100, 0, 5)).setObjects(("SLP1-MIB", "slp1SystemLocation"), ("SLP1-MIB", "slp1OutletID"), ("SLP1-MIB", "slp1OutletName"), ("SLP1-MIB", "slp1OutletLoadStatus"), ("SLP1-MIB", "slp1OutletLoadValue"), ("SLP1-MIB", "slp1OutletLoadLowThresh"), ("SLP1-MIB", "slp1OutletLoadHighThresh"))
if mibBuilder.loadTexts: slp1OutletLoadEvent.setStatus('current')
if mibBuilder.loadTexts: slp1OutletLoadEvent.setDescription("Outlet load event. If enabled, this trap is sent when the slp1OutletLoadStatus indicates an error state ('loadLow', 'loadHigh', 'overLoad', 'readError', or 'noComm'). This trap is repeated periodically while the slp1OutletLoadStatus remains in an error state. If the slp1OutletLoadStatus returns to a non-error state ('normal' or 'notOn'), this trap is sent once more with the non-error slp1OutletLoadStatus, and then stops being repeated.")
slp1OutletChangeEvent = NotificationType((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 100, 0, 6)).setObjects(("SLP1-MIB", "slp1SystemLocation"), ("SLP1-MIB", "slp1OutletID"), ("SLP1-MIB", "slp1OutletName"), ("SLP1-MIB", "slp1OutletStatus"), ("SLP1-MIB", "slp1OutletControlState"))
if mibBuilder.loadTexts: slp1OutletChangeEvent.setStatus('current')
if mibBuilder.loadTexts: slp1OutletChangeEvent.setDescription("Outlet on/off change event. If enabled, this trap is sent when the slp1OutletStatus changes from any 'on' state ('on', 'onWait' or 'onError') to any 'off' state ('off', 'offWait' or 'offError'), and vice-versa.")
slp1EnvMonStatusEvent = NotificationType((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 100, 0, 7)).setObjects(("SLP1-MIB", "slp1SystemLocation"), ("SLP1-MIB", "slp1EnvMonID"), ("SLP1-MIB", "slp1EnvMonName"), ("SLP1-MIB", "slp1EnvMonStatus"))
if mibBuilder.loadTexts: slp1EnvMonStatusEvent.setStatus('current')
if mibBuilder.loadTexts: slp1EnvMonStatusEvent.setDescription("Environmental monitor status event. If enabled, this trap is sent when the slp1EnvMonStatus indicates an error state ('noComm'). This trap is repeated periodically while the slp1EnvMonStatus remains in an error state. If the slp1EnvMonStatus returns to a non-error state ('normal'), this trap is sent once more with the non-error slp1EnvMonStatus, and then stops being repeated. While the slp1EnvMonStatus indicates an error state, all status traps are suppressed for the water sensor, ADC, temperature/humidity sensors, and contact closures on the environmental monitor.")
slp1EnvMonWaterSensorEvent = NotificationType((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 100, 0, 8)).setObjects(("SLP1-MIB", "slp1SystemLocation"), ("SLP1-MIB", "slp1EnvMonID"), ("SLP1-MIB", "slp1EnvMonWaterSensorName"), ("SLP1-MIB", "slp1EnvMonWaterSensorStatus"))
if mibBuilder.loadTexts: slp1EnvMonWaterSensorEvent.setStatus('current')
if mibBuilder.loadTexts: slp1EnvMonWaterSensorEvent.setDescription("Environmental monitor water sensor event. If enabled, this trap is sent when the slp1EnvMonWaterSensorStatus indicates an error state ('alarm'). This trap is repeated periodically while the slp1EnvMonWaterSensorStatus remains in an error state. If the slp1EnvMonWaterSensorStatus returns to a non-error state ('normal'), this trap is sent once more with the non-error slp1EnvMonWaterSensorStatus, and then stops being repeated.")
slp1EnvMonADCEvent = NotificationType((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 100, 0, 9)).setObjects(("SLP1-MIB", "slp1SystemLocation"), ("SLP1-MIB", "slp1EnvMonID"), ("SLP1-MIB", "slp1EnvMonADCName"), ("SLP1-MIB", "slp1EnvMonADCStatus"), ("SLP1-MIB", "slp1EnvMonADCCount"), ("SLP1-MIB", "slp1EnvMonADCLowThresh"), ("SLP1-MIB", "slp1EnvMonADCHighThresh"))
if mibBuilder.loadTexts: slp1EnvMonADCEvent.setStatus('current')
if mibBuilder.loadTexts: slp1EnvMonADCEvent.setDescription("Environmental monitor analog-to-digital converter event. If enabled, this trap is sent when the slp1EnvMonADCStatus indicates an error state ('countLow' or 'countHigh'). This trap is repeated periodically while the slp1EnvMonADCStatus remains in an error state. If the slp1EnvMonADCStatus returns to a non-error state ('normal'), this trap is sent once more with the non-error slp1EnvMonADCStatus, and then stops being repeated.")
slp1TempHumidSensorStatusEvent = NotificationType((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 100, 0, 10)).setObjects(("SLP1-MIB", "slp1SystemLocation"), ("SLP1-MIB", "slp1TempHumidSensorID"), ("SLP1-MIB", "slp1TempHumidSensorName"), ("SLP1-MIB", "slp1TempHumidSensorStatus"))
if mibBuilder.loadTexts: slp1TempHumidSensorStatusEvent.setStatus('current')
if mibBuilder.loadTexts: slp1TempHumidSensorStatusEvent.setDescription("Temperature/humidity sensor status event. If enabled, this trap is sent when the slp1TempHumidSensorStatus indicates an error state ('lost'). This trap is repeated periodically while the slp1TempHumidSensorStatus remains in an error state. If the slp1TempHumidSensorStatus returns to a non-error state ('found'), this trap is sent once more with the non-error slp1TempHumidSensorStatus, and then stops being repeated. While the slp1TempHumidSensorStatus indicates an error state, all temperature and humidity status traps are suppressed for the temperature/humidity sensor.")
slp1TempHumidSensorTempEvent = NotificationType((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 100, 0, 11)).setObjects(("SLP1-MIB", "slp1SystemLocation"), ("SLP1-MIB", "slp1TempHumidSensorID"), ("SLP1-MIB", "slp1TempHumidSensorName"), ("SLP1-MIB", "slp1TempHumidSensorTempStatus"), ("SLP1-MIB", "slp1TempHumidSensorTempValue"), ("SLP1-MIB", "slp1TempHumidSensorTempLowThresh"), ("SLP1-MIB", "slp1TempHumidSensorTempHighThresh"))
if mibBuilder.loadTexts: slp1TempHumidSensorTempEvent.setStatus('current')
if mibBuilder.loadTexts: slp1TempHumidSensorTempEvent.setDescription("Temperature/humidity sensor temperature event. If enabled, this trap is sent when the slp1TempHumidSensorTempStatus indicates an error state ('tempLow' or 'tempHigh'). This trap is repeated periodically while the slp1TempHumidSensorTempStatus remains in an error state. If the slp1TempHumidSensorTempStatus returns to a non-error state ('normal'), this trap is sent once more with the non-error slp1TempHumidSensorTempStatus, and then stops being repeated.")
slp1TempHumidSensorHumidEvent = NotificationType((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 100, 0, 12)).setObjects(("SLP1-MIB", "slp1SystemLocation"), ("SLP1-MIB", "slp1TempHumidSensorID"), ("SLP1-MIB", "slp1TempHumidSensorName"), ("SLP1-MIB", "slp1TempHumidSensorHumidStatus"), ("SLP1-MIB", "slp1TempHumidSensorHumidValue"), ("SLP1-MIB", "slp1TempHumidSensorHumidLowThresh"), ("SLP1-MIB", "slp1TempHumidSensorHumidHighThresh"))
if mibBuilder.loadTexts: slp1TempHumidSensorHumidEvent.setStatus('current')
if mibBuilder.loadTexts: slp1TempHumidSensorHumidEvent.setDescription("Temperature/humidity sensor humidity event. If enabled, this trap is sent when the slp1TempHumidSensorHumidStatus indicates an error state ('humidLow' or 'humidHigh'). This trap is repeated periodically while the slp1TempHumidSensorHumidStatus remains in an error state. If the slp1TempHumidSensorHumidStatus returns to a non-error state ('normal'), this trap is sent once more with the non-error slp1TempHumidSensorHumidStatus, and then stops being repeated.")
slp1ContactClosureEvent = NotificationType((1, 3, 6, 1, 4, 1, 244, 1, 9, 1, 100, 0, 13)).setObjects(("SLP1-MIB", "slp1SystemLocation"), ("SLP1-MIB", "slp1ContactClosureID"), ("SLP1-MIB", "slp1ContactClosureName"), ("SLP1-MIB", "slp1ContactClosureStatus"))
if mibBuilder.loadTexts: slp1ContactClosureEvent.setStatus('current')
if mibBuilder.loadTexts: slp1ContactClosureEvent.setDescription("Contact closure event. If enabled, this trap is sent when the slp1ContactClosureStatus indicates an error state ('alarm'). This trap is repeated periodically while the slp1ContactClosureStatus remains in an error state. If the slp1ContactClosureStatus returns to a non-error state ('normal'), this trap is sent once more with the non-error slp1ContactClosureStatus, and then stops being repeated.")
mibBuilder.exportSymbols("SLP1-MIB", slp1InfeedEntry=slp1InfeedEntry, slp1InfeedLoadStatus=slp1InfeedLoadStatus, slp1SystemVersion=slp1SystemVersion, slp1InfeedStatus=slp1InfeedStatus, slp1ContactClosureName=slp1ContactClosureName, slp1SystemTowerCount=slp1SystemTowerCount, slp1OutletStatusEvent=slp1OutletStatusEvent, slp1OutletLoadStatus=slp1OutletLoadStatus, lantronix=lantronix, slp1InfeedLoadEvent=slp1InfeedLoadEvent, slp1OutletLoadEvent=slp1OutletLoadEvent, slp1TempHumidSensorTempStatus=slp1TempHumidSensorTempStatus, slp1TempHumidSensorTable=slp1TempHumidSensorTable, slp1TowerID=slp1TowerID, slp1TempHumidSensorHumidStatus=slp1TempHumidSensorHumidStatus, slp1ContactClosureStatus=slp1ContactClosureStatus, slp1TempHumidSensorID=slp1TempHumidSensorID, slp1TempHumidSensorTempLowThresh=slp1TempHumidSensorTempLowThresh, slp1OutletEntry=slp1OutletEntry, slp1EnvMonTable=slp1EnvMonTable, slp1TowerStatus=slp1TowerStatus, slp1OutletIndex=slp1OutletIndex, slp1InfeedOutletCount=slp1InfeedOutletCount, slp1TempHumidSensorEntry=slp1TempHumidSensorEntry, slp1ContactClosureEvent=slp1ContactClosureEvent, slp1TowerTable=slp1TowerTable, slp1OutletLoadHighThresh=slp1OutletLoadHighThresh, slp1TempHumidSensorTempValue=slp1TempHumidSensorTempValue, slp1ContactClosureID=slp1ContactClosureID, slp=slp, slp1SystemLocation=slp1SystemLocation, slp1Events=slp1Events, slp1TempHumidSensorStatus=slp1TempHumidSensorStatus, slp1EnvMonTempHumidSensorCount=slp1EnvMonTempHumidSensorCount, slp1EnvMonID=slp1EnvMonID, slp1InfeedStatusEvent=slp1InfeedStatusEvent, slp1=slp1, slp1EnvMonADCHighThresh=slp1EnvMonADCHighThresh, slp1InfeedTable=slp1InfeedTable, slp1TempHumidSensorIndex=slp1TempHumidSensorIndex, slp1TowerIndex=slp1TowerIndex, slp1EnvMonADCName=slp1EnvMonADCName, slp1ContactClosureTable=slp1ContactClosureTable, slp1SystemEnvMonCount=slp1SystemEnvMonCount, slp1OutletLoadLowThresh=slp1OutletLoadLowThresh, slp1TempHumidSensorName=slp1TempHumidSensorName, slp1TowerEntry=slp1TowerEntry, slp1OutletTable=slp1OutletTable, slp1TempHumidSensorHumidValue=slp1TempHumidSensorHumidValue, slp1OutletID=slp1OutletID, slp1TempHumidSensorHumidLowThresh=slp1TempHumidSensorHumidLowThresh, slp1InfeedLoadHighThresh=slp1InfeedLoadHighThresh, slp1TowerInfeedCount=slp1TowerInfeedCount, slp1EnvMonADCCount=slp1EnvMonADCCount, slp1TowerStatusEvent=slp1TowerStatusEvent, slp1EnvMonStatusEvent=slp1EnvMonStatusEvent, slp1EnvMonADCLowThresh=slp1EnvMonADCLowThresh, slp1EnvMonWaterSensorEvent=slp1EnvMonWaterSensorEvent, slp1OutletName=slp1OutletName, slp1EnvMonADCEvent=slp1EnvMonADCEvent, slp1TempHumidSensorTempEvent=slp1TempHumidSensorTempEvent, slp1SystemNICSerialNumber=slp1SystemNICSerialNumber, slp1InfeedLoadValue=slp1InfeedLoadValue, slp1OutletLoadValue=slp1OutletLoadValue, slp1InfeedIndex=slp1InfeedIndex, slp1SystemTables=slp1SystemTables, slp1EnvMonStatus=slp1EnvMonStatus, slp1Traps=slp1Traps, slp1EnvMonIndex=slp1EnvMonIndex, slp1EnvMonWaterSensorStatus=slp1EnvMonWaterSensorStatus, slp1OutletChangeEvent=slp1OutletChangeEvent, slp1TempHumidSensorStatusEvent=slp1TempHumidSensorStatusEvent, slp1TempHumidSensorTempHighThresh=slp1TempHumidSensorTempHighThresh, slp1OutletControlAction=slp1OutletControlAction, slp1EnvMonADCStatus=slp1EnvMonADCStatus, slp1SystemGroup=slp1SystemGroup, slp1EnvMonEntry=slp1EnvMonEntry, slp1EnvMonWaterSensorName=slp1EnvMonWaterSensorName, slp1InfeedID=slp1InfeedID, slp1EnvMonContactClosureCount=slp1EnvMonContactClosureCount, slp1TempHumidSensorHumidHighThresh=slp1TempHumidSensorHumidHighThresh, slp1EnvMonName=slp1EnvMonName, 
PYSNMP_MODULE_ID=slp1, slp1InfeedName=slp1InfeedName, slp1ContactClosureIndex=slp1ContactClosureIndex, slp1OutletCapabilities=slp1OutletCapabilities, slp1OutletControlState=slp1OutletControlState, products=products, slp1TempHumidSensorHumidEvent=slp1TempHumidSensorHumidEvent, slp1TowerName=slp1TowerName, slp1ContactClosureEntry=slp1ContactClosureEntry, slp1OutletStatus=slp1OutletStatus, slp1InfeedCapabilities=slp1InfeedCapabilities)
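# --- Hedged usage sketch (an assumption, not part of the generated module) ---
# A manager application might read one of the columns defined above with
# pysnmp's high-level API, assuming the compiled SLP1-MIB is on the MIB
# search path. The agent address ('192.0.2.10') and community string
# ('public') are hypothetical placeholders; the function is defined here
# but never called.
def _example_read_infeed_load():
    from pysnmp.hlapi import (getCmd, SnmpEngine, CommunityData,
                              UdpTransportTarget, ContextData,
                              ObjectType, ObjectIdentity)
    errorIndication, errorStatus, errorIndex, varBinds = next(getCmd(
        SnmpEngine(),
        CommunityData('public'),
        UdpTransportTarget(('192.0.2.10', 161)),
        ContextData(),
        # Instance (1, 1): tower index 1, infeed index 1.
        ObjectType(ObjectIdentity('SLP1-MIB', 'slp1InfeedLoadValue', 1, 1))))
    if errorIndication is None and not errorStatus:
        for name, value in varBinds:
            # slp1InfeedLoadValue is in hundredths of Amps; a negative value
            # means the load could not be measured.
            print(name.prettyPrint(), '=', value.prettyPrint())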
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import tvm
import tvm.testing
from tvm import te
def test_basic():
    a = te.var("a")
    b = te.var("b")
    c = te.var("c")
    # Bounds implied on `a` by the conjunction: a >= 2 and a <= b * 6 - 1.
    m = tvm.arith.detect_clip_bound(
        tvm.tir.all(a * 1 < b * 6, a - 1 > 0), [a])
    tvm.testing.assert_prim_expr_equal(m[1], b * 6 - 1)
    assert m[0].value == 2
    # No clip bounds are detected when the constraints couple the variables.
    m = tvm.arith.detect_clip_bound(
        tvm.tir.all(a * 1 < b * 6, a - 1 > 0), [a, b])
    assert len(m) == 0
    # Results come back as a flat [min, max, min, max, ...] list per variable.
    m = tvm.arith.detect_clip_bound(
        tvm.tir.all(a + 10 * c <= 20, b - 1 > 0), [a, b])
    tvm.testing.assert_prim_expr_equal(m[1], 20 - 10 * c)
    tvm.testing.assert_prim_expr_equal(m[2], 2)
if __name__ == "__main__":
test_basic()
|
"""
Total Field Magnetic Anomaly from Great Britain
================================================
These data are from a complete airborne survey of the whole of Great Britain
conducted between 1955 and 1965. The data are made available by the
British Geological Survey (BGS) through their `geophysical data portal
<https://www.bgs.ac.uk/products/geophysics/aeromagneticRegional.html>`__.
License: `Open Government License
<http://www.nationalarchives.gov.uk/doc/open-government-licence/version/3/>`__
The columns of the data table are longitude, latitude, total-field
magnetic anomaly (nanoTesla), observation height relative to the WGS84 datum
(in meters), survey area, and line number and line segment for each data point.
Latitude, longitude, and elevation data were converted from the original
OSGB36 (epsg:27700) coordinate system to WGS84 (epsg:4326) using the
``to_crs`` function in GeoPandas.
See the original data for more processing information.
If the file isn't already in your data directory, it will be downloaded
automatically.
"""
import matplotlib.pyplot as plt
import cartopy.crs as ccrs
import verde as vd
import harmonica as hm
import numpy as np
# Fetch the data in a pandas.DataFrame
data = hm.datasets.fetch_britain_magnetic()
print(data)
# Plot the observations on a Mercator map using Cartopy
fig = plt.figure(figsize=(7.5, 10))
ax = plt.axes(projection=ccrs.Mercator())
ax.set_title("Magnetic data from Great Britain", pad=25)
maxabs = np.percentile(data.total_field_anomaly_nt, 99)
tmp = ax.scatter(
data.longitude,
data.latitude,
c=data.total_field_anomaly_nt,
s=0.001,
cmap="seismic",
vmin=-maxabs,
vmax=maxabs,
transform=ccrs.PlateCarree(),
)
plt.colorbar(
tmp,
ax=ax,
label="total field magnetic anomaly [nT]",
orientation="vertical",
aspect=50,
shrink=0.7,
pad=0.1,
)
ax.set_extent(vd.get_region((data.longitude, data.latitude)))
ax.gridlines(draw_labels=True)
ax.coastlines(resolution="50m")
plt.tight_layout()
plt.show()
|
from django.shortcuts import render
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.sites.shortcuts import get_current_site
import pymysql
from django.urls import reverse
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.template.loader import render_to_string
from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode
from django.core.mail import EmailMessage
from .tokens import account_activation_token
from django.utils.encoding import force_bytes, force_text
# Create your views here.
def login_view(request):
context = {}
return render(request, 'log/login.html', context)
def logout_view(request):
logout(request)
if request.GET and 'next' in request.GET:
return HttpResponseRedirect(request.GET['next'])
else:
return HttpResponseRedirect(reverse('shop:home_url'))
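# --- Hedged sketch (an assumption, not part of the original app) ---
# login_view above only renders the form. A POST-handling counterpart using
# the authenticate/login helpers imported at the top of this module might
# look like the following; the field names 'username' and 'password' are
# assumed, as is reuse of the 'log/login.html' template.
def login_post_view(request):
    if request.method == 'POST':
        user = authenticate(
            request,
            username=request.POST.get('username'),
            password=request.POST.get('password'),
        )
        if user is not None:
            login(request, user)  # bind the authenticated user to the session
            return HttpResponseRedirect(reverse('shop:home_url'))
    # on GET or failed authentication, fall back to the login form
    return render(request, 'log/login.html', {})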
|
from baselines.pposgd.pposgd_simple import OBSERVATION_DIM, set_obstacles
# obstacle1.x = 1
# obstacle2.x = 2.5
# obstacle3.x = 2.5 (hidden behind obstacle2)
result = [0] * 42
dx = 0
step = 1
set_obstacles(result, dx, step)
assert set_obstacles.prev == [-2.0, 0.0, 0.1]
assert set_obstacles.next == [2.0, 0.0, 0.1]
assert result[2:36] == [0] * 34
assert result[36:42] == [-0.4, 0.0, 0.0, 0.4, 0.0, 0.0]
step = 2
result = [0] * 42
result[36] = 1
result[37] = 0.1
result[38] = 0.2
set_obstacles(result, dx, step)
assert set_obstacles.prev == [-2.0, 0.0, 0.1]
assert set_obstacles.next == [1.0, 0.1, 0.2]
assert result[2:36] == [0] * 34
assert result[36:42] == [-0.4, 0.0, 0.0, 0.2, 0.4, 0.1]
step = 3
dx = 0.1
result = [0] * 42
result[1] = 0.1
result[36] = 0.9
result[37] = 0.1
result[38] = 0.2
set_obstacles(result, dx, step)
assert set_obstacles.prev == [-2.1, 0.0, 0.1]
assert set_obstacles.next == [0.9, 0.1, 0.2]
assert result[2:36] == [0] * 34
assert result[36:42] == [-0.4, 0.0, 0.0, 0.18, 0.4, 0.1]
step = 4
dx = 1.1
result = [0] * 42
result[1] = 1.2
result[36] = -0.2
result[37] = 0.1
result[38] = 0.2
set_obstacles(result, dx, step)
assert set_obstacles.prev == [-3.2, 0.0, 0.1]
assert set_obstacles.next == [-0.20000000000000007, 0.1, 0.2]
assert result[2:36] == [0] * 34
assert result[36:42] == [-0.4, 0.0, 0.0, -0.040000000000000015, 0.4, 0.1]
step = 5
dx = 0.1
result = [0] * 42
result[1] = 1.3
result[36] = 1.2
result[37] = 0.3
result[38] = 0.4
set_obstacles(result, dx, step)
assert set_obstacles.prev == [-0.30000000000000004, 0.1, 0.2]
assert set_obstacles.next == [1.2, 0.3, 0.4]
assert result[2:36] == [0] * 34
assert result[36:42] == [-0.06000000000000001, 0.4, 0.1, 0.24, 1.2, 0.30000000000000004]
step = 6
dx = 1.6
result = [0] * 42
result[1] = 2.9
result[36] = 100
result[37] = 0
result[38] = 0
set_obstacles(result, dx, step)
assert set_obstacles.prev == [-0.40000000000000013, 0.3, 0.4]
assert set_obstacles.next == [-2.0, 0.0, 0.0]
assert result[2:36] == [0] * 34
assert result[36:42] == [-0.08000000000000003, 1.2, 0.30000000000000004, -0.4, 0.0, -0.1]
|
import math
def arrowprops(dx, dy, **kwargs):
    # Size the arrow head relative to the arrow's overall length.
    m = math.sqrt(dx**2 + dy**2)
    l = 1/8 * m  # head length: one eighth of the arrow magnitude
    w = 2/3 * l  # head width: two thirds of the head length
return {
'head_length': kwargs.get('head_length', l),
'head_width': kwargs.get('head_width', w),
'length_includes_head': True,
'overhang': kwargs.get('overhang', 0.0),
'fc': kwargs.get('fc', 'b'),
'ec': kwargs.get('ec', 'b')
}
def plot_rect(ax, p, fmt='b'):
x, y = p
ax.plot([0, x], [y, y], fmt) # horizontal line
ax.plot([x, x], [0, y], fmt) # vertical line
def setup_axes(ax, **kwargs):
ticklength = kwargs.get('ticklength', 15)
tickwidth = kwargs.get('tickwidth', 1)
tickdirection = kwargs.get('tickdirection', 'inout')
xticks = kwargs.get('xticks', [])
yticks = kwargs.get('yticks', [])
xticklabels = kwargs.get('xticklabels', [])
yticklabels = kwargs.get('yticklabels', [])
ax.tick_params('both', length=ticklength, width=tickwidth, direction=tickdirection)
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
ax.spines['left'].set_position(('data', 0))
ax.spines['bottom'].set_position(('data', 0))
    ax.xaxis.set_ticks(xticks)
    ax.xaxis.set_ticklabels(xticklabels)
    ax.yaxis.set_ticks(yticks)
    ax.yaxis.set_ticklabels(yticklabels)
ax.xaxis.tick_bottom()
ax.yaxis.tick_left()
ax.set_xlim(*kwargs.get('xlim', (-1, 1)))
ax.set_ylim(*kwargs.get('ylim', (-1, 1)))
    ax.set_aspect(1.0)
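# --- Hedged usage sketch (an assumption, not part of the original helpers) ---
# How the three helpers above might be combined: centered axes, dashed guide
# lines to a point, and an arrow from the origin drawn with arrowprops()
# supplying keyword arguments for Axes.arrow.
if __name__ == '__main__':
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    setup_axes(ax, xticks=[0.5], yticks=[0.75],
               xticklabels=['x'], yticklabels=['y'])
    p = (0.5, 0.75)
    plot_rect(ax, p, fmt='b--')           # guide lines from the axes to p
    ax.arrow(0, 0, *p, **arrowprops(*p))  # vector from the origin to p
    plt.show()
|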
import csv
import os
from os.path import basename
import fileinput
import glob
import os.path
import itertools
# to be modified
path_store = 'D:/Projects/DANTE/annotation/'
path_save = 'D:/Projects/DANTE/annotation/expert/'
path_videos = 'D:/Projects/DANTE/video/alpha/'
def readValuesFromCSV(spamReader):
frame = list()
timestamp = list()
value = list()
csv_list = list()
    # start from -1 because the first value should be the column-name header string
count = -1
for row in spamReader:
if (count==-1):
frame.append('Frame')
else:
frame.append(count)
timestamp.append(row[0])
value.append(row[4])
count+=1
csv_list.append(frame)
csv_list.append(timestamp)
csv_list.append(value)
return len(frame), csv_list
def readValuesFromExpert(file, spamReader):
valuesExpert = list()
for row in spamReader:
valuesExpert.append(row[2])
return valuesExpert
def calculateEWE(dictExpertCoeff, video, dim, value):
N = 0
D = 0
valuesAvg = list()
valuesAvg.append("Value")
#print("dim",dim, "video", video)
for i in range(1,dim-1):
        # loop over all annotators other than the expert
for annotator in os.listdir(path_store):
if os.path.isdir(os.path.join(path_store, annotator)) and annotator!='expert':
                # loop over all of that annotator's videos
for vid in os.listdir(path_store+annotator):
                    # if it exists
base = (basename(video))
videoName = os.path.splitext(base)[0]
if vid == videoName + '_mp4':
                        # path differs depending on whether it is valence or arousal
path_name = path_store+annotator+'/'+vid+'/'+'valence.csv' if value == 'valence' else path_store+annotator+'/'+vid+'/'+'arousal.csv'
if os.path.exists(path_name):
                            #print('\nexisting values for annotator: ' + annotator)
#open file and extract values
with open(path_name, 'rt') as csvfile:
spamReader = csv.reader(csvfile, delimiter=';', quotechar='|')
dim, csv_list = readValuesFromCSV(spamReader)
tmp = csv_list[2]
#print(dictExpertCoeff.get(annotator))
#print(i)
print("len",len(tmp), annotator, i)
N += float(tmp[i]) * float(abs(dictExpertCoeff.get(annotator)))
D += abs(dictExpertCoeff.get(annotator))
valuesAvg.append(float(N)/float(D))
#print(valuesAvg)
return valuesAvg
def getExpertValues(video, value):
values = list()
valuesTmp = list()
valuesAvg = list()
valueDiffExpert = 0
dim = 0
count = 0
frames = list()
timestamps = list()
dictExpertCoeff = {}
file = 'expert_'+ value + '_' + video + '.csv'
if (os.path.exists(path_save+file)):
        print('expert found')
#open file and extract values
with open(path_save + file, 'rt') as csvfile:
spamReaderExpert = csv.reader(csvfile, delimiter=';', quotechar='|')
values = readValuesFromExpert(file, spamReaderExpert)
#print('Expert', values)
else:
        print('expert NOT found')
        # loop over all annotators other than the expert
for annotator in os.listdir(path_store):
if os.path.isdir(os.path.join(path_store, annotator)) and annotator!='expert':
#ciclo su tutti i video di quell'annotatore
for vid in os.listdir(path_store+annotator):
                    # if it exists
if vid == video + '_mp4':
                        # path differs depending on whether it is valence or arousal
path_name = path_store+annotator+'/'+vid+'/'+'valence.csv' if value == 'valence' else path_store+annotator+'/'+vid+'/'+'arousal.csv'
if os.path.exists(path_name):
                            #print('\nexisting values for annotator: ' + annotator)
count += 1
#open file and extract values
with open(path_name, 'rt') as csvfile:
spamReader = csv.reader(csvfile, delimiter=';', quotechar='|')
dim, csv_list = readValuesFromCSV(spamReader)
tmp = csv_list[2]
if (not os.path.exists(path_save+file)):
values = tmp
if(count > 0):
for v,t in zip(values, tmp):
if v != 'Value' and t != 'Value':
                                            # if the annotation is valid
dif = float(v)/count - float(t)
valueDiffExpert += dif
else:
valuesTmp = values
frames = csv_list[0]
timestamps = csv_list[1]
valueDiffExpert = valueDiffExpert/len(frames)
dictExpertCoeff.update({annotator:valueDiffExpert})
else:
if(count == 0):
frames = []
timestamps = []
#print(dictExpertCoeff)
return dim, frames, timestamps, dictExpertCoeff
def writeValuesCSV(videoName, dim, csv_list, value):
path = path_save + 'expert_valence_' + videoName + '.csv' if value=='valence' else path_save + 'expert_arousal_' + videoName + '.csv'
with open(path, 'w') as outfile:
for i in range(0, dim-1):
k=0
for entries in csv_list:
if k!=0:
outfile.write(';')
outfile.write(str(entries[i]))
k+=1
outfile.write('\n')
def main():
videos = list()
    # scan the videos folder to learn the names of the videos to process
for video in os.listdir(path_videos):
valence = list()
arousal = list()
csv_listV = list()
csv_listA = list()
if os.path.join(path_videos, video) != os.path.join(path_videos, 'Example.mp4'):
            # extract just the file name without the extension
base = (basename(video))
videoName = os.path.splitext(base)[0]
print("\n\nElabrating video: " + videoName)
#calcolo valence di ogni video
print('....is writing valence')
dim, frame, timestamp, dictExpertCoeff = getExpertValues(videoName, 'valence')
            valence = calculateEWE(dictExpertCoeff, video, dim, "valence")
if dim !=0 and frame != [] and timestamp != [] and valence != []:
csv_listV.append(frame)
csv_listV.append(timestamp)
csv_listV.append(valence)
writeValuesCSV(videoName, dim, csv_listV, 'valence')
else:
print('no values')
            # compute the arousal of each video
            print('.....writing arousal')
            dim, frame, timestamp, dictExpertCoeff = getExpertValues(videoName, 'arousal')
            arousal = calculateEWE(dictExpertCoeff, video, dim, "arousal")
if dim !=0 and frame != [] and timestamp != [] and arousal != []:
csv_listA.append(frame)
csv_listA.append(timestamp)
csv_listA.append(arousal)
writeValuesCSV(videoName, dim, csv_listA, 'arousal')
else:
print('no values')
# invoke main
main()
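# --- Hedged sketch of the weighting used by calculateEWE above ---
# Stripped of the directory scanning, each appended value is a weighted
# average of the annotators' values, weighting each annotator by the
# absolute value of their expert-difference coefficient:
#     ewe = sum_a(value[a] * |coeff[a]|) / sum_a(|coeff[a]|)
# (Note that calculateEWE accumulates N and D across frames, so it actually
# yields a running weighted mean rather than an independent per-frame one.)
# The numbers below are made up for illustration.
def _ewe_single_frame(values, coeffs):
    N = sum(v * abs(w) for v, w in zip(values, coeffs))
    D = sum(abs(w) for w in coeffs)
    return N / D
assert abs(_ewe_single_frame([0.2, 0.6], [1.0, 3.0]) - 0.5) < 1e-9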
|
import torch as tc
import moviepy.editor as mpy
import numpy as np
import uuid
import os
from collections import deque
import matplotlib.pyplot as plt
@tc.no_grad()
def movie(env, agent, args):
base_path = os.path.join(args.asset_dir, args.model_name)
os.makedirs(base_path, exist_ok=True)
fps = 64
max_frames = 2048
queue = deque(maxlen=max_frames)
def make_video():
def make_frame(t):
            # t ranges from 0 to (max_frames / fps); map it to a queue index.
frac_done = t / (max_frames // fps)
max_idx = len(queue) - 1
idx = int(max_idx * frac_done)
arr_fp = queue[idx]
x = plt.imread(arr_fp)
return (255 * x).astype(np.int32).astype(np.uint8)
filename = f"{uuid.uuid4()}.gif"
fp = os.path.join(base_path, filename)
clip = mpy.VideoClip(make_frame, duration=(max_frames // fps))
clip.write_gif(fp, fps=fps)
print(f"Saving video to {fp}")
t = 0
total_reward = 0.0
o_t = env.reset()
while t < queue.maxlen:
pi_dist, vpred = agent(tc.tensor(o_t).float().unsqueeze(0))
a_t = pi_dist.sample()
o_tp1, r_t, done_t, _ = env.step(a_t.squeeze(0).detach().numpy())
arr = env.render(mode='rgb_array')
arr_fp = f"/tmp/{str(uuid.uuid4())}.png"
plt.imsave(arr=arr, fname=arr_fp)
queue.append(arr_fp)
total_reward += r_t
t += 1
if done_t:
break
o_t = o_tp1
make_video()
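# --- Hedged usage sketch (an assumption, not part of the original module) ---
# movie() expects a gym-style env (old API: reset() -> obs, step() -> 4-tuple,
# render(mode='rgb_array')), an agent returning (pi_dist, vpred), and an args
# object carrying asset_dir and model_name. The placeholder agent below just
# samples uniformly over a discrete action space.
if __name__ == '__main__':
    import argparse
    import gym
    class _UniformAgent(tc.nn.Module):
        def __init__(self, n_actions):
            super().__init__()
            self._n = n_actions
        def forward(self, obs):
            logits = tc.zeros(obs.shape[0], self._n)  # uniform policy
            return tc.distributions.Categorical(logits=logits), tc.zeros(obs.shape[0])
    _env = gym.make('CartPole-v1')
    _args = argparse.Namespace(asset_dir='assets', model_name='demo')
    movie(_env, _UniformAgent(_env.action_space.n), _args)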
|
from SHIMON.api.ping import ApiPing
from SHIMON.api.error import error
from testing.base import BaseTest
from testing.http import assertHttpResponse
from SHIMON import HttpResponse
class TestPing(BaseTest):
@BaseTest.request_context
def test_ping_with_redirect_returns_http_200(self) -> None:
assertHttpResponse(self.ping(True), 200)
@BaseTest.request_context
def test_ping_without_redirect_returns_http_200(self) -> None:
assertHttpResponse(self.ping(False), 200)
@BaseTest.request_context
def test_ping_with_redirect_returns_pong(self) -> None:
assert self.ping(True)[0].data == b'"pong"\n'
@BaseTest.request_context
def test_ping_without_redirect_returns_pong(self) -> None:
assert self.ping(False)[0].json["msg"] == "pong"
def ping(self, redirect: bool) -> HttpResponse:
return ApiPing().entry(self.shimon, None, redirect)
|
def encrypt(plaintext):
    """No-op cipher stub: returns the plaintext unchanged with empty metadata."""
    return plaintext, dict()
def decrypt(plaintext):
    """No-op cipher stub: returns the input unchanged."""
    return plaintext
|
import base64
import logging
import sys
import time
from typing import TYPE_CHECKING, Dict, List, Optional, Union
try:
import certifi
from awscrt import io, mqtt
from awscrt.exceptions import AwsCrtError
from awsiot import mqtt_connection_builder
except ImportError: # pragma: no cover
pass
import requests
from Crypto.Cipher import AES
from Crypto.Hash import CMAC
from .const import APIGW_URL, IOT_EP, OFFICIALAPI_URL, AuthType
from .history import CHSesame2History
if TYPE_CHECKING:
from concurrent.futures import Future
from .auth import CognitoAuth, WebAPIAuth
from .const import CHSesame2CMD
from .device import SesameLocker
logger = logging.getLogger(__name__)
class SesameCloud:
def __init__(self, authenticator: Union["WebAPIAuth", "CognitoAuth"]) -> None:
"""Construct and send a Request to the cloud.
Args:
authenticator (Union[WebAPIAuth, CognitoAuth]): The authenticator
"""
self._authenticator = authenticator
def requestAPI(
self, method: str, url: str, json: Optional[dict] = None
) -> requests.Response:
"""A Wrapper of `requests.request`.
Args:
method (str): HTTP method to use: `GET`, `OPTIONS`, `HEAD`, `POST`, `PUT`, `PATCH`, or `DELETE`.
url (str): URL to send.
json (Optional[dict], optional): JSON data for the body to attach to the request. Defaults to `None`.
Raises:
RuntimeError: An HTTP error occurred.
Returns:
requests.Response: The server's response to an HTTP request.
"""
try:
logger.debug("requestAPI method={}, url={}".format(method, url))
response = requests.request(
method,
url,
json=json,
auth=self._authenticator,
)
response.raise_for_status()
except requests.exceptions.HTTPError as e:
logger.exception("requestAPI exeption raised")
raise RuntimeError(e)
return response
def getSign(self, device: "SesameLocker") -> str:
"""Generate a AES-CMAC tag.
Returns:
str: AES-CMAC tag.
"""
unixtime = int(time.time())
secret = device.getSecretKey()
cobj = CMAC.new(secret, ciphermod=AES)
cobj.update(unixtime.to_bytes(4, "little")[1:4])
sign = cobj.hexdigest()
logger.debug("getSign cmac={}".format(sign))
return sign
def getMechStatus(self, device: "SesameLocker") -> Union[Dict, str]:
"""Retrive a mechanical status of a device.
Args:
device (SesameLocker): The device for which you want to query.
Returns:
Union[Dict, str]: Current mechanical status of the device. `Dict` if using WebAPIAuth, and `str` if using CognitoAuth.
"""
if self._authenticator.login_method == AuthType.WebAPI:
url = "{}/{}".format(OFFICIALAPI_URL, device.getDeviceUUID())
response = self.requestAPI("GET", url)
r_json = response.json()
return r_json
else:
url = "https://{}/things/sesame2/shadow?name={}".format(
IOT_EP, device.getDeviceUUID()
)
response = self.requestAPI("GET", url)
r_json = response.json()
return r_json["state"]["reported"]["mechst"]
def sendCmd(
self,
device: "SesameLocker",
cmd: "CHSesame2CMD",
history_tag: str = "pysesame3",
) -> bool:
"""Send a locking/unlocking command.
Args:
device (SesameLocker): The device for which you want to query.
cmd (CHSesame2CMD): Lock, Unlock and Toggle.
            history_tag (str): The key tag to send when locking and unlocking.
Returns:
bool: `True` if success, `False` if not.
"""
logger.debug(
"sendCmd UUID={}, cmd={}, history_tag={}".format(
device.getDeviceUUID(), cmd, history_tag
)
)
if self._authenticator.login_method == AuthType.WebAPI:
url = "{}/{}/cmd".format(OFFICIALAPI_URL, device.getDeviceUUID())
sign = self.getSign(device)
elif self._authenticator.login_method == AuthType.SDK:
url = "{}/device/v1/iot/sesame2/{}".format(
APIGW_URL, device.getDeviceUUID()
)
sign = self.getSign(device)[0:8]
payload = {
"cmd": int(cmd),
"history": base64.b64encode(history_tag.encode()).decode(),
"sign": sign,
}
try:
response = self.requestAPI("POST", url, payload)
logger.debug("sendCmd result={}".format(response.ok))
return response.ok
except RuntimeError:
logger.debug("sendCmd result=exception raised")
return False
def getHistoryEntries(self, device: "SesameLocker") -> List[CHSesame2History]:
"""Retrieve the history of all events with a device.
Args:
device (SesameLocker): The device for which you want to query.
Returns:
list[CHSesame2History]: A list of events.
"""
logger.debug("getHistoryEntries UUID={}".format(device.getDeviceUUID()))
if self._authenticator.login_method == AuthType.WebAPI:
url = "{}/{}/history?page=0&lg=10".format(
OFFICIALAPI_URL, device.getDeviceUUID()
)
elif self._authenticator.login_method == AuthType.SDK:
url = "{}/device/v1/sesame2/{}/history?page=0&lg=10&a={}".format(
APIGW_URL, device.getDeviceUUID(), self.getSign(device)[0:8]
)
ret = []
response = self.requestAPI("GET", url)
for entry in response.json():
ret.append(CHSesame2History(**entry))
return ret
class AWSIoT:
def __init__(self, authenticator: "CognitoAuth") -> None:
"""Construct and send a request to the AWS IoT.
Args:
authenticator (CognitoAuth): The authenticator
"""
if (
"awsiot" not in sys.modules or "certifi" not in sys.modules
): # pragma: no cover
raise RuntimeError(
"Failed to load awsiotsdk or certifi. Did you run `pip install pysesame3[cognito]`?"
)
self._authenticator = authenticator
self.mqtt_connection: mqtt.Connection
def _on_connection_interrupted(
self, connection: mqtt.Connection, error: AwsCrtError
) -> None:
"""Callback when connection is accidentally lost.
Args:
connection (mqtt.Connection): Connection this callback is for.
error (AwsCrtError): Exception which caused connection loss.
"""
logger.warn("AWS IoT connection interrupted. error: {}".format(error))
def _on_connection_resumed(
self,
connection: mqtt.Connection,
return_code: mqtt.ConnectReturnCode,
session_present: bool,
) -> None:
"""Callback when an interrupted connection is re-established.
Args:
connection (mqtt.Connection): Connection this callback is for.
return_code (mqtt.ConnectReturnCode): Connect return code received from the server.
session_present (bool): `True` if resuming existing session. `False` if new session.
"""
        logger.warning(
"AWS IoT connection resumed. return_code: {} session_present: {}".format(
return_code, session_present
)
)
if return_code == mqtt.ConnectReturnCode.ACCEPTED and not session_present:
            logger.warning(
"AWS IoT session did not persist. Resubscribing to existing topics..."
)
resubscribe_future, _ = connection.resubscribe_existing_topics()
# Cannot synchronously wait for resubscribe result because we're on the connection's event-loop thread,
# evaluate result with a callback instead.
resubscribe_future.add_done_callback(self._on_resubscribe_complete)
def _on_resubscribe_complete(self, resubscribe_future: "Future") -> None:
"""Callback when resubscribing to existing topics is done.
Args:
resubscribe_future (concurrent.futures.Future): Future which completes when resubscribing succeeds or fails.
Raises:
ConnectionRefusedError: If the server rejected resubscribe to a topic.
"""
resubscribe_results = resubscribe_future.result()
logger.debug("AWS IoT resubscribe results: {}".format(resubscribe_results))
for topic, qos in resubscribe_results["topics"]:
if qos is None:
raise ConnectionRefusedError(
"Server rejected resubscribe to topic: {}".format(topic)
)
def connect(self) -> None:
"""Open the actual connection to the server."""
if hasattr(self, "mqtt_connection"):
connect_future = self.mqtt_connection.connect()
return connect_future.result()
event_loop_group = io.EventLoopGroup(1)
host_resolver = io.DefaultHostResolver(event_loop_group)
client_bootstrap = io.ClientBootstrap(event_loop_group, host_resolver)
logger.debug("Start connecting AWS IoT....")
self.mqtt_connection = mqtt_connection_builder.websockets_with_custom_handshake(
endpoint=IOT_EP,
client_bootstrap=client_bootstrap,
websocket_handshake_transform=self._authenticator.iot_websocket_handshake_transform,
ca_filepath=certifi.where(),
on_connection_interrupted=self._on_connection_interrupted,
on_connection_resumed=self._on_connection_resumed,
client_id=self._authenticator.client_id,
clean_session=False,
keep_alive_secs=6,
)
connect_future = self.mqtt_connection.connect()
connect_future.result()
logger.debug("Connection established to AWS IoT")
|
#!/usr/bin/python
# -*- encoding: utf-8 -*-
import os
from functools import wraps
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from flask import Flask, Response, request, jsonify, make_response, json
from flask_cors import CORS
import pkg_resources
import smtplib
import time
import yaml
import jwt
from pywebpush import webpush
app = Flask(__name__)
cors = CORS()
cors.init_app(app, origins='*')
version = pkg_resources.require('mailapi')[0].version
# read the config file, expected in the current working directory
with open("config.yml", 'r') as ymlfile:
cfg = yaml.load(ymlfile, Loader=yaml.BaseLoader)
tokens = cfg['tokens']
jwt_secret = tokens['jwt_secret']
vapid = cfg['vapid']
vapid_secret_key = vapid['secret_key']
vapid_public_key = vapid['public_key']
vapid_private_key = vapid['private_key']
vapid_claims = vapid['claims']
def token_required(f):
@wraps(f)
def decorated(*args, **kwargs):
token = None
if 'Authorization' in request.headers:
token = request.headers['Authorization']
if not token:
return 'Token is missing', 401
try:
token_string = token.split()
            data = jwt.decode(token_string[1], jwt_secret, algorithms=['HS256'])  # assumes HS256-signed tokens
roles = data['role'].split(',')
            if not any(r in roles for r in ('AD', 'BS', 'JC', 'TR', 'LA', 'PM')):
return 'Invalid role. Please acquire a proper role.', 401
except jwt.ExpiredSignatureError:
return 'Signature expired. Please log in again.', 401
except jwt.exceptions.InvalidSignatureError:
return 'Invalid token. Please log in again.', 401
except jwt.InvalidTokenError:
return 'Invalid token. Please log in again.', 401
except Exception as exc:
return 'Token is invalid', 401
return f(*args, **kwargs)
return decorated
@app.route('/', methods=['GET'])
def i_am_alive_to_browser():
return 'WebSpace: the final frontier. These are the voyages of version ' + version + '. The mission: to explore strange new languages, <br>to seek out new life and new civilizations, to boldly go where no man has gone before.', 200
@app.route('/mail', methods=['POST'])
@token_required
def send_mail():
response = {}
data = request.get_json()
email_user = data['UserId']
email_password = data['Password']
try:
with smtplib.SMTP('server72.hosting2go.nl', 2525) as smtp:
smtp.starttls()
smtp.login(email_user, email_password)
for mailItem in data['MailItems']:
to_addresses = []
msg = MIMEMultipart()
msg['From'] = data['From']
if 'To' in mailItem and mailItem['To']:
msg['To'] = mailItem['To']
to_addresses.append(mailItem['To'])
if 'CC' in mailItem and mailItem['CC']:
msg['CC'] = mailItem['CC']
to_addresses.append(mailItem['CC'])
if 'BCC' in mailItem and mailItem['BCC']:
msg['BCC'] = mailItem['BCC']
to_addresses.append(mailItem['BCC'])
msg['Subject'] = mailItem['Subject']
body = ''
for line in mailItem['Message']:
body += line + '\n'
# to_addresses = [mailItem['To']] + [mailItem['CC']] + [mailItem['BCC']]
msg.attach(MIMEText(body, 'plain'))
smtp.sendmail(data['From'], to_addresses, msg.as_string())
time.sleep(1)
smtp.quit()
except smtplib.SMTPRecipientsRefused:
response['message'] = 'Mail Error. All recipients were refused. Nobody got the mail.'
return json.dumps(response), 503
except smtplib.SMTPHeloError:
response['message'] = 'Mail Error. The server did not reply properly to the HELO greeting.'
return json.dumps(response), 503
except smtplib.SMTPSenderRefused as exc:
response['message'] = 'Mail Error. The server did not accept the from_address.'
return json.dumps(response), 503
except smtplib.SMTPDataError as exc:
response['message'] = 'Mail Error. The server replied with an unexpected error code (other than a refusal of a recipient).'
return json.dumps(response), 503
except smtplib.SMTPNotSupportedError as exc:
response['message'] = 'Mail Error. SMTPUTF8 was given in the mail_options but is not supported by the server.'
return json.dumps(response), 503
except Exception as exc:
if hasattr(exc, 'message'):
response['message'] = 'Mail Error. Something went wrong. ' + exc.message
return json.dumps(response), 503
else:
print('We have an error: -----------------------------------------------------------------------')
print('-----------------------------------------------------------------------------------------')
print(exc)
print('-----------------------------------------------------------------------------------------')
response['message'] = 'Mail Error. Something went wrong.'
return json.dumps(response), 503
response['message'] = 'Success'
return json.dumps(response), 200
def send_web_push(subscription_information, message_body):
return webpush(
subscription_info=subscription_information,
data=message_body,
vapid_private_key=vapid_private_key,
vapid_claims=vapid_claims
)
@app.route('/notification.php', methods=['POST', 'GET'])
@token_required
def send_notification():
if request.method == "GET":
return Response(response=json.dumps({"public_key": vapid_public_key}),
headers={"Access-Control-Allow-Origin": "*"},
content_type="application/json")
print("is_json", request.is_json)
if not request.json or not request.json.get('sub_token'):
return jsonify({'failed': 1})
if not request.json.get('message'):
return jsonify({'failed': 1})
print("request.json", request.json)
token = request.json.get('sub_token')
notificationpayload = request.json.get('message')
try:
token = json.loads(token)
send_web_push(token, notificationpayload)
return jsonify({'success': 1})
except Exception as e:
print("error", e)
return jsonify({'failed': str(e)})
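# Hedged helper (an illustrative addition, not part of the original API):
# mints an Authorization header value that token_required above accepts.
# Assumes HS256 signing, matching the decode call in token_required.
def make_example_token(role='AD', ttl=3600):
    payload = {'role': role, 'exp': int(time.time()) + ttl}
    token = jwt.encode(payload, jwt_secret, algorithm='HS256')
    if isinstance(token, bytes):  # PyJWT 1.x returns bytes, 2.x returns str
        token = token.decode('utf-8')
    return 'Bearer ' + token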
def main():
# os.environ['NO_PROXY'] = '0.0.0.0'
app.run(host='0.0.0.0', port=5000)
# , ssl_context='adhoc')
# app.run(debug=True)
return
if __name__ == '__main__':
main()
|
from wtforms import StringField, PasswordField, BooleanField, SubmitField, HiddenField, RadioField
from wtforms.validators import DataRequired, Length, EqualTo
from flask_wtf import FlaskForm
class FindPhoneForm(FlaskForm):
username = StringField('Username', validators=[DataRequired(), Length(3, 64)])
phone_brand = StringField('PhoneBrand', validators=[DataRequired(), Length(3, 64)])
phone_type = BooleanField('Brand New')
submit = SubmitField('Find Phone')
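# Hedged usage sketch: how this form would typically be wired into a Flask
# view (route name and template are assumptions, not part of this module):
#
#   @app.route('/find-phone', methods=['GET', 'POST'])
#   def find_phone():
#       form = FindPhoneForm()
#       if form.validate_on_submit():
#           ...  # use form.username.data, form.phone_brand.data, form.phone_type.data
#       return render_template('find_phone.html', form=form)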
|
'''
Created on Mar 5, 2012
@author: jabuhacx
'''
import unittest
import MtWilsonHttpAuthorization
import httplib
import time
import datetime
class Test(unittest.TestCase):
def setUp(self):
self.server = "localhost:8080"
self.url = "/AttestationService/resources"
self.mtwilson = MtWilsonHttpAuthorization.MtWilsonHttpAuthorization("root","root")
pass
def tearDown(self):
pass
def testDatetimeFormat(self):
print time.strftime('%Y-%m-%dT%H:%M:%S')
'''fromtimestamp(time.time())'''
d = datetime.datetime.now().replace(microsecond=0)
print d.isoformat('T')
pass
def testCreateAuthorizationHeader(self):
print self.mtwilson.getAuthorization("GET", "/hello/world", None)
pass
def testGetHostsTrust(self):
uripath = self.url+"/hosts/trust?hostName=10.1.71.103"
headers = {"Authorization":self.mtwilson.getAuthorization("GET", "http://"+self.server+uripath, None)}
print headers.get("Authorization")
h1 = httplib.HTTPConnection(self.server)
h1.connect()
h1.request("GET", uripath, None, headers)
response = h1.getresponse()
print "Response: {0}\n{1}".format(response.status, response.read())
h1.close()
pass
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
    unittest.main()
|
"""
Desi Tashan Kodi Addon
Copyright (C) 2016 gujal
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import sys
from urlparse import parse_qsl
import xbmc
import xbmcgui
import xbmcplugin
import xbmcaddon
from BeautifulSoup import BeautifulSoup, SoupStrainer
import re, requests
import urlresolver
# Get the plugin url in plugin:// notation.
_url = sys.argv[0]
# Get the plugin handle as an integer number.
_handle = int(sys.argv[1])
_addon = xbmcaddon.Addon()
_addonname = _addon.getAddonInfo('name')
_icon = _addon.getAddonInfo('icon')
base_url = 'http://www.desitashan.me/'
MAINLIST = {'Indian': base_url,
'Pakistani': base_url + 'pakistan-tv/'}
def get_countries():
"""
Get the list of countries.
:return: list
"""
return MAINLIST.keys()
def get_channels(country):
"""
Get the list of channels.
:return: list
"""
channels = []
html = requests.get(MAINLIST[country]).text
mlink = SoupStrainer('div', {'class':'nav fusion-mobile-tab-nav'})
soup = BeautifulSoup(html, parseOnlyThese=mlink)
items = soup.findAll('li')
for item in items:
title = item.text
tref = item.a.get('href')[1:]
try:
icon = item.find('img')['src']
if icon.startswith('/'):
icon = base_url[:-1] + icon
else:
icon = base_url + icon
except:
icon = _icon
channels.append((title,icon,tref))
return channels
def get_shows(channel,country):
"""
Get the list of shows.
:return: list
"""
shows = []
html = requests.get(MAINLIST[country]).text
mlink = SoupStrainer('div', {'id':channel})
soup = BeautifulSoup(html, parseOnlyThese=mlink)
items = soup.findAll('div', {'class':'fusion-column-wrapper'})
for item in items:
title = item.text
url = item.a.get('href')
if url.startswith('/'):
url = base_url[:-1] + url
else:
url = base_url + url
try:
icon = item.find('img')['src']
if icon.startswith('/'):
icon = base_url[:-1] + icon
else:
icon = base_url + icon
except:
            icon = _icon
shows.append((title,icon,url))
return shows
def get_episodes(show):
"""
Get the list of episodes.
:return: list
"""
episodes = []
html = requests.get(show).text
mlink = SoupStrainer('div', {'id':'showList'})
soup = BeautifulSoup(html, parseOnlyThese=mlink)
items = soup.findAll('div', {'class':'fusion-column-wrapper'})
for item in items:
title = item.h4.a.text
if 'written' not in title.lower():
url = item.a.get('href')
if url.startswith('/'):
url = base_url[:-1] + url
else:
url = base_url + url
try:
icon = item.find('img')['src']
if icon.startswith('/'):
icon = base_url[:-1] + icon
else:
icon = base_url + icon
except:
                icon = _icon
episodes.append((title,icon,url))
plink = SoupStrainer('a', {'class':'pagination-next'})
soup = BeautifulSoup(html, parseOnlyThese=plink)
if 'Next' in str(soup):
icon = _icon
ep_link = soup.a.get('href')
if 'category' in ep_link:
url = base_url[:-1] + ep_link
else:
url = show + ep_link
title = 'Next Page: ' + url.split('page/')[1][:-1]
episodes.append((title, icon, url))
return episodes
def get_videos(episode):
"""
Get the list of videos.
:return: list
"""
videos = []
html = requests.get(episode).text
mlink = SoupStrainer('p', {'class':'vidLinksContent'})
soup = BeautifulSoup(html, parseOnlyThese=mlink)
items = soup.findAll('a')
for item in items:
try:
vid_name = item['title']
except:
vid_name = item.text
vid_url = item['href']
videos.append((vid_name, vid_url))
mlink = SoupStrainer('div', {'class':'post-content'})
soup = BeautifulSoup(html, parseOnlyThese=mlink)
#items = soup.findAll('div', {'class':'video-shortcode'})
items = soup.findAll('iframe')
for item in items:
try:
vid_name = item['title']
except:
vid_name = item['class']
vid_url = item['src']
videos.append((vid_name, vid_url))
return videos
def list_countries():
"""
Create the list of countries in the Kodi interface.
"""
countries = get_countries()
listing = []
for country in countries:
list_item = xbmcgui.ListItem(label=country+' Channels')
list_item.setInfo('video', {'title': country, 'genre': country})
list_item.setArt({'thumb': _icon,
'icon': _icon,
'fanart': _icon})
url = '{0}?action=list_country&country={1}'.format(_url, country)
is_folder = True
listing.append((url, list_item, is_folder))
xbmcplugin.addDirectoryItems(_handle, listing, len(listing))
xbmcplugin.addSortMethod(_handle, xbmcplugin.SORT_METHOD_LABEL_IGNORE_THE)
xbmcplugin.endOfDirectory(_handle)
def list_channels(country):
"""
    Create the list of channels in the Kodi interface.
"""
channels = get_channels(country)
listing = []
for channel in channels:
list_item = xbmcgui.ListItem(label=channel[0])
list_item.setArt({'thumb': channel[1],
'icon': channel[1],
'fanart': channel[1]})
list_item.setInfo('video', {'title': channel[0], 'genre': country})
url = '{0}?action=list_channel&channel={1}&country={2}'.format(_url, channel[2], country)
is_folder = True
listing.append((url, list_item, is_folder))
xbmcplugin.addDirectoryItems(_handle, listing, len(listing))
xbmcplugin.addSortMethod(_handle, xbmcplugin.SORT_METHOD_LABEL_IGNORE_THE)
xbmcplugin.endOfDirectory(_handle)
def list_shows(channel,country):
"""
    Create the list of shows in the Kodi interface.
"""
shows = get_shows(channel,country)
listing = []
for show in shows:
list_item = xbmcgui.ListItem(label=show[0])
list_item.setArt({'thumb': show[1],
'icon': show[1],
'fanart': show[1]})
list_item.setInfo('video', {'title': show[0], 'genre': 'Desi TV'})
url = '{0}?action=list_show&show={1}'.format(_url, show[2])
is_folder = True
listing.append((url, list_item, is_folder))
xbmcplugin.addDirectoryItems(_handle, listing, len(listing))
xbmcplugin.endOfDirectory(_handle)
def list_episodes(show):
"""
Create the list of episodes in the Kodi interface.
"""
episodes = get_episodes(show)
listing = []
for episode in episodes:
list_item = xbmcgui.ListItem(label=episode[0])
list_item.setArt({'thumb': episode[1],
'icon': episode[1],
'fanart': episode[1]})
list_item.setInfo('video', {'title': episode[0], 'genre': 'Desi TV'})
if 'Next Page' not in episode[0]:
url = '{0}?action=list_episode&episode={1}&icon={2}'.format(_url, episode[2], episode[1])
else:
url = '{0}?action=list_show&show={1}'.format(_url, episode[2])
is_folder = True
listing.append((url, list_item, is_folder))
xbmcplugin.addDirectoryItems(_handle, listing, len(listing))
xbmcplugin.endOfDirectory(_handle)
def list_videos(episode,icon):
"""
Create the list of playable videos in the Kodi interface.
    :param episode: str
    :param icon: str
"""
videos = get_videos(episode)
listing = []
for video in videos:
list_item = xbmcgui.ListItem(label=video[0])
list_item.setArt({'thumb': icon,
'icon': icon,
'fanart': icon})
list_item.setInfo('video', {'title': video[0], 'genre': 'Desi TV'})
list_item.setProperty('IsPlayable', 'true')
url = '{0}?action=play&video={1}'.format(_url, video[1])
is_folder = False
listing.append((url, list_item, is_folder))
xbmcplugin.addDirectoryItems(_handle, listing, len(listing))
xbmcplugin.endOfDirectory(_handle)
def resolve_url(url):
duration=5000
try:
stream_url = urlresolver.HostedMediaFile(url=url).resolve()
# If urlresolver returns false then the video url was not resolved.
if not stream_url or not isinstance(stream_url, basestring):
try: msg = stream_url.msg
except: msg = url
xbmc.executebuiltin('Notification(%s, %s, %d, %s)'%('URL Resolver',msg, duration, _icon))
return False
except Exception as e:
try: msg = str(e)
except: msg = url
xbmc.executebuiltin('Notification(%s, %s, %d, %s)'%('URL Resolver',msg, duration, _icon))
return False
return stream_url
def play_video(path):
"""
Play a video by the provided path.
:param path: str
"""
# Create a playable item with a path to play.
play_item = xbmcgui.ListItem(path=path)
vid_link = play_item.getfilename()
#xbmc.log(msg = '=======> Item is : %s'%url, level = xbmc.LOGNOTICE)
if '/coming/' in vid_link:
stream_url = 'http://www.tashanplayer.com/upcoming.mp4'
elif 'tashanplayer' in vid_link:
vhtml = requests.get(vid_link).text
try:
vplink = SoupStrainer('iframe')
vsoup = BeautifulSoup(vhtml, parseOnlyThese=vplink)
vid_url = vsoup.find('iframe')['src']
except:
vplink = SoupStrainer('script', {'data-container':'myPlayer'})
vsoup = BeautifulSoup(vhtml, parseOnlyThese=vplink)
vid_url = vsoup.find('script')['data-config']
stream_url = resolve_url(vid_url)
else:
stream_url = resolve_url(vid_link)
play_item.setPath(stream_url)
# Pass the item to the Kodi player.
xbmcplugin.setResolvedUrl(_handle, True, listitem=play_item)
def router(paramstring):
"""
Router function that calls other functions
depending on the provided paramstring
:param paramstring:
"""
# Parse a URL-encoded paramstring to the dictionary of
# {<parameter>: <value>} elements
params = dict(parse_qsl(paramstring))
# Check the parameters passed to the plugin
if params:
if params['action'] == 'list_country':
list_channels(params['country'])
elif params['action'] == 'list_channel':
list_shows(params['channel'],params['country'])
elif params['action'] == 'list_show':
list_episodes(params['show'])
elif params['action'] == 'list_episode':
list_videos(params['episode'],params['icon'])
elif params['action'] == 'play':
play_video(params['video'])
else:
list_countries()
#list_channels('Indian')
if __name__ == '__main__':
# Call the router function and pass the plugin call parameters to it.
# We use string slicing to trim the leading '?' from the plugin call paramstring
router(sys.argv[2][1:])
|
#Imports
from functools import partial
#Create a key listener class
class KeyListener(object):
#List of keys
keys = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z", "space", "Up", "Down", "Left", "Right"]
#Constructor
def __init__(self, window):
#Store the window
self.window = window
#Run the setup function for the dictionary
self.setup()
#Setup the key listeners
self.createListeners()
#Function to initialize the key listener
def setup(self):
#Create a dictionary to store the keys that are currently being pressed
self.pressed = {}
#For each of the keys, set them to false in the dictionary
for k in KeyListener.keys:
self.pressed[k] = False
#Function to handle key down events
def keyDown(self, k):
#Add this key to the list of pressed keys
self.pressed[k] = True
#Function to handle key up events
def keyUp(self, k):
#Add this key to the list of pressed keys
self.pressed[k] = False
#Function to add key listeners for each key
def createListeners(self):
#For each of the keys, add a listener
for k in KeyListener.keys:
#Set the current key
fp = partial(self.keyDown, k)
fr = partial(self.keyUp, k)
#Add the on key listeners
self.window.onkeypress(fp, k)
self.window.onkeyrelease(fr, k)
#Start the listener
self.window.listen()
#Function to check whether a key is pressed
def isPressed(self, k):
#Return whether the key is pressed or not
return self.pressed[k]
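#Hedged usage sketch (assumes the standard turtle module, whose Screen object
#provides the onkeypress/onkeyrelease/listen/ontimer methods used above)
if __name__ == "__main__":
    import turtle
    #Create a window and attach the listener to it
    window = turtle.Screen()
    listener = KeyListener(window)
    #Poll the pressed-key state from a timer instead of blocking the event loop
    def tick():
        if listener.isPressed("space"):
            print("space is held down")
        window.ontimer(tick, 100)
    tick()
    turtle.mainloop()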
|
"""
Support for interface with a Sony Bravia TV.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.braviatv_psk/
"""
import logging
import re
import voluptuous as vol
from homeassistant.components.media_player import (
MediaPlayerDevice, PLATFORM_SCHEMA)
try:
from homeassistant.components.media_player.const import (
SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PREVIOUS_TRACK,
SUPPORT_TURN_ON, SUPPORT_TURN_OFF, SUPPORT_VOLUME_MUTE, SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA, SUPPORT_VOLUME_STEP, SUPPORT_VOLUME_SET,
SUPPORT_SELECT_SOURCE, SUPPORT_STOP, MEDIA_TYPE_TVSHOW)
except ImportError:
from homeassistant.components.media_player import (
SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PREVIOUS_TRACK,
SUPPORT_TURN_ON, SUPPORT_TURN_OFF, SUPPORT_VOLUME_MUTE, SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA, SUPPORT_VOLUME_STEP, SUPPORT_VOLUME_SET,
SUPPORT_SELECT_SOURCE, SUPPORT_STOP, MEDIA_TYPE_TVSHOW)
from homeassistant.const import (
CONF_HOST, CONF_NAME, CONF_MAC, STATE_OFF, STATE_ON)
import homeassistant.helpers.config_validation as cv
from homeassistant.util.json import load_json, save_json
REQUIREMENTS = ['pySonyBraviaPSK==0.3.2b0']
_LOGGER = logging.getLogger(__name__)
SUPPORT_BRAVIA = SUPPORT_PAUSE | SUPPORT_VOLUME_STEP | \
SUPPORT_VOLUME_MUTE | SUPPORT_VOLUME_SET | \
SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK | \
SUPPORT_TURN_ON | SUPPORT_TURN_OFF | \
SUPPORT_SELECT_SOURCE | SUPPORT_PLAY | SUPPORT_STOP
BRAVIA_CONFIG_FILE = 'bravia.conf'
CLIENTID_PREFIX = 'HomeAssistant'
NICKNAME = 'Home Assistant'
DEFAULT_NAME = 'Sony Bravia TV'
# Map ip to request id for configuring
_CONFIGURING = {}
# Config file
CONF_PSK = 'psk'
CONF_AMP = 'amp'
CONF_ANDROID = 'android'
CONF_SOURCE_FILTER = 'sourcefilter'
CONF_BROADCAST = 'broadcast'
# Some additional info to show specific for Sony Bravia TV
TV_WAIT = 'TV started, waiting for program info'
PLAY_MEDIA_OPTIONS = [
'Num1', 'Num2', 'Num3', 'Num4', 'Num5', 'Num6', 'Num7', 'Num8', 'Num9',
'Num0', 'Netflix', 'Display', 'ChannelUp', 'ChannelDown', 'Up', 'Down',
'Left', 'Right', 'Confirm', 'Home', 'EPG', 'Return', 'Options', 'Exit'
]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_HOST): cv.string,
    vol.Optional(CONF_PSK): cv.string,  ### change this to str, see https://github.com/home-assistant/home-assistant/pull/12934/files
vol.Optional(CONF_MAC): cv.string,
vol.Optional(CONF_BROADCAST, default="255.255.255.255"): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_AMP, default=False): cv.boolean,
vol.Optional(CONF_ANDROID, default=False): cv.boolean,
vol.Optional(CONF_SOURCE_FILTER, default=[]): vol.All(
cv.ensure_list, [cv.string])})
# pylint: disable=unused-argument
def _get_mac_address(ip_address):
"""Get the MAC address of the device."""
from subprocess import Popen, PIPE
pid = Popen(["arp", "-n", ip_address], stdout=PIPE)
pid_component = pid.communicate()[0]
match = re.search(r"(([a-f\d]{1,2}\:){5}[a-f\d]{1,2})".encode('UTF-8'),
pid_component)
if match is not None:
return match.groups()[0]
return None
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up the Sony Bravia TV platform."""
host = config.get(CONF_HOST)
psk = config.get(CONF_PSK)
mac = config.get(CONF_MAC)
broadcast = config.get(CONF_BROADCAST)
name = config.get(CONF_NAME)
amp = config.get(CONF_AMP)
android = config.get(CONF_ANDROID)
source_filter = config.get(CONF_SOURCE_FILTER)
pin = None
if host is None:
_LOGGER.error("No TV IP address found in configuration file")
return
# If no PSK is entered in the config file the PIN config is started
if psk is None:
bravia_config = load_json(hass.config.path(BRAVIA_CONFIG_FILE))
while bravia_config:
# Set up a configured TV
host_ip, host_config = bravia_config.popitem()
if host_ip == host:
pin = host_config['pin']
mac = host_config['mac']
name = config.get(CONF_NAME)
add_devices([BraviaTVDevice(host, psk, mac, broadcast, name,
pin, amp, android, source_filter)])
return
setup_bravia(config, pin, hass, add_devices)
else:
add_devices([BraviaTVDevice(host, psk, mac, broadcast, name,
pin, amp, android, source_filter)])
def setup_bravia(config, pin, hass, add_devices):
"""Set up a Sony Bravia TV based on host parameter."""
host = config.get(CONF_HOST)
psk = config.get(CONF_PSK)
mac = config.get(CONF_MAC)
broadcast = config.get(CONF_BROADCAST)
name = config.get(CONF_NAME)
amp = config.get(CONF_AMP)
android = config.get(CONF_ANDROID)
source_filter = config.get(CONF_SOURCE_FILTER)
if pin is None:
request_configuration(config, hass, add_devices)
return
else:
mac = _get_mac_address(host) # TODO or get mac from config file
if mac is not None:
mac = mac.decode('utf8')
# If we came here and configuring this host, mark as done
if host in _CONFIGURING:
request_id = _CONFIGURING.pop(host)
configurator = hass.components.configurator
configurator.request_done(request_id)
_LOGGER.info("Discovery configuration done")
# Save config
save_json(
hass.config.path(BRAVIA_CONFIG_FILE),
{host: {'pin': pin, 'host': host, 'mac': mac}})
add_devices([BraviaTVDevice(host, psk, mac, broadcast, name,
pin, amp, android, source_filter)])
def request_configuration(config, hass, add_devices):
"""Request configuration steps from the user."""
host = config.get(CONF_HOST)
name = config.get(CONF_NAME)
configurator = hass.components.configurator
    # We get an error if this method is called while we are configuring
if host in _CONFIGURING:
configurator.notify_errors(
_CONFIGURING[host], "Failed to register, please try again.")
return
def bravia_configuration_callback(data):
"""Handle the entry of user PIN."""
from braviapsk import sony_bravia_psk
pin = data.get('pin')
braviarc = sony_bravia_psk.BraviaRC(host)
braviarc.connect(pin, CLIENTID_PREFIX, NICKNAME)
if braviarc.is_connected():
setup_bravia(config, pin, hass, add_devices)
else:
request_configuration(config, hass, add_devices)
_CONFIGURING[host] = configurator.request_config(
name, bravia_configuration_callback,
        description='Enter the Pin shown on your Sony Bravia TV. ' +
        'If no Pin is shown, enter 0000 to let the TV show you a Pin.',
description_image="/static/images/smart-tv.png",
submit_caption="Confirm",
fields=[{'id': 'pin', 'name': 'Enter the pin', 'type': ''}]
)
class BraviaTVDevice(MediaPlayerDevice):
"""Representation of a Sony Bravia TV."""
def __init__(self, host, psk, mac, broadcast, name, pin, amp, android,
source_filter):
"""Initialize the Sony Bravia device."""
_LOGGER.info("Setting up Sony Bravia TV")
from braviapsk import sony_bravia_psk
self._braviarc = sony_bravia_psk.BraviaRC(host, psk, mac)
self._name = name
self._psk = psk
self._pin = pin
self._broadcast = broadcast
self._amp = amp
self._android = android
self._source_filter = source_filter
self._state = STATE_OFF
self._muted = False
self._program_name = None
self._channel_name = None
self._channel_number = None
self._source = None
self._source_list = []
###self._original_content_list = []
self._content_mapping = {}
self._duration = None
self._content_uri = None
###self._id = None
self._playing = False
self._start_date_time = None
self._program_media_type = None
self._min_volume = None
self._max_volume = None
self._volume = None
self._start_time = None
self._end_time = None
        if mac is not None:  ### Make MAC mandatory for this
            self._unique_id = '{}-{}'.format(mac, name)
        else:
            self._unique_id = None
if self._psk:
_LOGGER.debug(
"Set up Sony Bravia TV with IP: %s, PSK: %s, MAC: %s",
host, psk, mac)
self.update()
else:
self._braviarc.connect(pin, CLIENTID_PREFIX, NICKNAME)
if self._braviarc.is_connected():
self.update()
else:
self._state = STATE_OFF
def update(self):
"""Update TV info."""
if not self._psk:
if not self._braviarc.is_connected():
if self._braviarc.get_power_status() != 'off':
self._braviarc.connect(
self._pin, CLIENTID_PREFIX, NICKNAME)
if not self._braviarc.is_connected():
return
try:
power_status = self._braviarc.get_power_status()
if power_status == 'active':
self._state = STATE_ON
self._refresh_volume()
self._refresh_channels()
playing_info = self._braviarc.get_playing_info()
self._reset_playing_info()
if playing_info is None or not playing_info:
                self._program_name = ('No info (resumed after pause '
                                      'or app opened)')
else:
self._program_name = playing_info.get('programTitle')
self._channel_name = playing_info.get('title')
self._program_media_type = playing_info.get(
'programMediaType')
self._channel_number = playing_info.get('dispNum')
self._source = playing_info.get('source')
self._content_uri = playing_info.get('uri')
self._duration = playing_info.get('durationSec')
self._start_date_time = playing_info.get('startDateTime')
# Get time info from TV program
if self._start_date_time and self._duration:
time_info = self._braviarc.playing_time(
self._start_date_time, self._duration)
self._start_time = time_info.get('start_time')
self._end_time = time_info.get('end_time')
else:
if self._program_name == TV_WAIT:
# TV is starting up, takes some time before it responds
_LOGGER.info("TV is starting, no info available yet")
else:
self._state = STATE_OFF
except Exception as exception_instance: # pylint: disable=broad-except
_LOGGER.error("No data received from TV. Error message: %s",
exception_instance)
self._state = STATE_OFF
def _reset_playing_info(self):
self._program_name = None
self._channel_name = None
self._program_media_type = None
self._channel_number = None
self._source = None
self._content_uri = None
self._duration = None
self._start_date_time = None
self._start_time = None
self._end_time = None
def _refresh_volume(self):
"""Refresh volume information."""
volume_info = self._braviarc.get_volume_info()
if volume_info is not None:
self._volume = volume_info.get('volume')
self._min_volume = volume_info.get('minVolume')
self._max_volume = volume_info.get('maxVolume')
self._muted = volume_info.get('mute')
def _refresh_channels(self):
if not self._source_list:
self._content_mapping = self._braviarc.load_source_list()
self._source_list = []
if not self._source_filter: # list is empty
for key in self._content_mapping:
self._source_list.append(key)
else:
filtered_dict = {title: uri for (title, uri) in
self._content_mapping.items()
if any(filter_title in title for filter_title
in self._source_filter)}
for key in filtered_dict:
self._source_list.append(key)
@property
def unique_id(self):
"""Return the unique ID of the device."""
return self._unique_id
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def source(self):
"""Return the current input source."""
return self._source
@property
def source_list(self):
"""List of available input sources."""
return self._source_list
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
if self._volume is not None:
return self._volume / 100
return None
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._muted
@property
def supported_features(self):
"""Flag media player features that are supported."""
supported = SUPPORT_BRAVIA
# Remove volume slider if amplifier is attached to TV
if self._amp:
supported = supported ^ SUPPORT_VOLUME_SET
return supported
@property
def media_content_type(self):
"""Content type of current playing media.
Used for program information below the channel in the state card.
"""
return MEDIA_TYPE_TVSHOW
@property
def media_title(self):
"""Title of current playing media.
Used to show TV channel info.
"""
return_value = None
if self._channel_name is not None:
if self._channel_number is not None:
return_value = '{0!s}: {1}'.format(
self._channel_number.lstrip('0'), self._channel_name)
else:
return_value = self._channel_name
return return_value
@property
def media_series_title(self):
"""Title of series of current playing media, TV show only.
Used to show TV program info.
"""
return_value = None
if self._program_name is not None:
if self._start_time is not None and self._end_time is not None:
return_value = '{0} [{1} - {2}]'.format(
self._program_name, self._start_time, self._end_time)
else:
return_value = self._program_name
else:
if not self._channel_name: # This is empty when app is opened
return_value = 'App opened'
return return_value
@property
def media_content_id(self):
"""Content ID of current playing media."""
return self._channel_name
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
self._braviarc.set_volume_level(volume)
def turn_on(self):
"""Turn the media player on.
Use a different command for Android as WOL is not working.
"""
if self._android:
self._braviarc.turn_on_command()
else:
self._braviarc.turn_on(self._broadcast)
# Show that TV is starting while it takes time
# before program info is available
self._reset_playing_info()
self._state = STATE_ON
self._program_name = TV_WAIT
def turn_off(self):
"""Turn off media player."""
self._braviarc.turn_off()
self._state = STATE_OFF
def volume_up(self):
"""Volume up the media player."""
self._braviarc.volume_up()
def volume_down(self):
"""Volume down media player."""
self._braviarc.volume_down()
def mute_volume(self, mute):
"""Send mute command."""
self._braviarc.mute_volume()
def select_source(self, source):
"""Set the input source."""
if source in self._content_mapping:
uri = self._content_mapping[source]
self._braviarc.play_content(uri)
def media_play_pause(self):
"""Simulate play pause media player."""
if self._playing:
self.media_pause()
else:
self.media_play()
def media_play(self):
"""Send play command."""
self._playing = True
self._braviarc.media_play()
def media_pause(self):
"""Send media pause command to media player.
Will pause TV when TV tuner is on.
"""
self._playing = False
if self._program_media_type == 'tv' or self._program_name is not None:
self._braviarc.media_tvpause()
else:
self._braviarc.media_pause()
def media_next_track(self):
"""Send next track command.
Will switch to next channel when TV tuner is on.
"""
if self._program_media_type == 'tv' or self._program_name is not None:
self._braviarc.send_command('ChannelUp')
else:
self._braviarc.media_next_track()
def media_previous_track(self):
"""Send the previous track command.
Will switch to previous channel when TV tuner is on.
"""
if self._program_media_type == 'tv' or self._program_name is not None:
self._braviarc.send_command('ChannelDown')
else:
self._braviarc.media_previous_track()
def play_media(self, media_type, media_id, **kwargs):
"""Play media."""
_LOGGER.debug("Play media: %s (%s)", media_id, media_type)
if media_id in PLAY_MEDIA_OPTIONS:
self._braviarc.send_command(media_id)
else:
_LOGGER.warning("Unsupported media_id: %s", media_id)
|
from typing import List
class Solution:
def minSubArrayLen(self, s: int, nums: List[int]) -> int:
        # O(n) sliding window: extend the right edge, then shrink from the
        # left while the window sum still meets the target s.
        res, left, total, N = float('inf'), 0, 0, len(nums)
        for i in range(N):
            total += nums[i]
            while total >= s:
                res = min(res, i + 1 - left)
                total -= nums[left]
                left += 1
        return 0 if res == float('inf') else res
if __name__ == "__main__":
s = Solution()
result = s.minSubArrayLen(7, [2, 3, 1, 2, 4, 3])
print(result)
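    # A couple of extra checks of the sliding-window behaviour
    # (expected values follow from the definition of the problem):
    assert s.minSubArrayLen(4, [1, 4, 4]) == 1
    assert s.minSubArrayLen(11, [1, 1, 1, 1, 1, 1, 1, 1]) == 0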
|
from delorean import Delorean
from gryphon.data_service.auditors.orderbook_auditor import OrderbookAuditor
class CoinbaseOrderbookAuditor(OrderbookAuditor):
def __init__(self):
super(CoinbaseOrderbookAuditor, self).__init__()
self.exchange_name = 'COINBASE_BTC_USD'
product_id = 'BTC-USD'
self.orderbook_url = 'https://api.pro.coinbase.com/products/%s/book?level=3' % product_id
self.audit_time = 60
self.acceptable_changes_threshold = 40
    def get_timestamp(self, new_orderbook):
        # The fetched snapshot is not relied on for a timestamp here;
        # record the audit time as "now" instead.
        return Delorean().datetime
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Copyright (c) LinkedIn Corporation. All rights reserved. Licensed under the BSD-2 Clause license.
# See LICENSE in the project root for license information.
import requests
import ujson
from testutils import prefix, api_v0
@prefix('test_v0_rosters')
def test_api_v0_rosters(team):
team_name = team.create()
roster_name = 'test_v0_rosters_roster_0'
roster_name2 = 'test_v0_rosters_roster_1'
def clean_up():
requests.delete(api_v0('teams/%s/rosters/%s' % (team_name, roster_name)))
requests.delete(api_v0('teams/%s/rosters/%s' % (team_name, roster_name2)))
clean_up()
# test create rosters
re = requests.post(api_v0('teams/%s/rosters' % team_name),
json={'name': roster_name})
assert re.status_code == 201
re = requests.post(api_v0('teams/%s/rosters' % team_name),
json={'name': roster_name2})
assert re.status_code == 201
# test fetch rosters
re = requests.get(api_v0('teams/%s/rosters' % team_name))
assert re.status_code == 200
rosters = re.json()
assert roster_name in rosters
assert roster_name2 in rosters
re = requests.get(api_v0('teams/%s/rosters?name__contains=%s&name__startswith=%s&name__endswith=%s'
% (team_name, roster_name, roster_name, roster_name)))
assert re.status_code == 200
rosters = re.json()
assert roster_name in rosters
roster_id = rosters[roster_name]['id']
re = requests.get(api_v0('teams/%s/rosters?id=%s' % (team_name, roster_id)))
assert re.status_code == 200
rosters = re.json()
assert roster_name in rosters
# test fetch single roster
re = requests.get(api_v0('teams/%s/rosters/%s' % (team_name, roster_name)))
assert re.status_code == 200
roster = re.json()
assert set(['users', 'schedules']) == set(roster.keys())
# test rename roster to an existing roster
re = requests.put(api_v0('teams/%s/rosters/%s' % (team_name, roster_name)),
json={'name': roster_name2})
assert re.status_code == 422
assert re.json() == {
'title': 'IntegrityError',
'description': "roster '%s' already existed for team '%s'" % (roster_name2, team_name),
}
# test delete rosters
re = requests.delete(api_v0('teams/%s/rosters/%s' % (team_name, roster_name)))
assert re.status_code == 200
re = requests.get(api_v0('teams/%s/rosters' % team_name))
assert re.status_code == 200
rosters = re.json()
assert roster_name not in rosters
assert roster_name2 in rosters
re = requests.delete(api_v0('teams/%s/rosters/%s' % (team_name, roster_name2)))
assert re.status_code == 200
re = requests.get(api_v0('teams/%s/rosters' % team_name))
assert re.status_code == 200
rosters = re.json()
assert len(rosters.keys()) == 0
clean_up()
@prefix('test_v0_create_invalid_roster')
def test_api_v0_create_invalid_roster(team, roster):
team_name = team.create()
roster_name = "v0_create_<inv@lid/_roster"
re = requests.post(api_v0('teams/%s/rosters' % team_name),
json={"name": roster_name})
assert re.status_code == 400
roster_name = roster.create(team_name)
invalid_name = "v0_create_<inv@lid/_roster"
re = requests.put(api_v0('teams/%s/rosters/%s' % (team_name, roster_name)),
json={"name": invalid_name})
assert re.status_code == 400
@prefix('test_v0_roster_users')
def test_api_v0_rosters_users(team, roster, user):
team_name = team.create()
roster_name = roster.create(team_name)
user_name = user.create()
def setup():
requests.post(api_v0('teams/%s/users' % team_name),
json={'name': user_name})
setup()
# test adding user to a roster
re = requests.post(api_v0('teams/%s/rosters/%s/users' % (team_name, roster_name)),
json={'name': user_name})
assert re.status_code == 201
# test fetching users for a roster
re = requests.get(api_v0('teams/%s/rosters/%s/users' % (team_name, roster_name)))
assert re.status_code == 200
users = re.json()
assert isinstance(users, list)
assert set(users) == set([user_name])
# test deleting user from a roster
re = requests.delete(
api_v0('teams/%s/rosters/%s/users/%s' % (team_name, roster_name, user_name)))
assert re.status_code == 200
re = requests.get(api_v0('teams/%s/rosters/%s/users' % (team_name, roster_name)))
assert re.status_code == 200
users = re.json()
assert isinstance(users, list)
assert len(users) == 0
@prefix('test_v0_aut_add_roster')
def test_api_v0_auto_add_rosters_users_to_team(team, user, roster):
'''
User should be automatically added to team when added to a roster
'''
team_name = team.create()
roster_name = roster.create(team_name)
user_name = user.create()
# make sure user is not in the team
team = requests.get(api_v0('teams/' + team_name)).json()
assert user_name not in team['users']
# add user to roster
requests.post(api_v0('teams/%s/rosters/%s/users' % (team_name, roster_name)),
json={'name': user_name})
# check to make sure user is also added to the team
team = requests.get(api_v0('teams/' + team_name)).json()
assert user_name in team['users']
@prefix('test_v0_rotation')
def test_api_v0_rotation(team, user, roster):
team_name = team.create()
user_name = user.create()
user.add_to_team(user_name, team_name)
roster_name = roster.create(team_name)
roster_name_2 = roster.create(team_name)
# test adding user to a roster out of rotation
re = requests.post(api_v0('teams/%s/rosters/%s/users' % (team_name, roster_name)),
json={'name': user_name, 'in_rotation': False})
assert re.status_code == 201
requests.post(api_v0('teams/%s/rosters/%s/users' % (team_name, roster_name_2)),
json={'name': user_name, 'in_rotation': False})
# test fetching in-rotation users for a roster, verify user is not there
re = requests.get(api_v0('teams/%s/rosters/%s/users?in_rotation=1' % (team_name, roster_name)))
assert re.status_code == 200
users = re.json()
assert isinstance(users, list)
assert len(users) == 0
# test updating user to put into rotation
re = requests.put(api_v0('teams/%s/rosters/%s/users/%s' % (team_name, roster_name, user_name)),
data=ujson.dumps({'in_rotation': True}))
assert re.status_code == 200
# verify user is now in rotation
re = requests.get(api_v0('teams/%s/rosters/%s/users?in_rotation=1' % (team_name, roster_name)))
assert re.status_code == 200
users = re.json()
assert isinstance(users, list)
assert set(users) == set([user_name])
# verify other rosters unaffected
re = requests.get(api_v0('teams/%s/rosters/%s/users?in_rotation=0' % (team_name, roster_name_2)))
assert re.status_code == 200
users = re.json()
assert isinstance(users, list)
assert set(users) == set([user_name])
@prefix('test_v0_roster_order')
def test_api_v0_roster_order(team, user, roster):
team_name = team.create()
user_name = user.create()
user_name_2 = user.create()
user_name_3 = user.create()
roster_name = roster.create(team_name)
user.add_to_roster(user_name, team_name, roster_name)
user.add_to_roster(user_name_2, team_name, roster_name)
user.add_to_roster(user_name_3, team_name, roster_name)
re = requests.get(api_v0('teams/%s/rosters/%s' % (team_name, roster_name)))
data = re.json()
users = {u['name']: u for u in data['users']}
assert users[user_name]['roster_priority'] == 0
assert users[user_name_2]['roster_priority'] == 1
assert users[user_name_3]['roster_priority'] == 2
re = requests.put(api_v0('teams/%s/rosters/%s' % (team_name, roster_name)),
json={'roster_order': [user_name_3, user_name_2, user_name]})
assert re.status_code == 200
re = requests.get(api_v0('teams/%s/rosters/%s' % (team_name, roster_name)))
data = re.json()
users = {u['name']: u for u in data['users']}
assert users[user_name]['roster_priority'] == 2
assert users[user_name_2]['roster_priority'] == 1
assert users[user_name_3]['roster_priority'] == 0
# TODO: test invalid user or team
# TODO: test out of rotation
# TODO: test / in roster name
|
from __future__ import absolute_import
from django.db import IntegrityError, transaction
from rest_framework import serializers
from rest_framework.response import Response
from sentry.api.bases.organization import OrganizationEndpoint, OrganizationPermission
from sentry.api.exceptions import ResourceDoesNotExist
from sentry.api.serializers import serialize
from sentry.models import AuditLogEntryEvent, OrganizationAccessRequest, OrganizationMemberTeam
class AccessRequestPermission(OrganizationPermission):
scope_map = {
"GET": [
"org:read",
"org:write",
"org:admin",
"team:read",
"team:write",
"team:admin",
"member:read",
"member:write",
"member:admin",
],
"POST": [],
"PUT": [
"org:write",
"org:admin",
"team:write",
"team:admin",
"member:write",
"member:admin",
],
"DELETE": [],
}
class AccessRequestSerializer(serializers.Serializer):
isApproved = serializers.BooleanField()
class OrganizationAccessRequestDetailsEndpoint(OrganizationEndpoint):
permission_classes = [AccessRequestPermission]
# TODO(dcramer): this should go onto AccessRequestPermission
def _can_access(self, request, access_request):
if request.access.has_scope("org:admin"):
return True
if request.access.has_scope("org:write"):
return True
if request.access.has_scope("member:admin"):
return True
if request.access.has_scope("member:write"):
return True
if request.access.has_team_scope(access_request.team, "team:admin"):
return True
if request.access.has_team_scope(access_request.team, "team:write"):
return True
return False
def get(self, request, organization):
"""
Get list of requests to join org/team
"""
if request.access.has_scope("org:write"):
access_requests = list(
OrganizationAccessRequest.objects.filter(
team__organization=organization, member__user__is_active=True
).select_related("team", "member__user")
)
elif request.access.has_scope("team:write") and request.access.teams:
access_requests = list(
OrganizationAccessRequest.objects.filter(
member__user__is_active=True, team__in=request.access.teams
).select_related("team", "member__user")
)
else:
# Return empty response if user does not have access
return Response([])
return Response(serialize(access_requests, request.user))
def put(self, request, organization, request_id):
"""
        Approve or deny a request.
{method} {path}
"""
try:
access_request = OrganizationAccessRequest.objects.get(
id=request_id, team__organization=organization
)
except OrganizationAccessRequest.DoesNotExist:
raise ResourceDoesNotExist
if not self._can_access(request, access_request):
return Response(status=403)
serializer = AccessRequestSerializer(data=request.data, partial=True)
if not serializer.is_valid():
return Response(serializer.errors, status=400)
is_approved = serializer.validated_data.get("isApproved")
if is_approved is None:
return Response(status=400)
if is_approved:
try:
with transaction.atomic():
omt = OrganizationMemberTeam.objects.create(
organizationmember=access_request.member, team=access_request.team
)
except IntegrityError:
pass
else:
self.create_audit_entry(
request=request,
organization=organization,
target_object=omt.id,
target_user=access_request.member.user,
event=AuditLogEntryEvent.MEMBER_JOIN_TEAM,
data=omt.get_audit_log_data(),
)
access_request.send_approved_email()
access_request.delete()
return Response(status=204)
|
#!/usr/bin/env python
from os.path import exists
try:
# Use setup() from setuptools(/distribute) if available
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name='zouk',
version='0.0.1',
author='Angel ILIEV, John Jacobsen',
author_email='[email protected]',
packages=['zouk'],
scripts=[],
url='https://github.com/AngelVI13/zouk',
license='MIT',
description='Simple continuous task execution tool',
long_description=open('README.md').read() if exists("README.md") else "",
entry_points=dict(console_scripts=['zouk=zouk.zouk:main']),
install_requires=[])
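# Hedged usage note: with the entry_points stanza above, `pip install .`
# installs a `zouk` console command that dispatches to zouk.zouk:main.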
|
def three_split(seq):
    """Count the ways to split `seq` into three contiguous, non-empty parts
    that each sum to one third of the total."""
    target = sum(seq) / 3
    # First pass: indices just past a prefix that sums to `target`.
    cur = None
    res = []
    for index, i in enumerate(seq):
        if cur is None:
            cur = i
        else:
            cur += i
        if cur == target:
            res.append((seq[:index + 1], index + 1))
    # Second pass: for each first cut at j, indices just past a middle part
    # that also sums to `target`.
    res2 = []
    for i, j in res:
        cur = None
        for k, l in enumerate(seq[j:]):
            if cur is None:
                cur = l
            else:
                cur += l
            if cur == target:
                res2.append(j + k + 1)
    # A split counts when the remaining tail also sums to `target`
    # and is non-empty (j != len(seq)).
    return sum(sum(seq[j:]) == target for j in res2 if j != len(seq))
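# Hedged worked example: in [1, 2, 3, 0, 3] the total is 9, so the target per
# part is 3; the valid splits are [1,2 | 3 | 0,3] and [1,2 | 3,0 | 3].
if __name__ == "__main__":
    assert three_split([1, 2, 3, 0, 3]) == 2
    assert three_split([0, 1, -1, 0]) == 1
|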
# -*- coding: utf-8 -*-
from clue.misc import random_integer
class IllegalMove(BaseException):
pass
class MovementBoard:
HALLWAY = 1
def __init__(self, board, special_moves):
self.board = board
self.special_moves = special_moves
def tile_no(self, pos):
return self.board[pos[0]][pos[1]]
def room_no(self, pos):
tileno = self.tile_no(pos)
return tileno % 100
def move_allowed(self, oldpos, newpos):
if self.room_no(oldpos) == self.room_no(newpos):
return True
if self.is_door(oldpos) and self.is_door(newpos):
return True
return False
def is_door(self, pos):
return self.tile_no(pos) // 100 == 1
def is_special_pos(self, pos):
return pos in self.special_moves
def is_hallway(self, pos):
return self.room_no(pos) == self.HALLWAY
def move_to(self, oldpos, newpos):
if self.move_allowed(oldpos, newpos):
if self.is_special_pos(newpos):
return self.special_moves[newpos]
return newpos
else:
            raise IllegalMove()
def up(self, pos):
newpos = (pos[0]-1, pos[1])
return self.move_to(pos, newpos)
def down(self, pos):
newpos = (pos[0]+1, pos[1])
return self.move_to(pos, newpos)
def left(self, pos):
newpos = (pos[0], pos[1]-1)
return self.move_to(pos, newpos)
def right(self, pos):
newpos = (pos[0], pos[1]+1)
return self.move_to(pos, newpos)
def get_tile_pos(self, room):
res = []
for row in range(len(self.board)):
for col in range(len(self.board[0])):
if self.board[row][col] == room:
res.append((row, col))
return res
class Mob:
def __init__(self, startpos):
self.pos = startpos
class Gameboard:
def __init__(self, layout, characters):
self._layout = layout
self._mobs = {}
self._active_mob_name = u""
self._start_room = 0
self._number_of_moves_remaining = 0
for mob_name, start_pos in characters:
self._mobs[mob_name] = Mob(start_pos)
def get_mobs_pos(self):
return [mob.pos for mob in self._mobs.values()]
def get_room_no(self, mobname):
return self._layout.room_no(self.get_mob(mobname).pos)
def pos_occupied(self, pos):
return pos in self.get_mobs_pos()
def get_random_free_pos_in_room(self, room):
freetiles = [tile for tile in self._layout.get_tile_pos(room)
if tile not in self.get_mobs_pos()]
random_idx = random_integer(len(freetiles) - 1)
return freetiles[random_idx]
def enter_room(self, name, room):
self._mobs[name].pos = self.get_random_free_pos_in_room(room)
def get_mob(self, name):
return self._mobs[name]
def get_active_mob_name(self):
return self._active_mob_name
def set_active_mob(self, name, number_of_steps):
self._start_room = self._layout.room_no(self._mobs[name].pos)
self._active_mob_name = name
self._number_of_moves_remaining = number_of_steps
def execute_movement(self, pos, direction):
mv = None
if direction == u"up":
mv = self._layout.up
elif direction == u"down":
mv = self._layout.down
elif direction == u"left":
mv = self._layout.left
elif direction == u"right":
mv = self._layout.right
else:
raise ValueError()
return mv(pos)
def move_mob(self, name, direction):
if name != self._active_mob_name:
return False
mob = self._mobs[name]
oldpos = mob.pos
oldroom = self._layout.room_no(oldpos)
try:
newpos = self.execute_movement(oldpos, direction)
newroom = self._layout.room_no(newpos)
except IllegalMove:
return False
if self._layout.is_hallway(newpos) and self.pos_occupied(newpos):
return False
if self._layout.is_hallway(newpos):
self.decrease_movement_counter()
mob.pos = newpos
elif newroom == oldroom:
mob.pos = newpos
elif newroom == self._start_room:
return False
else:
self.enter_room(name, newroom)
self.movement_done()
return True
def movement_done(self):
self._active_mob_name = u""
self._number_of_moves_remaining = 0
self._start_room = 0
def decrease_movement_counter(self):
self._number_of_moves_remaining -= 1
if self._number_of_moves_remaining <= 0:
self.movement_done()
def is_in_hallway(self, mobname):
return self._layout.is_hallway(self.get_mob(mobname).pos)
def todict(self):
return dict([(name, self._mobs[name].pos)
                     for name in self._mobs.keys()])
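# Hedged usage sketch of the tile encoding assumed by MovementBoard:
# tile_no % 100 is the room number (1 is the hallway) and
# tile_no // 100 == 1 marks a door tile.
if __name__ == "__main__":
    board = MovementBoard([[2, 102, 101, 1]], special_moves={})
    pos = board.right((0, 0))   # within room 2: always allowed
    pos = board.right(pos)      # door-to-door step into the hallway
    assert board.is_hallway(pos)
|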
import pandas
import pandas as pd
import yfinance as yf
yf
r = yf.Ticker('EQNR.OL')
r
r.info
r = yf.Ticker('EQNR.OL')
r = yf.Ticker?
r
r.info?
r.history(proxy='http://www-proxy.statoil.no:80')
r.history()
h = r.history()
len(h)
h
len(yf.Ticker('EQNR.OL').history())
len(yf.Ticker('MSFT').history())
len(yf.Ticker('MSFT').history(period='max'))
len(yf.Ticker('EQNR.OL').history(period='max'))
h = yf.Ticker('EQNR.OL').history(period='max')
h
h.plot()
h.plot(['Close'])
h.plot('Date',['Close'])
h
h.plot?
h.plot(x='Date',y=['Close'])
h
h.Date
h.keys
h.keys()
h.plot(y=['Close'])
c = h.Close
c = h.Close.values
c
max(c)
figure()
plot(c)
c = h.Low.values
l = h.Low.values
c = h.Close.values
plot(l)
figure()
plot(l)
h
h.max
h.max()
'atle'.split('r')
'atle'.split('r')[1]
pwd
ls
run -i tickers.py stocklist.osl
run -i tickers.py stocklist.osl
tickers
run -i tickers.py stocklist.osl
history
len(yf.Ticker('EQNR.OL').history(period='max'))
[len(yf.Ticker(ticker).history(period='max')) for ticker in tickers[:10]]
hs = [yf.Ticker(ticker).history(period='max') for ticker in tickers]
len(hs)
hs
h
h.columns
h.index
run -i tickers.py stocklist.osl
len(hs)
len(tickers)
run -i tickers.py stocklist.osl
h
c
c
len(c)
c = h.Close.values
type(c)
c = h.Close
type(c)
c.is_monotonic_increasing?
c.is_monotonic?
c.is_monotonic??
ygr = 20 # percent yield per year
yy = 20 # percent yield per year
period = 30 # days
growth = yy/100 * period/365
growth
h
h.index
t0 = h.index[-100]
t0
t0 = h.index[-300]
t0
h[t0]
h
h.at[t0]
h
h.iat[t0]
t0
h.at?
h.loc(t0)
h.loc?
h.loc[t0]
h.loc[t0+period]
t0+period
datetime.timedelta(period)
datetime.timedelta(period) + t0
h.loc[datetime.timedelta(period) + t0]
h.loc[datetime.timedelta(period) + t0].Close
h.loc[datetime.timedelta(0) + t0].Close
h.interpolate?
h.resample?
h
h.resample('1d').mean()
h.resample('1d').mean?
h.resample('1d').mean
h.resample('1d').ffill()
h.resample('1d')
h.resample('1d').mean()
hh = h.resample('1d').mean()
hh.interpolate()
h.resample('1d').mean().interpolate()
h = h.resample('1d').mean().interpolate()
len(h)
history -f t.py
c = h.loc[t0]
c = h.loc[t0].Close
c0 = h.loc[t0].Close
c0 = h.loc[t0].Close
period = datetime.timedelta(30) # days
c1 = h.loc[t0+period].Close
(c1-c0)/c0
growth
c1
c0
h
h[1:20]
h.loc[t0:t0+period]
h.loc[t0:t0+period].Close
h.loc[t0:t0+period].Close.values
plot(h.loc[t0:t0+period].Close.values)
figure()
figure()
plot(h.loc[t0:t0+period].Close.values)
yy = c0 + (c1-c0)/period*arange(period)
period.days
yy = c0 + (c1-c0)/period*arange(period.days)
yy = c0 + (c1-c0)/period.days*arange(period.days)
yy
plot(yy)
h.loc[t0:t0+period].Close.values
close('all')
plot(h.loc[t0:t0+period].Close.values)
plot(yy)
len(h.loc[t0:t0+period])
len(yy)
yy = c0 + (c1-c0)/period.days*arange(period.days)
period.days
len(h.loc[t0:t0+period])
len(h.loc[t0:t0+period-1])
oneday = datetime.timedelta(1)
len(h.loc[t0:t0+period-oneday])
clf()
plot(h.loc[t0:t0+period-oneday].Close.values)
plot(yy)
yy
yy = c0 + (c1-c0)/period.days*arange(period.days)
yy[0]
c0
c1
yy[-1]
c1 = h.loc[t0+period].Close
h.loc[t0+period].Close
c1
h.loc[t0:t0+period-oneday].Close.values
h.loc[t0+period].Close
h.loc[t0:t0+period].Close.values
c1 = h.loc[t0+period-oneday].Close
c1
yy = c0 + (c1-c0)/period.days*arange(period.days)
yy[-1]
yy
c0,c1
yy
len(yy)
yy = c0 + (c1-c0)/(period.days-1)*arange(period.days)
yy
clf()
plot(h.loc[t0:t0+period-oneday].Close.values)
plot(yy)
dc = c1 - c0
std?
var?
std?
history
c = h.loc[t0:t0+period-oneday].Close.values
cc = c0 + (c1-c0)/(period.days-1)*arange(period.days)
clf()
plot(c)
plot(cc)
std(c -cc)
c1 - c0
history -f t.py
|
import math
def uniform(size, tensor):
    # In-place uniform init over [-1/sqrt(size), 1/sqrt(size)].
    if tensor is not None:
        stdv = 1.0 / math.sqrt(size)
        tensor.data.uniform_(-stdv, stdv)
def glorot(tensor):
    # In-place Glorot/Xavier uniform init: bound = sqrt(6 / (fan_in + fan_out)).
    # Note: the size() calls are made only after the None check, so passing
    # tensor=None no longer raises AttributeError.
    if tensor is not None:
        stdv = math.sqrt(6.0 / (tensor.size(0) + tensor.size(1)))
        tensor.data.uniform_(-stdv, stdv)
def zeros(tensor):
    # In-place zero fill.
    if tensor is not None:
        tensor.data.zero_()
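# Hedged usage sketch: these helpers assume PyTorch-style tensors with in-place
# `.data` initializers; `torch` itself is an assumption, not imported above.
if __name__ == "__main__":
    import torch
    w = torch.empty(16, 8)
    glorot(w)  # samples U(-a, a) with a = sqrt(6 / (16 + 8))
    b = torch.empty(8)
    zeros(b)  # zero-fills the bias
    uniform(w.size(0), b)  # samples U(-0.25, 0.25) since 1/sqrt(16) = 0.25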
|
import unittest
from multiprocessing import Process
import requests
import json
import time
from ..exchange import Exchange
from ..config import DefaultConfig
from ..auth import Auth
from ..error import TimeoutError,ResponseError
from grabbag.list import first_not_none
from grabbag.dict import merge
TEST_METHODS = ('GET','POST','PUT','DELETE')
TEST_DATAS = ('','{"json":true}','<html></html>')
TEST_PARAMS = ({'a':'1'},{'a':'z'},{},{'c':'3'})
TEST_HEADERS = ({'a':'1'},{'a':'z'},{},{'c':'3'})
class TestAuth(Auth): pass
class TestExchange(Exchange):
protocol = 'http'
domain = 'localhost:8087'
base_path = '/blah/v1'
sub_path = 'mirror/create'
def process_response(self, response): return json.loads(response.content)
class TestExchange2(Exchange): pass
def webserver():
from bottle import route, run, request, error
@route('/blah/v1/mirror/:extra', method='GET')
def get(extra): return mirror(extra)
@route('/blah/v1/mirror/:extra', method='POST')
def post(extra): return mirror(extra)
@route('/blah/v1/mirror/:extra', method='PUT')
def putextra(extra): return mirror(extra)
@route('/blah/v1/mirror/:extra', method='DELETE')
def delete(extra): return mirror(extra)
@error(404)
def bad(code): return 'bad'
@route('/sleep/:ms', method='GET')
def sleep(ms):
time.sleep(int(ms)/1000.0)
return json.dumps( {'sleep': ms} )
def mirror(extra):
return json.dumps( dict(
method = request.method,
protocol = request.urlparts[0],
domain = request.urlparts[1],
path = request.urlparts[2],
body = request.body.getvalue(),
params = dict((k,request.query.getall(k)) for k in request.query.keys()),
headers = dict((k,request.headers.get(k)) for k in request.headers.keys())))
run(host='localhost', port=8087)
class ExchangeTest(unittest.TestCase):
@classmethod
def setUpClass(kls):
kls.webserver_process = Process(target=webserver)
kls.webserver_process.start()
working = False
while not working:
time.sleep(0.02)
try:
working = requests.get('http://localhost:8087/blah/v1/mirror/whatever').status_code == 200
except: pass
@classmethod
def tearDownClass(kls):
kls.webserver_process.terminate()
kls.webserver_process.join()
def setUp(self): pass
def tearDown(self): pass
def _test_expected_attr(self, attr, possibles, default=None, add_none=True, final_possibles=None):
final_possibles = list(final_possibles or possibles)
if add_none:
possibles = [None] + list(possibles)
final_possibles = [None] + list(final_possibles)
for k,x in zip(possibles,final_possibles):
for l in (True, False):
for m,z in zip(possibles,final_possibles):
for n,w in zip(possibles, final_possibles):
class TestExchangeX(TestExchange2): pass
if l:
if k is not None: setattr(TestExchangeX, attr, k)
else:
if k is not None: setattr(TestExchangeX, attr, lambda self: k)
auth = Auth()
if n is not None: setattr(auth,attr,n)
self.assertEquals(default if n is None else n, getattr(auth,attr,None))
ex = TestExchangeX(auth, **{attr:m})
self.assertEquals(first_not_none( (z,x,w), default), getattr(ex, attr))
def _test_additive_attr(self, attr, possibles, add_none=True):
if add_none:
possibles = [None] + list(possibles)
for k in possibles:
for l in (True,False):
for m in possibles:
for n in possibles:
class TestExchangeX(TestExchange): pass
auth = Auth()
setattr(auth,attr,n)
if l:
if k is not None: setattr(TestExchangeX, attr, k)
else:
if k is not None: setattr(TestExchangeX, attr, lambda self: k)
ex = TestExchangeX(auth, **{attr:m})
self.assertEquals( merge({}, n or {}, k or {}, m or {}), getattr(ex, attr))
def test_calcs(self):
self._test_expected_attr('method', TEST_METHODS, DefaultConfig.method)
self._test_expected_attr('protocol', ('http','https'), DefaultConfig.protocol)
self._test_expected_attr('domain', ('app.localhost','app.hubspotqa.com','app.hubspot.com'), add_none=False)
self._test_expected_attr('base_path', ('/v1/whatever/','/base/path','') , None, final_possibles=('v1/whatever','base/path',None))
self._test_expected_attr('sub_path', ('/create','','show/'), None, final_possibles=('create',None,'show'))
self._test_expected_attr('data', TEST_DATAS)
self._test_expected_attr('timeout', (10,20,30), DefaultConfig.timeout)
self._test_expected_attr('max_retries', (0,1,2), DefaultConfig.max_retries)
###TODO: make it possible to use params as they can be used (i.e. multiple values per key -- i.e. MultiDict)
self._test_additive_attr('params', TEST_PARAMS)
self._test_additive_attr('headers', TEST_HEADERS)
def test_timeouts(self):
self.assertTrue(TestExchange(TestAuth(), timeout=0.5).result)
with self.assertRaises(TimeoutError):
self.assertTrue(TestExchange(TestAuth(), timeout=0.00001).result)
def test_methods(self):
for method in TEST_METHODS:
self.assertEquals(method, TestExchange(TestAuth(), method=method).result['method'])
def test_datas(self):
for data in TEST_DATAS:
self.assertEquals(data, TestExchange(TestAuth(), data=data).result['body'])
def test_sub_paths(self):
for sub_path in ('create','show','list'):
self.assertEquals("/blah/v1/mirror/%s"%sub_path, TestExchange(TestAuth(), base_path='blah/v1/mirror', sub_path=sub_path).result['path'])
def test_params(self):
for params in TEST_PARAMS:
self.assertEquals(dict((k,[v]) for k,v in params.iteritems()), TestExchange(TestAuth(), params=params).result['params'])
def test_headers(self):
for headers in TEST_HEADERS:
self.assertEquals(dict((k.upper(),v) for k,v in headers.iteritems()), dict((k.upper(),v) for k,v in TestExchange(TestAuth(), headers=headers).result['headers'].iteritems() if k.lower() in headers.keys()))
def test_max_retries(self):
for max_retries in (0,1,2):
try:
self.assertTrue(TestExchange(TestAuth(), timeout=0.00001, max_retries=max_retries).result)
except TimeoutError as err:
self.assertEquals(max_retries+1, len(err.exchange.failures))
for f in err.exchange.failures:
self.assertTrue(isinstance(f, TimeoutError))
continue
except:
self.fail("should not get to here")
self.fail("should not get to here")
def test_bulk_exchange(self):
count = 5
for async in (True,False):
exs = [TestExchange(TestAuth(), params={'i':str(i), 'async':str(async)}) for i in xrange(count)]
for ex,i in zip(Exchange.async_exchange(exs), xrange(count)):
self.assertEquals([str(i)],ex.result['params']['i'])
self.assertEquals([str(async)],ex.result['params']['async'])
def test_different_auth(self):
class TestAuth1(Auth):
def params(self): return {'key1':'value1'}
class TestAuth2(Auth):
def params(self): return {'key2':'value2'}
class TestExchange1(Exchange): pass
class TestExchange2(Exchange): pass
self.assertEquals({'key1':'value1'},TestExchange1(TestAuth1()).params)
self.assertEquals({'key2':'value2'},TestExchange1(TestAuth2()).params)
def test_bad_url(self):
class TestExchange(Exchange):
protocol = 'http'
domain = 'localhost:8087'
base_path = 'bad'
ok404 = True
def process_error(self, error, response):
if response is not None:
if response.status_code==404:
return self.ok404
return False
def process_response(self, response): return response.text
self.assertEquals('bad',TestExchange(TestAuth()).result)
TestExchange.ok404=False
with self.assertRaises(ResponseError):
self.assertTrue(TestExchange(TestAuth()).result)
|
import unittest
from interpreter import instructions
from interpreter.registers import Registers
class TestAnd(unittest.TestCase):
def test_and(self):
'''
        Test that the `and` instruction sets the destination register to the
        bitwise AND of the two source registers.
'''
reg = Registers()
reg.set_register("$t0", 0x5d9d128d)
reg.set_register("$t1", 0x30be0a88)
instructions._and("$t2", "$t0", "$t1", reg)
ret = reg.get_register("$t2")
self.assertEqual(0x109c0288, ret) |
import requests
from rest_framework import filters, viewsets, status
from rest_framework.decorators import action
from rest_framework.response import Response
from apps.reviews.models import Review, REVIEW_MARK_NEGATIVE, REVIEW_MARK_POSITIVE, REVIEW_MARK_NEUTRAL
from apps.reviews.serializers import ReviewSerializer
class ReviewViewSet(viewsets.ModelViewSet):
search_fields = ['title', 'author_name']
filter_backends = (filters.SearchFilter, filters.OrderingFilter)
queryset = Review.objects.all()
serializer_class = ReviewSerializer
@action(methods=['POST'], detail=False)
def add(self, request):
author_name = request.data.get('author_name', None)
title = request.data.get('title', None)
review_text = request.data.get('text', None)
review_mark = request.data.get('mark', None)
review = Review.objects.create(
title=title,
author_name=author_name,
review_text=review_text,
review_mark=review_mark
)
return Response(ReviewSerializer(review).data, status=status.HTTP_201_CREATED)
@action(methods=['GET'], detail=False)
def stats(self, request, format=None):
positive_review_count = Review.objects.filter(review_mark=REVIEW_MARK_POSITIVE).count()
negative_review_count = Review.objects.filter(review_mark=REVIEW_MARK_NEGATIVE).count()
neutral_review_count = Review.objects.filter(review_mark=REVIEW_MARK_NEUTRAL).count()
content = [
{
'mark': REVIEW_MARK_POSITIVE,
'count': positive_review_count
},
{
'mark': REVIEW_MARK_NEGATIVE,
'count': negative_review_count
},
{
'mark': REVIEW_MARK_NEUTRAL,
'count': neutral_review_count
}
]
return Response(content) |
import sqlite3
from discord.ext import commands
class BotEvents(commands.Cog, name = "Bot events"):
def __init__(self, bot):
self.bot = bot
self.bot.loop.create_task(self.update_guilds())
async def update_guilds(self):
await self.bot.wait_until_ready()
for guild in self.bot.guilds:
await self.bot.dbc.execute(
"INSERT OR IGNORE INTO guilds VALUES (?, ?, ?, ?, ?, ?, ?);",
(guild.id, guild.name, 0, 0, 1, None, None)
)
await self.bot.dbc.commit()
@commands.Cog.listener()
async def on_guild_join(self, guild):
await self.bot.dbc.execute(
"INSERT INTO guilds VALUES (?, ?, ?, ?, ?, ?, ?);",
(guild.id, guild.name, 0, 0, 1, None, None)
)
await self.bot.dbc.commit()
await self.bot.refresh_cache_guild(guild.id)
@commands.Cog.listener()
async def on_guild_remove(self, guild):
await self.bot.dbc.execute("DELETE FROM guilds WHERE discord_id = ?;",(guild.id,))
await self.bot.dbc.commit()
del self.bot.guild_cache[guild.id]
@commands.Cog.listener()
async def on_member_join(self, member):
await self.post_welcome_message(member, False)
@commands.Cog.listener()
async def on_member_remove(self, member):
await self.post_welcome_message(member, True)
async def post_welcome_message(self, member, leave):
guild_db = await self.bot.dbc.execute_fetchone("SELECT * FROM guilds WHERE discord_id == ?;", (member.guild.id,))
if not guild_db or not guild_db[4]:
#if guild not in db (shouldn't happen) or if disabled
return
if leave: message, emoji = guild_db[6], ":outbox_tray:"
else: message, emoji = guild_db[5], ":inbox_tray:"
        if not message:
            message = "We're sorry to see you leaving, **MEMBER_NAME**." if leave else "Welcome to the **GUILD_NAME** server, MENTION."
        formatted = self.welcome_fmt(member, message)
        if member.guild.system_channel is not None: # guard: the guild may have no system channel configured
            await member.guild.system_channel.send(f"{emoji} | {formatted}")
def welcome_fmt(self, member, subst_text):
if not subst_text: return None
special_vars = {
"GUILD_NAME": member.guild.name,
"MEMBER_NAME": f"{member.name}#{member.discriminator}",
"MENTION": member.mention
}
for name, value in special_vars.items():
subst_text = subst_text.replace(name,value)
return subst_text
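# Hedged illustration: welcome_fmt(member, "Welcome to GUILD_NAME, MENTION.")
# substitutes the guild's name and the member's mention into the template.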
def setup(bot):
bot.add_cog(BotEvents(bot)) |
DATA_PATH = "../data"
cascades = DATA_PATH+"/dataset_weibo.txt"
cascade_train = DATA_PATH+"/cascade_train.txt"
cascade_val = DATA_PATH+"/cascade_val.txt"
cascade_test = DATA_PATH+"/cascade_test.txt"
shortestpath_train = DATA_PATH+"/shortestpath_train.txt"
shortestpath_val = DATA_PATH+"/shortestpath_val.txt"
shortestpath_test = DATA_PATH+"/shortestpath_test.txt"
observation = 3*60*60-1
pre_times = [24 * 3600] |
import click
import numpy as np
from problem19 import LeaderboardCyclopeptideSequencing
def spectrum_mass(spectrum, m):
spectrum = sorted(spectrum)
spectral_convolution = [
spectrum[i] - spectrum[j]
for i in range(len(spectrum))
for j in range(i)]
spectral_convolution.extend(spectrum)
spectral_convolution = list(filter(lambda x: 57 <= x <= 200, spectral_convolution))
spectral_convolution_counted = list(map(
lambda x: (x, spectral_convolution.count(x)),
set(spectral_convolution)))
spectral_convolution_counted = sorted(
spectral_convolution_counted, key=lambda x: x[1], reverse=True)
threshold = spectral_convolution_counted[m][1] \
if m < len(spectral_convolution_counted) \
else -np.inf
spectral_mass = [s[0] for s in spectral_convolution_counted if s[1] >= threshold]
return spectral_mass
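# Hedged sanity check on a toy spectrum (not part of the assignment input): the
# differences within [0, 57, 118, 175] that land in [57, 200], together with the
# masses themselves, occur with multiplicities {57: 3, 118: 3, 175: 2, 61: 1},
# so m=2 keeps exactly the masses seen at least twice.
assert sorted(spectrum_mass([0, 57, 118, 175], 2)) == [57, 118, 175]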
def ConvolutionCyclopeptideSequencing(m, n, spectrum):
new_mass = spectrum_mass(spectrum, m)
result = LeaderboardCyclopeptideSequencing(spectrum, n, new_mass)
return result
@click.command()
@click.option(
"--fin",
type=str,
default="problem20_input.txt")
def main(fin):
with open(fin) as fin:
data = fin.readlines()
m = tuple(map(int, data[0].split()))[0]
n = tuple(map(int, data[1].split()))[0]
spectrum = list(map(int, data[2].split()))
result = ConvolutionCyclopeptideSequencing(m, n, spectrum)
print("-".join(str(i) for i in result[0]))
if __name__ == '__main__':
main()
|
# _ _ _
# / \ _ __ __| (_)_ __ ___ _ __ _ _
# / _ \ | '_ \ / _` | | '_ \ / _ \| '_ \| | | |
# / ___ \| | | | (_| | | | | | (_) | |_) | |_| |
# /_/ \_\_| |_|\__,_|_|_| |_|\___/| .__/ \__, |
# |_| |___/
# by Jakob Groß
import abc
from typing import Callable
class andino_hardware_interface(abc.ABC):
    def __init__(self, broad_cast_function: Callable[[str], None]):
        self.broad_cast_function: Callable[[str], None] = broad_cast_function
@abc.abstractmethod
def start(self):
raise NotImplementedError("meta class method not overwritten")
@abc.abstractmethod
def stop(self):
raise NotImplementedError("meta class method not overwritten")
@abc.abstractmethod
def reset(self) -> str:
raise NotImplementedError("meta class method not overwritten")
@abc.abstractmethod
def info(self) -> str:
raise NotImplementedError("meta class method not overwritten")
@abc.abstractmethod
def hardware(self, mode: int) -> str:
raise NotImplementedError("meta class method not overwritten")
@abc.abstractmethod
def set_polling(self, polling_time: int) -> str:
raise NotImplementedError("meta class method not overwritten")
@abc.abstractmethod
def set_skip(self, skip_count: int) -> str:
raise NotImplementedError("meta class method not overwritten")
@abc.abstractmethod
def set_edge_detection(self, value: bool) -> str:
raise NotImplementedError("meta class method not overwritten")
@abc.abstractmethod
def set_send_time(self, send_time: int) -> str:
raise NotImplementedError("meta class method not overwritten")
@abc.abstractmethod
def set_send_broadcast_timer(self, value: bool) -> str:
raise NotImplementedError("meta class method not overwritten")
@abc.abstractmethod
def set_debounce(self, debouncing: int) -> str:
raise NotImplementedError("meta class method not overwritten")
@abc.abstractmethod
def set_power(self, value: int) -> str:
raise NotImplementedError("meta class method not overwritten")
@abc.abstractmethod
def set_send_relays_status(self, value: bool) -> str:
raise NotImplementedError("meta class method not overwritten")
@abc.abstractmethod
def set_relay(self, relay_num: int, value: int) -> str:
raise NotImplementedError("meta class method not overwritten")
@abc.abstractmethod
def pulse_relay(self, relay_num: int, value: int) -> str:
raise NotImplementedError("meta class method not overwritten")
@abc.abstractmethod
def set_broadcast_on_change(self, value: bool) -> str:
raise NotImplementedError("meta class method not overwritten")
@abc.abstractmethod
def get_counters(self, mode: int) -> str:
raise NotImplementedError("meta class method not overwritten")
|
# ****************************************************************
# Author: Sheldon Dorgelo
# Date (last edited): 02/11/2020
# Purpose: creates a plot with the detections highlighted for
# easier viewing
# ****************************************************************
import DataObject
import matplotlib.pyplot as plt
from matplotlib.patches import Rectangle
import numpy as np
def publication(dataObject):
# store annotated figure into data object
dataObject.setAnnotatedImage(createAnnotatedImage(dataObject))
# creates an image with annotations to highlight detections
def createAnnotatedImage(dataObject):
diffImg = dataObject.getDiffImg() # get the data from dataObject
imgStd = dataObject.getImgStd()
more = dataObject.getCandGt()
less = dataObject.getCandLt()
newFigure = plt.figure(figsize=(12,8))
plt.imshow(diffImg,clim=(-imgStd, imgStd)) # set limit on displayed colours
    for i in range(len(more[0])): # annotate each bright detection
        text = (more[1][i],more[0][i]) # coordinates of detection in (x, y) form
        plt.annotate(text,(more[1][i],more[0][i]),color='black',fontsize='large',fontweight='bold',ha='center')
    for i in range(len(less[0])): # annotate each dark detection
        text = (less[1][i],less[0][i])
        plt.annotate(text,(less[1][i],less[0][i]),color='black',fontsize='large',fontweight='bold',ha='center')
plt.plot(less[1],less[0],'bo') # plots blue dots for each dark detection
plt.plot(more[1],more[0],'ro') # plots red dots for each bright detection
plt.xlabel("Pixel number")
plt.ylabel("Pixel number")
plt.colorbar()
return newFigure |
import numpy as np
import skimage.io as io
import sys
from PIL import Image
import glob
import ntpath
import os
from nltk.tokenize import RegexpTokenizer
def read_hmap(hmap_path):
hmap = Image.open(hmap_path)
hmap = np.asarray(hmap)
hmap = np.squeeze(hmap[:,:,0])
return hmap
x_index, y_index, w_index, h_index, l_index, c_index = 0, 1, 2, 3, 4, 5
def path_leaf(path):
return ntpath.basename(path)
def calc_sort_size(boxes_arr):
# boxes_arr (type = numpy array): boxes_num x 6 (x, y, w, h, l, crowd_l)
# calculate the product of width and height
sizes = np.multiply(boxes_arr[:, w_index], boxes_arr[:, h_index])
    # sort boxes by size in descending order (largest first)
sorted_indices = np.argsort(sizes)[::-1].tolist()
sorted_boxes_arr = boxes_arr[sorted_indices,:]
return sorted_boxes_arr
def reorg_boxes_arr(boxes_arr):
    # new_boxes_arr (type = numpy array): boxes_num x 6 (center_x, center_y, w, h/w, l, crowd_l)
    new_boxes_arr = np.zeros(boxes_arr.shape)
    # center_x
    new_boxes_arr[:,0] = boxes_arr[:,x_index]+boxes_arr[:,w_index]/2.0
    # center_y
    new_boxes_arr[:,1] = boxes_arr[:,y_index]+boxes_arr[:,h_index]/2.0
    # aspect ratio h/w
    new_boxes_arr[:,3] = np.divide(boxes_arr[:,h_index], boxes_arr[:,w_index])
    # w and l pass through unchanged
    new_boxes_arr[:,[2,4]] = boxes_arr[:,[w_index,l_index]]
return new_boxes_arr
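# Hedged illustration: a box (x=10, y=20, w=40, h=80, l=3, crowd=0) becomes
# (center_x=30.0, center_y=60.0, w=40, h/w=2.0, l=3, 0); the crowd column of
# the output stays at its zero initialization.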
# COCO_train2014_
dataDir = '../data/coco'
dataType = 'COCO_train2014_'
dataType_out = 'train2014'
outputDirname = 'bbox_label'
textDir = '%s/text/%s*.txt'%(dataDir, dataType)
mainBoxDir = '%s/masks'%(dataDir)
text_files = glob.glob(textDir)
text_nms = [path_leaf(path) for path in text_files]
text_nms_wo_ext = [name.split(".")[0] for name in text_nms]
max_bbox_num = 10
boxes_dim = 6
std_img_size = 256
use_crowd = False
display_step = 500
xs_total, ys_total, ws_total, hs_total = [], [], [], []
fout_filename = open('%s/%s/filenames_%s.txt'%(dataDir, outputDirname, dataType_out), 'w')
fout_bbox_label = open('%s/%s/input_%s.txt'%(dataDir, outputDirname, dataType_out), 'w')
fout_mean_std = open('%s/%s/mean_std_%s.txt'%(dataDir, outputDirname, dataType_out), 'w')
for img_ind in xrange(0,len(text_nms_wo_ext)):
if img_ind % display_step == 0:
print('%07d / %07d'%(img_ind, len(text_nms_wo_ext)))
with open(text_files[img_ind], "r") as f:
caps = f.readlines()
hmapDir = '%s/%s/*.jpg'%(mainBoxDir, text_nms_wo_ext[img_ind])
hmap_files = glob.glob(hmapDir)
# check if hmap_files exist
if len(hmap_files) == 0:
continue
boxFile = '%s/%s/boxes.txt'%(mainBoxDir, text_nms_wo_ext[img_ind])
with open(boxFile, "r") as f:
boxes = f.readlines()
boxes_arr = np.zeros((len(boxes), boxes_dim), dtype=float)
noncrowd_mask = []
for box_ind in xrange(len(boxes)):
box = boxes[box_ind].split(',')
box = [int(float(num)) for num in box]
boxes_arr[box_ind,:] = np.array(box)
if box[c_index] == 0:
noncrowd_mask.append(box_ind)
if not use_crowd and len(noncrowd_mask) > 0:
boxes_arr = boxes_arr[noncrowd_mask, :]
elif not use_crowd and len(noncrowd_mask) == 0:
continue
assert np.sum(boxes_arr[:,c_index]) == 0
hmap = read_hmap(hmap_files[0])
height, width = hmap.shape[0], hmap.shape[1]
height_scale = std_img_size/float(height)
width_scale = std_img_size/float(width)
boxes_arr[:, x_index] = boxes_arr[:, x_index]*width_scale
boxes_arr[:, x_index] = np.clip(boxes_arr[:, x_index], 1, std_img_size)
boxes_arr[:, y_index] = boxes_arr[:, y_index]*height_scale
boxes_arr[:, y_index] = np.clip(boxes_arr[:, y_index], 1, std_img_size)
boxes_arr[:, w_index] = boxes_arr[:, w_index]*width_scale
boxes_arr[:, w_index] = np.clip(boxes_arr[:, w_index], 1, std_img_size)
boxes_arr[:, h_index] = boxes_arr[:, h_index]*height_scale
boxes_arr[:, h_index] = np.clip(boxes_arr[:, h_index], 1, std_img_size)
    # sort boxes by area, largest first (see calc_sort_size)
boxes_arr = calc_sort_size(boxes_arr)
tmp_max_bbox_num = min(max_bbox_num, boxes_arr.shape[0])
boxes_arr = boxes_arr[:max_bbox_num,:]
boxes_arr = reorg_boxes_arr(boxes_arr)
xs_total = np.concatenate((xs_total, boxes_arr[:,x_index]))
ys_total = np.concatenate((ys_total, boxes_arr[:,y_index]))
ws_total = np.concatenate((ws_total, boxes_arr[:,w_index]))
hs_total = np.concatenate((hs_total, boxes_arr[:,h_index]))
xs = boxes_arr[:, x_index].tolist()
xs = ['%.2f'%(x) for x in xs]
ys = boxes_arr[:, y_index].tolist()
ys = ['%.2f'%(y) for y in ys]
ws = boxes_arr[:, w_index].tolist()
ws = ['%.2f'%(w) for w in ws]
hs = boxes_arr[:, h_index].tolist()
hs = ['%.2f'%(h) for h in hs]
ls = boxes_arr[:, l_index].tolist()
ls = [str(int(l)) for l in ls]
line = "\t".join([" ".join(xs), " ".join(ys), " ".join(ws), " ".join(hs), " ".join(ls)])
real_caps_num = 0
for cap_ind in xrange(len(caps)):
cap = caps[cap_ind].decode('utf8').split(',')[0].split('\n')[0].replace("\ufffd\ufffd", " ")
tokenizer = RegexpTokenizer(r'\w+')
tokens = tokenizer.tokenize(cap.lower())
tokens_new = []
for t in tokens:
t = t.encode('ascii', 'ignore').decode('ascii')
if len(t) > 0:
tokens_new.append(t)
if len(tokens_new) == 0:
continue
real_caps_num += 1
line_new = "\t".join([" ".join(tokens_new), line])
fout_bbox_label.write(line_new+'\n')
fout_filename.write(text_nms_wo_ext[img_ind]+'\n')
fout_filename.close()
fout_bbox_label.close()
xs_mean, xs_std = np.mean(xs_total), np.std(xs_total)
ys_mean, ys_std = np.mean(ys_total), np.std(ys_total)
ws_mean, ws_std = np.mean(ws_total), np.std(ws_total)
hs_mean, hs_std = np.mean(hs_total), np.std(hs_total)
fout_mean_std.write('%f %f\n'%(xs_mean, xs_std))
fout_mean_std.write('%f %f\n'%(ys_mean, ys_std))
fout_mean_std.write('%f %f\n'%(ws_mean, ws_std))
fout_mean_std.write('%f %f\n'%(hs_mean, hs_std))
fout_mean_std.close()
|
"""
This module provides definitions and functionality related to foreign-exchange conversions.
"""
__all__ = ["FXRate", "FXRateLookupError", "FXRateService"]
from abc import ABCMeta, abstractmethod
from decimal import Decimal
from typing import Iterable, NamedTuple, Optional, Tuple
from .commons.numbers import ONE, ZERO
from .commons.zeitgeist import Date
from .currencies import Currency
class FXRateLookupError(LookupError):
"""
Provides an exception indicating that the foreign exchange rate is not found.
"""
def __init__(self, ccy1: Currency, ccy2: Currency, asof: Date) -> None:
"""
Initializes the foreign exchange rate lookup error.
"""
## Keep the slots:
self.ccy1 = ccy1
self.ccy2 = ccy2
self.asof = asof
## Set the message:
super().__init__(f"Foreign exchange rate for {ccy1}/{ccy2} not found as of {asof}")
class FXRate(NamedTuple):
"""
Defines a foreign exchange (FX) rate model.
    Note that the constructor of this class is not safe: it does not check its input. :method:`FXRate.of`, on the
    other hand, provides a safer way of creating :class:`FXRate` instances.
**Implementation Note:**
I wanted to use an immutable, compact object model with fast creation and property access. Options were
tweaked plain-vanilla Python class, NamedTuple and dataclasses.
NamedTuple has slightly slower property access, whereby immutable dataclasses are slow for creation.
Furthermore, as of the implementation of this class, mypy does not have proper dataclass support. Therefore,
I am sticking to NamedTuple implementation.
Last but not least, as objects are essentially tuples, indexed access to properties is possible and slightly
faster.
>>> import datetime
>>> from decimal import Decimal
>>> from pypara.currencies import Currencies
>>> rate = FXRate(Currencies["EUR"], Currencies["USD"], datetime.date.today(), Decimal("2"))
>>> ccy1, ccy2, date, value = rate
>>> ccy1 == Currencies["EUR"]
True
>>> ccy2 == Currencies["USD"]
True
>>> date == datetime.date.today()
True
>>> value == Decimal("2")
True
"""
#: Defines the first currency of the FX rate.
ccy1: Currency
#: Defines the second currency of the FX rate.
ccy2: Currency
#: Defines the date the FX rate is effective as of.
date: Date
#: Defines the value of the FX rate.
value: Decimal
def __invert__(self) -> "FXRate":
"""
Returns the inverted foreign exchange rate.
>>> import datetime
>>> from decimal import Decimal
>>> from pypara.currencies import Currencies
>>> nrate = FXRate(Currencies["EUR"], Currencies["USD"], datetime.date.today(), Decimal("2"))
>>> rrate = FXRate(Currencies["USD"], Currencies["EUR"], datetime.date.today(), Decimal("0.5"))
>>> ~nrate == rrate
True
"""
return FXRate(self[1], self[0], self[2], self[3] ** -1)
@classmethod
def of(cls, ccy1: Currency, ccy2: Currency, date: Date, value: Decimal) -> "FXRate":
"""
Creates and returns an FX rate instance by validating arguments.
>>> import datetime
>>> from decimal import Decimal
>>> from pypara.currencies import Currencies
>>> urate = FXRate(Currencies["EUR"], Currencies["USD"], datetime.date.today(), Decimal("2"))
>>> srate = FXRate.of(Currencies["EUR"], Currencies["USD"], datetime.date.today(), Decimal("2"))
>>> urate == srate
True
"""
        ## All arguments must be of their respective specified types:
        if not isinstance(ccy1, Currency):
            raise ValueError("CCY/1 must be of type `Currency`.")
        if not isinstance(ccy2, Currency):
            raise ValueError("CCY/2 must be of type `Currency`.")
        if not isinstance(value, Decimal):
            raise ValueError("FX rate value must be of type `Decimal`.")
        if not isinstance(date, Date):
            raise ValueError("FX rate date must be of type `date`.")
## Check the value:
if value <= ZERO:
raise ValueError("FX rate value can not be equal to or less than `zero`.")
## Check consistency:
if ccy1 == ccy2 and value != ONE:
raise ValueError("FX rate to the same currency must be `one`.")
## Create and return the FX rate instance:
return cls(ccy1, ccy2, date, value)
class FXRateService(metaclass=ABCMeta):
"""
Provides an abstract class for serving foreign exchange rates.
"""
#: Defines the default foreign exchange rate service for the runtime.
default: Optional["FXRateService"] = None # noqa: E704
#: Defines an FX rate query tuple.
TQuery = Tuple[Currency, Currency, Date]
@abstractmethod
def query(self, ccy1: Currency, ccy2: Currency, asof: Date, strict: bool = False) -> Optional[FXRate]:
"""
Returns the foreign exchange rate of a given currency pair as of a given date.
:param ccy1: The first currency of foreign exchange rate.
:param ccy2: The second currency of foreign exchange rate.
:param asof: Temporal dimension the foreign exchange rate is effective as of.
        :param strict: Indicates if we should raise a lookup error when the foreign exchange rate can not be found.
        :return: The foreign exchange rate as an :class:`FXRate` instance or None.
"""
pass
@abstractmethod
def queries(self, queries: Iterable[TQuery], strict: bool = False) -> Iterable[Optional[FXRate]]:
"""
Returns foreign exchange rates for a given collection of currency pairs and dates.
:param queries: An iterable of :class:`Currency`, :class:`Currency` and :class:`Temporal` tuples.
        :param strict: Indicates if we should raise a lookup error when a foreign exchange rate can not be found.
:return: An iterable of rates.
"""
pass
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Fri Sep 8 10:51:21 2017
@author: jlashner
"""
from pylab import *
import tmm
import numpy as np
import matplotlib.pyplot as plt
import scipy.integrate as intg
#Constants
c =2.99792458 * 10**8 #speed of light [m/s]
#Units
GHz = 10 ** 9 # GHz -> Hz
def ARCoat(n, lam0, layers = 2):
"""Gets Index of refraction and thickness for AR coating centered around lam0"""
ni= .00008
if layers == 2:
nAR = [real(n)**(1./3) + ni*1j, real(n)**(2./3) + ni * 1j]
elif layers == 1:
nAR = [real(n)**(1./2)]
dAR = [lam0 / (4 * real(n)) for n in nAR]
return nAR, dAR
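# Hedged worked example: for alumina with real(n0) = 3.1 and lam0 = 2.5 mm, the
# two-layer coating indices are 3.1**(1./3) ~ 1.46 and 3.1**(2./3) ~ 2.13, giving
# quarter-wave thicknesses 2.5/(4*1.46) ~ 0.43 mm and 2.5/(4*2.13) ~ 0.29 mm.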
def getCoeffs(n, d, freq, theta, pol):
"""Returns T, R, A coefficients for an optical stack of polarization pol ('s' or 'p')"""
assert pol in ['s', 'p']
lam_vac= c / freq * 1000 #vacuum wavelength in mm
s = tmm.coh_tmm(pol, n, d, theta, lam_vac)
return [s['T'], s['R'], 1 - s['T'] - s['R']]
def getDiffCoeffs(name, band_center, fbw, theta):
"""Returns the differential transmission and differential coefficient of an optical element"""
#Determines band and calculates corresponding optimized wavelength
    if band_center < 50 * GHz: #LF Band
        nu0 = 33 * GHz
        lam0 = c / nu0 * 1000 # optimized wavelength [mm] (~9.09 mm)
        layers = 2
elif band_center < 200 * GHz: #MF Band
nu0 = 120 * GHz
lam0 = 2.5 #[mm]
layers = 2
elif band_center < 300 * GHz: #UHF Band
nu0 = 267 * GHz
lam0 = 1.123 #[mm]
layers = 1
    else:
        raise ValueError(f"Band center {band_center / GHz} GHz is not in any band.")
flo = band_center * (1 - fbw/ 2.)
fhi = band_center * (1 + fbw/ 2.)
if name == "Window":
n0 = 1.5 + .0001j
d0 = 10.0
elif name == "AluminaF":
n0 = 3.1 + .00008j
d0 = 3.0
else:
return (0,0)
nAR, dAR = ARCoat(n0, lam0, layers = layers)
n_stack = [1.0] + nAR + [n0] + nAR[::-1] + [1.0]
d_stack = [Inf] + dAR + [d0] + dAR[::-1] + [Inf]
    #Creates frequency array and gets T, R, and A coefficients across the bandwidth
freqs = np.linspace(flo, fhi, 300)
s_coeffs = [getCoeffs(n_stack, d_stack, f, theta, 's') for f in freqs]
p_coeffs = [getCoeffs(n_stack, d_stack, f, theta, 'p') for f in freqs]
Ts, Rs, As = np.transpose(s_coeffs)
Tp, Rp, Ap = np.transpose(p_coeffs)
#Band-averages differential transmission, reflection and absorption
diffTrans = abs(intg.simps((Ts - Tp)/2, freqs)/(band_center * fbw))
diffRefl = abs(intg.simps((Rs - Rp)/2, freqs)/(band_center * fbw))
diffAbs = abs(intg.simps((As - Ap)/2, freqs)/(band_center * fbw))
# print("Absorption: ", abs(intg.simps((As + Ap)/2, freqs)/(band_center * fbw)))
return (diffTrans, diffRefl, diffAbs)
if __name__ == "__main__":
bc = np.array([27., 39., 93.0,145., 225., 278.]) * GHz # Band center [Hz]
fbw = np.array([.222, .462, .376, .276, .267, .278]) #Fractional bandwidth
theta = np.deg2rad(20.0)
for index in [2,3]:
T_filter, _, _ = getDiffCoeffs("AluminaF", bc[index], fbw[index], theta)
T_window, _, _ =getDiffCoeffs("Window", bc[index], fbw[index], theta)
print(f'Window ({bc[index]/GHz}): {T_window}')
print(f'Filter ({bc[index]/GHz}): {T_filter}')
|
#!/usr/bin/env python
import typer
from typing import Dict, List, Tuple
from lib import aoc
SAMPLE = ["""\
mxmxvkd kfcds sqjhc nhms (contains dairy, fish)
trh fvjkl sbzzf mxmxvkd (contains dairy)
sqjhc fvjkl (contains soy)
sqjhc mxmxvkd sbzzf (contains fish)
"""]
class Day21(aoc.Challenge):
TIMER_ITERATIONS = (20000, 200000)
TESTS = (
aoc.TestCase(inputs=SAMPLE[0], part=1, want=5),
aoc.TestCase(inputs=SAMPLE[0], part=2, want='mxmxvkd,sqjhc,fvjkl'),
)
def allergen_to_food(self, data: List[Tuple[List[str], List[str]]]) -> Dict[str, str]:
"""Map allergens to the ingredient that contains it.
        Map an allergen to a list of potential ingredients and pare down that list
        until it contains exactly one ingredient.
"""
solved = {}
# Map allergens to the intersections of candidates.
candidates = {}
for pair in data:
ingredients, allergens = pair
for allergen in allergens:
if allergen not in candidates:
candidates[allergen] = set(ingredients) # Copy the set.
else:
candidates[allergen] &= set(ingredients)
# Pair up allergens to ingredients and remove candidates.
while candidates:
# Find an allergen with only one candidate.
allergen, foods = [(i, j) for i, j in candidates.items() if len(j) == 1][0]
food = foods.pop()
solved[allergen] = food
# Drop the candidate and remove the ingredient from all other candidate lists.
del candidates[allergen]
for i in candidates.values():
if food in i:
i.remove(food)
return solved
def part1(self, data: List[Tuple[List[str], List[str]]]) -> int:
"""Count the ingredients (including repeats) of food without allergens."""
solved = self.allergen_to_food(data)
all_ingredients = [b for line in data for b in line[0]]
bad_ingredients = solved.values()
return sum(True for i in all_ingredients if i not in bad_ingredients)
def part2(self, data: List[Tuple[List[str], List[str]]]) -> str:
"""Return the foods containing allergens, sorted by allergen."""
solved = self.allergen_to_food(data)
return ",".join(solved[i] for i in sorted(solved.keys()))
def parse_input(self, puzzle_input: str):
"""Parse input lines into tuple(list[ingredients], list[allergens])."""
out = []
for line in puzzle_input.split('\n'):
ingredients_raw, allergens_raw = line.split(' (contains ')
ingredients = ingredients_raw.split(' ')
allergens = allergens_raw[:-1].split(', ')
out.append((set(ingredients), set(allergens)))
return out
if __name__ == '__main__':
typer.run(Day21().run)
# vim:ts=2:sw=2:expandtab
|
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String, Date
from sqlalchemy.orm import sessionmaker
# We must import every data type we want to use
from sqlalchemy import create_engine
# engine = create_engine('mysql+mysqlconnector://root:ge3k@localhost:3306/pygui')
engine = create_engine('sqlite:///pinjamin.db')
Base = declarative_base()
# every class to be mapped must inherit from the declarative_base instance
class Pinjam(Base): #inherit from Base
__tablename__ = 'pinjam'
    # __tablename__ here is only metadata
id = Column(Integer, nullable=False, primary_key=True)
nama = Column(String(25), nullable=False)
barang = Column(String(25), nullable=False)
tanggal = Column(Date, nullable=False)
tipe = Column(String(1), nullable=False, default="1")
    def __repr__(self):
        # The borrower is stored in `nama`; the old code referenced a
        # nonexistent `peminjam` attribute and raised AttributeError.
        return "<Pinjam(peminjam='{0}', barang='{1}', tanggal='{2}', tipe='{3}')>".format(
            self.nama, self.barang, self.tanggal, self.tipe)
Base.metadata.create_all(engine)
# # runs the mapping commands / creates the database
DBSession = sessionmaker()
DBSession.configure(bind=engine)
session = DBSession() |
#!/usr/bin/env python
################################################################################
##
## MIT License
##
## Copyright (c) 2018 Team Roborex, NIT Rourkela
##
## Permission is hereby granted, free of charge, to any person obtaining a copy
## of this software and associated documentation files (the "Software"), to deal
## in the Software without restriction, including without limitation the rights
## to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
## copies of the Software, and to permit persons to whom the Software is
## furnished to do so, subject to the following conditions:
##
## The above copyright notice and this permission notice shall be included in all
## copies or substantial portions of the Software.
##
## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
## IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
## FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
## AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
## LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
## OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
## SOFTWARE.
##
################################################################################
################################################################################
##
## AUTHORS: Prabin Rath
##
################################################################################
#Test file for Roborex Chess game features simulation
'''
****************** TAB_WIDTH=8
README: The following code is just for testing the game features; don't expect robust error handling. The program terminates on unusual exceptions, which are supposed to be handled when the code is inserted into the final ROS node.
Change the saving directory accordingly before running.
'''
import os
import chess
import chess.pgn
import chess.uci
import time
flag=True
choice=0
eng=chess.uci.popen_engine("stockfish")
inp=''
############################################### Helping Functions #######################################################
def nth_retrival(event,white,black,round_):
new_pgn=open("/home/prabin/chess/Final/gamedata.pgn")
i=0
for i,headers in chess.pgn.scan_headers(new_pgn):
if(headers["Event"]==event) and (headers["White"]==white) and (headers["Black"]==black) and (headers["Round"]==round_):
game=chess.pgn.read_game(new_pgn)
game.headers["Event"]=headers["Event"]
game.headers["White"]=headers["White"]
game.headers["Black"]=headers["Black"]
game.headers["Round"]=headers["Round"]
game.headers["Date" ]=headers["Date" ]
print('Found saved game')
brd=game.board()
for move in game.main_line():
brd.push(move)
print(str(brd)+'\n')
ret_data=[game,brd]
return ret_data
return 0
def nth_deletion(event,white,black,round_):
new=open("/home/prabin/chess/Final/gamedata.pgn")
i=0
game_data=open("/home/prabin/chess/Final/tempo.pgn","w")
for i,headers in chess.pgn.scan_headers(new):
if(headers["Event"]==event) and (headers["White"]==white) and (headers["Black"]==black) and (headers["Round"]==round_):
continue
else:
game=chess.pgn.read_game(new)
game.headers["Event"]=headers["Event"]
game.headers["White"]=headers["White"]
game.headers["Black"]=headers["Black"]
game.headers["Round"]=headers["Round"]
game.headers["Date"]=headers["Date"]
exporter=chess.pgn.FileExporter(game_data)
game.accept(exporter)
game_data.close()
os.remove("/home/prabin/chess/Final/gamedata.pgn")
os.rename("/home/prabin/chess/Final/tempo.pgn","/home/prabin/chess/Final/gamedata.pgn")
def logger(game,string):
log=open("/home/prabin/chess/Final/log.txt","a")
log.write(game.headers["Event"]+','+game.headers["White"]+','+game.headers["Black"]+','+game.headers["Round"]+','+time.ctime()+','+string+'\n')
log.close()
def kyle():
current_time=time.ctime()
parser={'Jan':'01','Feb':'02','Mar':'03','Apr':'04','May':'05','Jun':'06','Jul':'07','Aug':'08','Sep':'09','Oct':'10','Nov':'11','Dec':'12'}
ll=current_time.split()
return ll[-1]+':'+parser[ll[1]]+':'+ll[2]
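# Hedged example: time.ctime() == 'Mon Jun  1 12:00:00 2020' makes kyle()
# return '2020:06:1' (note the day is not zero-padded).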
def ui_list_initializer(game):
new_text = open("/home/prabin/catkin_ws/src/chess_bot/test_files/Final/uci_.txt","w")
if(game.headers["Black"]=="Stockfish"):
tagA="0 ";tagB="1 ";
else:
tagA="1 ";tagB="0 ";
node = game.root().variations[0];node_ = game.end();
while(node.move.uci()!=node_.move.uci()):
try:
new_text.write( tagA+node.move.uci()+"\n" )
node = node.variations[0]
new_text.write( tagB+node.move.uci()+"\n" )
node = node.variations[0]
except:
pass
new_text.close()
##########################################################################################################################
#Game initialization
if os.path.exists("/home/prabin/chess/Final/temp.pgn"): #add turn setup after recovery
print("Previous game crashed\n")
new_pgn=open("/home/prabin/chess/Final/temp.pgn")
game=chess.pgn.read_game(new_pgn)
ui_list_initializer(game)
node=game.end()
brd=game.board()
for move in game.main_line():
brd.push(move)
print(str(brd)+'\n')
logger(game,"Crashed Game")
'''
if ' b ' in brd.fen():
flag=False
'''
else:
print("\nRoborex Chess game features simulation\n")
choice=input("\nEnter 1 for new game\nEnter 2 to load saved game\nEnter 3 to quit\n\n")
if choice==1:
print("New game\n")
brd=chess.Board()
game=chess.pgn.Game()
game.headers["Event"]=raw_input("Enter game Event: ")
game.headers["White"]=raw_input("Enter Player name for White: ")
game.headers["Black"]=raw_input("Enter Player name for Black: ")
game.headers["Round"]=raw_input("Enter Round number: ")
game.headers["Date" ] =kyle()
logger(game,"New game")
if input('\nEnter 1 to have first move else any other number: ')==1:
flag=True
else:
flag=False
elif choice==2: #game quits for wrong load data inputs
loaded=nth_retrival(raw_input("Enter game Event: "),raw_input("Enter Player name for White: "),raw_input("Enter Player name for Black: "),raw_input("Enter Round number: "))
if loaded==0:
choice=3
else:
game=loaded[0]
ui_list_initializer(game)
node=game.end()
logger(game,"retrived")
brd=loaded[1]
else:
pass
#Infinite loop for game cycles
while(True and choice!=3):
inp=''
save_flag=False
print('\nPossible inputs: move,undo,rst,save,quit')
if flag==True: #user turn
print("user turn")
inp=raw_input('Enter your move: ')
if inp=='save':
if os.path.exists("/home/prabin/chess/Final/gamedata.pgn"): #checking the presence of a previously saved game with equal parameters
pgn_=open("/home/prabin/chess/Final/gamedata.pgn")
for i,headers in chess.pgn.scan_headers(pgn_):
if(headers["Event"]==game.headers["Event"]) and (headers["White"]==game.headers["White"]) and (headers["Black"]==game.headers["Black"]) and (headers["Round"]==game.headers["Round"]):
pgn_.close()
nth_deletion(game.headers["Event"],game.headers["White"],game.headers["Black"],game.headers["Round"])
break
pgn_.close()
pgn_=open("/home/prabin/chess/Final/gamedata.pgn","a")
exporter = chess.pgn.FileExporter(pgn_)
game.accept(exporter)
pgn_.close()
logger(game,'save')
elif inp=='quit':
eng.quit()
logger(game,'quit')
break
elif inp=='undo':
if len(game.variations)==1:
node.parent.remove_variation(brd.pop())
del node
node=game.end()
node.parent.remove_variation(brd.pop())
del node
node=game.end()
print(brd)
logger(game,'undo')
else:
print('Nothing To Undo!')
elif inp=='rst':
temp=chess.pgn.Game()
temp.headers['Event']=game.headers['Event']
temp.headers['White']=game.headers['White']
temp.headers['Black']=game.headers['Black']
temp.headers['Round']=str(int(game.headers['Round'])+1)
temp.headers['Date']=kyle()
game=temp
brd=game.board()
logger(game,'reset')
print('Game Restart')
print(brd)
if input('\nEnter 1 to have first move else any other number: ')==1:
flag=True
else:
flag=False
elif chess.Move.from_uci(inp) in brd.legal_moves: #Dont get blank string here
brd.push(chess.Move.from_uci(inp))
print(brd)
print('\n')
print("Current FEN: "+brd.fen())
print('\n')
save_flag=True
flag=False
else:
print("illegal move re-enter")
else: #engine turn
print("engine turn")
eng.position(brd)
bst,pon=eng.go(movetime=300)
brd.push(bst)
inp=bst.uci()
print(brd)
print('\n')
print("Engine moved : ",inp)
print("Current FEN: "+brd.fen())
print('\n')
save_flag=True
flag=True
if save_flag==True: #saving the move to the temporary pgn file
pgn=open("/home/prabin/chess/Final/temp.pgn","w")
node=game.end()
node = node.add_variation(chess.Move.from_uci(inp))
exporter = chess.pgn.FileExporter(pgn)
game.accept(exporter)
pgn.close()
#Game Termination
if os.path.exists("/home/prabin/chess/Final/temp.pgn"):
os.remove("/home/prabin/chess/Final/temp.pgn")
print("END GAME")
|
import hmac
from hashlib import sha1
from time import time
from yarl import URL
from isilon import exceptions
def generate_presigned_uri(
user_key: str, object_uri: str, method: str = "GET", duration: int = 60
) -> str:
"""Generate a presigned url to download objects.
For more information, please see: https://docs.openstack.org/swift/latest/api/temporary_url_middleware.html
"""
uri = URL(object_uri)
if len(uri.parts) < 5 or "" in uri.parts:
raise exceptions.InvalidObjectURI(
f"Invalid URI format: {object_uri}. Please see: https://docs.openstack.org/swift/latest/api/temporary_url_middleware.html"
)
expires = int(time() + duration)
hmac_body = f"{method}\n{expires}\n{uri.path}".encode("utf8")
sig = hmac.new(user_key.encode("utf8"), hmac_body, sha1).hexdigest()
return f"{str(uri.origin())}{uri.path}?temp_url_sig={sig}&temp_url_expires={expires}&filename={uri.name}"
|
from time import sleep
def contagem(i, f, p):
print('~' * 40)
    if p == 0:  # alternative handling for p == 0
        print('step = 1')
        p = 1  # end
    if p < 0:
        p = -p  # p *= -1
    print(f'Counting from {i} to {f} in steps of {p}')
sleep(1)
if i <= f:
while i <= f:
print(i, end=' ')
i += p
sleep(0.3)
else:
while i >= f:
print(i, end=' ')
i -= p
sleep(0.3)
    print('END')
print()
# Main program
contagem(1, 10, 1)
contagem(10, 0, 2)
print('='*50)
print('Now it is your turn to customize the count!')
ini = int(input('Start: '))
fim = int(input('End: '))
while True:  # keep asking until the step is non-zero
    pas = int(input('Step: '))
    if pas != 0:
        break
    print(' * ERROR: enter a value other than 0!')
contagem(ini, fim, pas)
|
"""
The lower-level driver for the QICK library. Contains classes for interfacing with the SoC.
"""
import os
import json
from pynq import Overlay, DefaultIP, allocate
try:
import xrfclk
import xrfdc
except ImportError: # RFSoC-specific packages; absent when running off-target
    pass
import numpy as np
from pynq.lib import AxiGPIO
import time
from .parser import *
from .streamer import DataStreamer
from . import bitfile_path
# Some support functions
def format_buffer(buff):
"""
Return the I and Q values associated with a buffer value. The lower 16 bits correspond to the I value, and the upper 16 bits correspond to the Q value.
    :param buff: array of packed 32-bit I/Q words
    :type buff: numpy.ndarray
:returns:
- dataI (:py:class:`int`) - I data value
- dataQ (:py:class:`int`) - Q data value
"""
data = buff
dataI = data & 0xFFFF
dataI = dataI.astype(np.int16)
dataQ = data >> 16
dataQ = dataQ.astype(np.int16)
return dataI,dataQ
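# Hedged example: the packed word 0x7FFF0001 unpacks to I = 1 (lower 16 bits)
# and Q = 32767 (upper 16 bits):
#     format_buffer(np.array([0x7FFF0001]))  ->  (array([1]), array([32767]))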
class SocIp(DefaultIP):
"""
SocIp class
"""
REGISTERS = {}
def __init__(self, description, **kwargs):
"""
Constructor method
"""
#print("SocIp init", description)
super().__init__(description)
#self.ip = description
def write(self, offset, value):
"""
Writes a value to a register specified by an offset
:param offset: Offset value (register)
:type offset: int
:param value: value to be written
:type value: int
"""
super().write(offset, value)
def read(self, offset):
"""
Reads an offset
:param offset: Offset value
:type offset: int
"""
return super().read(offset)
def __setattr__(self, a ,v):
"""
Sets the arguments associated with a register
:param a: Register specified by an offset value
:type a: int
:param v: value to be written
:type v: int
:return: Register arguments
:rtype: *args object
"""
if a in self.__class__.REGISTERS:
super().write(4*self.__class__.REGISTERS[a], int(v))
else:
return super().__setattr__(a,v)
def __getattr__(self, a):
"""
Gets the arguments associated with a register
:param a: register name
:type a: str
:return: Register arguments
:rtype: *args object
"""
if a in self.__class__.REGISTERS:
return super().read(4*self.__class__.REGISTERS[a])
else:
return super().__getattr__(a)
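# Hedged illustration of the REGISTERS mapping: for a driver that declares
# REGISTERS = {'we_reg': 1}, the assignment `ip.we_reg = 1` writes the 32-bit
# word at byte offset 4*1, and reading `ip.we_reg` reads the same word back.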
class AxisSignalGenV4(SocIp):
"""
AxisSignalGenV4 class
AXIS Signal Generator V4 Registers.
START_ADDR_REG
WE_REG
* 0 : disable writes.
* 1 : enable writes.
"""
bindto = ['user.org:user:axis_signal_gen_v4:1.0']
REGISTERS = {'start_addr_reg':0, 'we_reg':1, 'rndq_reg':2}
def __init__(self, description, **kwargs):
"""
Constructor method
"""
super().__init__(description)
# Default registers.
self.start_addr_reg=0
self.we_reg=0
self.rndq_reg = 10
# Generics
self.N = int(description['parameters']['N'])
self.NDDS = int(description['parameters']['N_DDS'])
# Maximum number of samples
self.MAX_LENGTH = 2**self.N*self.NDDS
# Get the channel number from the IP instance name.
self.ch = int(description['fullpath'].split('_')[-1])
# Configure this driver with links to the other drivers, and the signal gen channel number.
def configure(self, axi_dma, axis_switch):
# dma
self.dma = axi_dma
# Switch
self.switch = axis_switch
# Load waveforms.
def load(self, xin_i, xin_q ,addr=0):
"""
Load waveform into I,Q envelope
:param xin_i: real part of envelope
:type xin_i: list
:param xin_q: imaginary part of envelope
:type xin_q: list
:param addr: starting address
:type addr: int
"""
# Check for equal length.
if len(xin_i) != len(xin_q):
print("%s: I/Q buffers must be the same length." % self.__class__.__name__)
return
# Check for max length.
if len(xin_i) > self.MAX_LENGTH:
print("%s: buffer length must be %d samples or less." % (self.__class__.__name__,self.MAX_LENGTH))
return
# Check for even transfer size.
if len(xin_i) %2 != 0:
raise RuntimeError("Buffer transfer length must be even number.")
# Check for max length.
if np.max(xin_i) > np.iinfo(np.int16).max or np.min(xin_i) < np.iinfo(np.int16).min:
raise ValueError("real part of envelope exceeds limits of int16 datatype")
if np.max(xin_q) > np.iinfo(np.int16).max or np.min(xin_q) < np.iinfo(np.int16).min:
raise ValueError("imaginary part of envelope exceeds limits of int16 datatype")
# Route switch to channel.
self.switch.sel(mst=self.ch)
#time.sleep(0.050)
# Format data.
xin_i = xin_i.astype(np.int16)
xin_q = xin_q.astype(np.int16)
        # Pack I into the lower 16 bits and Q into the upper 16 bits of each
        # 32-bit word; masking I avoids the sign-extension carry that the old
        # per-element `xin_i[i] + (xin_q[i] << 16)` loop suffered for negative I.
        xin = (xin_i.astype(np.int32) & 0xFFFF) | (xin_q.astype(np.int32) << 16)
# Define buffer.
self.buff = allocate(shape=len(xin), dtype=np.int32)
np.copyto(self.buff, xin)
################
### Load I/Q ###
################
# Enable writes.
self.wr_enable(addr)
# DMA data.
self.dma.sendchannel.transfer(self.buff)
self.dma.sendchannel.wait()
# Disable writes.
self.wr_disable()
def wr_enable(self,addr=0):
"""
Enable WE reg
"""
self.start_addr_reg = addr
self.we_reg = 1
def wr_disable(self):
"""
Disable WE reg
"""
self.we_reg = 0
def rndq(self, sel_):
"""
TODO: remove this function. This functionality was removed from IP block.
"""
self.rndq_reg = sel_
class AxisReadoutV2(SocIp):
"""
AxisReadoutV2 class
Registers.
FREQ_REG : 32-bit.
PHASE_REG : 32-bit.
NSAMP_REG : 16-bit.
OUTSEL_REG : 2-bit.
* 0 : product.
* 1 : dds.
* 2 : bypass.
MODE_REG : 1-bit.
* 0 : NSAMP.
* 1 : Periodic.
WE_REG : enable/disable to perform register update.
:param ip: IP address
:type ip: str
:param fs: sampling frequency in MHz
:type fs: float
"""
bindto = ['user.org:user:axis_readout_v2:1.0']
REGISTERS = {'freq_reg':0, 'phase_reg':1, 'nsamp_reg':2, 'outsel_reg':3, 'mode_reg': 4, 'we_reg':5}
# Bits of DDS.
B_DDS = 32
def __init__(self, description, **kwargs):
"""
Constructor method
"""
super().__init__(description)
# Default registers.
self.freq_reg = 0
self.phase_reg = 0
self.nsamp_reg = 10
self.outsel_reg = 0
self.mode_reg = 1
# Register update.
self.update()
# Get the channel number from the IP instance name.
self.ch = int(description['fullpath'].split('_')[-1])
# Configure this driver with the sampling frequency.
def configure(self, fstep, regmult):
# Frequency step for rounding.
self.fstep = fstep
# Integer multiplier.
self.regmult = regmult
# Sampling frequency.
self.fs = 2**self.B_DDS * fstep/regmult
def update(self):
"""
Update register values
"""
self.we_reg = 1
self.we_reg = 0
def set_out(self,sel="product"):
"""
Select readout signal output
:param sel: select mux control
:type sel: int
"""
self.outsel_reg={"product":0,"dds":1,"input":2}[sel]
# if sel is "product":
# self.outsel_reg = 0
# elif sel is "dds":
# self.outsel_reg = 1
# elif sel is "input":
# self.outsel_reg = 2
# else:
# print("AxisReadoutV2: %s output unknown" % sel)
# Register update.
self.update()
def set_freq(self, f):
"""
Set frequency register
:param f: frequency in MHz
:type f: float
"""
        # Sanity check: silently ignore frequencies at or above the sampling rate.
        if f < self.fs:
k_i = np.int64(f/self.fstep)
self.freq_reg = k_i * self.regmult
# Register update.
self.update()
def set_freq_int(self, f_int):
"""
Set frequency register (integer version)
:param f_int: frequency value register
:type f_int: int
"""
self.freq_reg = f_int
# Register update.
self.update()
class AxisAvgBuffer(SocIp):
"""
AxisAvgBuffer class
Registers.
AVG_START_REG
* 0 : Averager Disabled.
* 1 : Averager Enabled (started by external trigger).
AVG_ADDR_REG : start address to write results.
AVG_LEN_REG : number of samples to be added.
AVG_DR_START_REG
* 0 : do not send any data.
* 1 : send data using m0_axis.
AVG_DR_ADDR_REG : start address to read data.
AVG_DR_LEN_REG : number of samples to be read.
BUF_START_REG
* 0 : Buffer Disabled.
* 1 : Buffer Enabled (started by external trigger).
BUF_ADDR_REG : start address to write results.
BUF_LEN_REG : number of samples to be buffered.
BUF_DR_START_REG
* 0 : do not send any data.
* 1 : send data using m1_axis.
BUF_DR_ADDR_REG : start address to read data.
BUF_DR_LEN_REG : number of samples to be read.
:param ip: IP address
:type ip: str
:param axi_dma_avg: dma block for average buffers
:type axi_dma_avg: str
:param switch_avg: switch block for average buffers
:type switch_avg: str
:param axi_dma_buf: dma block for raw buffers
:type axi_dma_buf: str
:param switch_buf: switch block for raw buffers
:type switch_buf: str
:param channel: readout channel selection
:type channel: int
"""
bindto = ['user.org:user:axis_avg_buffer:1.0']
REGISTERS = {'avg_start_reg' : 0,
'avg_addr_reg' : 1,
'avg_len_reg' : 2,
'avg_dr_start_reg' : 3,
'avg_dr_addr_reg' : 4,
'avg_dr_len_reg' : 5,
'buf_start_reg' : 6,
'buf_addr_reg' : 7,
'buf_len_reg' : 8,
'buf_dr_start_reg' : 9,
'buf_dr_addr_reg' : 10,
'buf_dr_len_reg' : 11}
def __init__(self, description, **kwargs):
"""
Constructor method
"""
super().__init__(description)
# Default registers.
self.avg_start_reg = 0
self.avg_dr_start_reg = 0
self.buf_start_reg = 0
self.buf_dr_start_reg = 0
# Generics
self.B = int(description['parameters']['B'])
self.N_AVG = int(description['parameters']['N_AVG'])
self.N_BUF = int(description['parameters']['N_BUF'])
# Maximum number of samples
self.AVG_MAX_LENGTH = 2**self.N_AVG
self.BUF_MAX_LENGTH = 2**self.N_BUF
# Preallocate memory buffers for DMA transfers.
self.avg_buff = allocate(shape=self.AVG_MAX_LENGTH, dtype=np.int64)
self.buf_buff = allocate(shape=self.BUF_MAX_LENGTH, dtype=np.int32)
# Get the channel number from the IP instance name.
self.ch = int(description['fullpath'].split('_')[-1])
# Configure this driver with links to the other drivers.
def configure(self, axi_dma_avg, switch_avg, axi_dma_buf, switch_buf):
# DMAs.
self.dma_avg = axi_dma_avg
self.dma_buf = axi_dma_buf
# Switches.
self.switch_avg = switch_avg
self.switch_buf = switch_buf
def config(self,address=0,length=100):
"""
Configure both average and raw buffers
:param addr: Start address of first capture
:type addr: int
:param length: window size
:type length: int
"""
# Configure averaging and buffering to the same address and length.
self.config_avg(address=address,length=length)
self.config_buf(address=address,length=length)
def enable(self):
"""
Enable both average and raw buffers
"""
# Enable both averager and buffer.
self.enable_avg()
self.enable_buf()
def config_avg(self,address=0,length=100):
"""
Configure average buffer data from average and buffering readout block
:param addr: Start address of first capture
:type addr: int
:param length: window size
:type length: int
"""
# Disable averaging.
self.disable_avg()
# Set registers.
self.avg_addr_reg = address
self.avg_len_reg = length
def transfer_avg(self,address=0,length=100):
"""
Transfer average buffer data from average and buffering readout block.
:param addr: starting reading address
:type addr: int
:param length: number of samples
:type length: int
:return: I,Q pairs
:rtype: list
"""
if length %2 != 0:
raise RuntimeError("Buffer transfer length must be even number.")
if length >= self.AVG_MAX_LENGTH:
raise RuntimeError("length=%d longer than %d"%(length, self.AVG_MAX_LENGTH))
# Route switch to channel.
self.switch_avg.sel(slv=self.ch)
# Set averager data reader address and length.
self.avg_dr_addr_reg = address
self.avg_dr_len_reg = length
# Start send data mode.
self.avg_dr_start_reg = 1
# DMA data.
buff = self.avg_buff
self.dma_avg.recvchannel.transfer(buff,nbytes=length*8)
self.dma_avg.recvchannel.wait()
if self.dma_avg.recvchannel.transferred != length*8:
raise RuntimeError("Requested %d samples but only got %d from DMA" % (length, self.dma_avg.recvchannel.transferred//8))
# Stop send data mode.
self.avg_dr_start_reg = 0
# Format:
# -> lower 32 bits: I value.
# -> higher 32 bits: Q value.
data = buff[:length]
dataI = data & 0xFFFFFFFF
dataQ = data >> 32
return np.stack((dataI,dataQ)).astype(np.int32)
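    # Illustrative acquisition sequence (a sketch; triggering the capture is
    # firmware-specific and not shown here):
    #
    #   buf.config_avg(address=0, length=1000)           # set capture window
    #   buf.enable_avg()                                 # arm the averager
    #   # ...run the experiment so data is captured...
    #   iq = buf.transfer_avg(address=0, length=1000)    # (2, 1000) int32 array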
def enable_avg(self):
"""
Enable average buffer capture
"""
self.avg_start_reg = 1
def disable_avg(self):
"""
Disable average buffer capture
"""
self.avg_start_reg = 0
def config_buf(self,address=0,length=100):
"""
Configure raw buffer data from average and buffering readout block
        :param address: Start address of first capture
        :type address: int
:param length: window size
:type length: int
"""
# Disable buffering.
self.disable_buf()
# Set registers.
self.buf_addr_reg = address
self.buf_len_reg = length
def transfer_buf(self,address=0,length=100):
"""
Transfer raw buffer data from average and buffering readout block
        :param address: starting reading address
        :type address: int
:param length: number of samples
:type length: int
:return: I,Q pairs
:rtype: list
"""
        if length % 2 != 0:
            raise RuntimeError("Buffer transfer length must be an even number.")
if length >= self.BUF_MAX_LENGTH:
raise RuntimeError("length=%d longer or equal to %d"%(length, self.BUF_MAX_LENGTH))
# Route switch to channel.
self.switch_buf.sel(slv=self.ch)
#time.sleep(0.050)
# Set buffer data reader address and length.
self.buf_dr_addr_reg = address
self.buf_dr_len_reg = length
# Start send data mode.
self.buf_dr_start_reg = 1
# DMA data.
buff = self.buf_buff
self.dma_buf.recvchannel.transfer(buff,nbytes=length*4)
self.dma_buf.recvchannel.wait()
if self.dma_buf.recvchannel.transferred != length*4:
raise RuntimeError("Requested %d samples but only got %d from DMA" % (length, self.dma_buf.recvchannel.transferred//4))
# Stop send data mode.
self.buf_dr_start_reg = 0
# Format:
# -> lower 16 bits: I value.
# -> higher 16 bits: Q value.
data = buff[:length]
dataI = data & 0xFFFF
dataQ = data >> 16
return np.stack((dataI,dataQ)).astype(np.int16)
def enable_buf(self):
"""
Enable raw buffer capture
"""
self.buf_start_reg = 1
def disable_buf(self):
"""
Disable raw buffer capture
"""
self.buf_start_reg = 0
class AxisTProc64x32_x8(SocIp):
"""
AxisTProc64x32_x8 class
AXIS tProcessor registers:
START_SRC_REG
* 0 : internal start.
* 1 : external start.
START_REG
* 0 : stop.
* 1 : start.
MEM_MODE_REG
* 0 : AXIS Read (from memory to m0_axis)
* 1 : AXIS Write (from s0_axis to memory)
MEM_START_REG
* 0 : Stop.
* 1 : Execute operation (AXIS)
MEM_ADDR_REG : starting memory address for AXIS read/write mode.
MEM_LEN_REG : number of samples to be transferred in AXIS read/write mode.
DMEM: The internal data memory is 2^DMEM_N samples, 32 bits each.
    The memory can be accessed with single reads/writes from the AXI interface. The lower 256 bytes are reserved for registers,
    so the memory itself is accessed in the upper section (beyond 256 bytes), and byte-to-sample address conversion needs to be performed.
    The other method is to DMA in and out. There the access is direct, so no conversion is needed.
There is an arbiter to ensure data coherency and avoid blocking transactions.
:param ip: IP address
:type ip: str
:param mem: memory address
:type mem: int
:param axi_dma: axi_dma address
:type axi_dma: int
"""
bindto = ['user.org:user:axis_tproc64x32_x8:1.0']
REGISTERS = {'start_src_reg' : 0,
'start_reg' : 1,
'mem_mode_reg' : 2,
'mem_start_reg' : 3,
'mem_addr_reg' : 4,
'mem_len_reg' : 5}
# Reserved lower memory section for register access.
DMEM_OFFSET = 256
def __init__(self, description):
"""
Constructor method
"""
super().__init__(description)
# Default registers.
# start_src_reg = 0 : internal start.
# start_reg = 0 : stopped.
# mem_mode_reg = 0 : axis read.
# mem_start_reg = 0 : axis operation stopped.
# mem_addr_reg = 0 : start address = 0.
# mem_len_reg = 100 : default length.
self.start_src_reg = 0
self.start_reg = 0
self.mem_mode_reg = 0
self.mem_start_reg = 0
self.mem_addr_reg = 0
self.mem_len_reg = 100
# Generics.
self.DMEM_N = int(description['parameters']['DMEM_N'])
self.PMEM_N = int(description['parameters']['PMEM_N'])
# Configure this driver with links to its memory and DMA.
def configure(self, mem, axi_dma):
# Program memory.
self.mem = mem
# dma
self.dma = axi_dma
def start_src(self,src=0):
"""
Sets the start source of tProc
:param src: start source
:type src: int
"""
self.start_src_reg = src
def start(self):
"""
Start tProc from register
"""
self.start_reg = 1
def stop(self):
"""
Stop tProc from register
"""
self.start_reg = 0
def load_bin_program(self, binprog):
for ii,inst in enumerate(binprog):
dec_low = inst & 0xffffffff
dec_high = inst >> 32
self.mem.write(8*ii, value=int(dec_low))
self.mem.write(4*(2*ii+1), value=int(dec_high))
def load_qick_program(self, prog, debug= False):
"""
:param prog: the QickProgram to load
:type prog: str
:param debug: Debug option
:type debug: bool
"""
self.load_bin_program(prog.compile(debug=debug))
def load_program(self,prog="prog.asm",fmt="asm"):
"""
        Loads a tProc program. If it is an asm program, it is compiled first.
:param prog: program file name
:type prog: string
:param fmt: file format
:type fmt: string
"""
# Binary file format.
if fmt == "bin":
# Read binary file from disk.
fd = open(prog,"r")
# Write memory.
addr = 0
for line in fd:
                line = line.strip("\r\n")
                dec = int(line, 2)
dec_low = dec & 0xffffffff
dec_high = dec >> 32
self.mem.write(addr,value=int(dec_low))
addr = addr + 4
self.mem.write(addr,value=int(dec_high))
addr = addr + 4
# Asm file.
elif fmt == "asm":
# Compile program.
progList = parse_to_bin(prog)
# Load Program Memory.
addr = 0
for dec in progList:
#print ("@" + str(addr) + ": " + str(dec))
dec_low = dec & 0xffffffff
dec_high = dec >> 32
self.mem.write(addr,value=int(dec_low))
addr = addr + 4
self.mem.write(addr,value=int(dec_high))
addr = addr + 4
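    # Illustrative "bin" file contents (an assumption about the format this
    # loader expects): one 64-bit instruction per line, written as a binary
    # string, e.g.
    #
    #   0000000000000000000000000000000000000000000000000000000000000000
    #
    # Each line is parsed with int(line, 2) and split into two 32-bit words
    # written to consecutive memory addresses.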
def single_read(self, addr):
"""
Reads one sample of tProc data memory using AXI access
:param addr: reading address
:type addr: int
:param data: value to be written
:type data: int
:return: requested value
:rtype: int
"""
# Address should be translated to upper map.
addr_temp = 4*addr + self.DMEM_OFFSET
# Read data.
data = self.read(addr_temp)
return data
def single_write(self, addr=0, data=0):
"""
Writes one sample of tProc data memory using AXI access
:param addr: writing address
:type addr: int
:param data: value to be written
:type data: int
"""
# Address should be translated to upper map.
addr_temp = 4*addr + self.DMEM_OFFSET
# Write data.
self.write(addr_temp,value=int(data))
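        # For example, data-memory sample 10 maps to AXI byte address
        # 4*10 + DMEM_OFFSET = 296: samples are 4 bytes wide and the first
        # 256 bytes of the address map are reserved for registers.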
def load_dmem(self, buff_in, addr=0):
"""
Writes tProc data memory using DMA
:param buff_in: Input buffer
:type buff_in: int
:param addr: Starting destination address
:type addr: int
"""
# Length.
length = len(buff_in)
# Configure dmem arbiter.
self.mem_mode_reg = 1
self.mem_addr_reg = addr
self.mem_len_reg = length
# Define buffer.
self.buff = allocate(shape=length, dtype=np.int32)
# Copy buffer.
np.copyto(self.buff,buff_in)
# Start operation on block.
self.mem_start_reg = 1
# DMA data.
self.dma.sendchannel.transfer(self.buff)
self.dma.sendchannel.wait()
# Set block back to single mode.
self.mem_start_reg = 0
def read_dmem(self, addr=0, length=100):
"""
Reads tProc data memory using DMA
:param addr: Starting address
:type addr: int
:param length: Number of samples
:type length: int
:return: List of memory data
:rtype: list
"""
# Configure dmem arbiter.
self.mem_mode_reg = 0
self.mem_addr_reg = addr
self.mem_len_reg = length
# Define buffer.
buff = allocate(shape=length, dtype=np.int32)
# Start operation on block.
self.mem_start_reg = 1
# DMA data.
self.dma.recvchannel.transfer(buff)
self.dma.recvchannel.wait()
# Set block back to single mode.
self.mem_start_reg = 0
return buff
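    # Illustrative round trip through tProc data memory (a sketch):
    #
    #   tproc.load_dmem(np.arange(16, dtype=np.int32), addr=0)
    #   readback = tproc.read_dmem(addr=0, length=16)
    #   assert (np.asarray(readback) == np.arange(16)).all()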
class AxisSwitch(SocIp):
"""
AxisSwitch class to control Xilinx AXI-Stream switch IP
:param ip: IP address
:type ip: str
:param nslave: Number of slave interfaces
:type nslave: int
:param nmaster: Number of master interfaces
:type nmaster: int
"""
bindto = ['xilinx.com:ip:axis_switch:1.1']
REGISTERS = {'ctrl': 0x0, 'mix_mux': 0x040}
def __init__(self, description, **kwargs):
"""
Constructor method
"""
super().__init__(description)
# Number of slave interfaces.
self.NSL = int(description['parameters']['NUM_SI'])
# Number of master interfaces.
self.NMI = int(description['parameters']['NUM_MI'])
# Init axis_switch.
self.ctrl = 0
self.disable_ports()
def disable_ports(self):
"""
Disables ports
"""
for ii in range(self.NMI):
offset = self.REGISTERS['mix_mux'] + 4*ii
self.write(offset,0x80000000)
def sel(self, mst=0, slv=0):
"""
Digitally connects a master interface with a slave interface
:param mst: Master interface
:type mst: int
:param slv: Slave interface
:type slv: int
"""
# Sanity check.
        if slv > self.NSL-1:
            print("%s: Slave number %d does not exist in block." % (self.__class__.__name__, slv))
            return
        if mst > self.NMI-1:
            print("%s: Master number %d does not exist in block." % (self.__class__.__name__, mst))
            return
# Disable register update.
self.ctrl = 0
# Disable all MI ports.
self.disable_ports()
# MI[mst] -> SI[slv]
offset = self.REGISTERS['mix_mux'] + 4*mst
self.write(offset,slv)
# Enable register update.
self.ctrl = 2
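    # Example (illustrative): route slave port 2 to master port 0, so the
    # stream entering SI[2] is forwarded out of MI[0]:
    #
    #   switch.sel(mst=0, slv=2)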
class QickConfig():
"""
Uses the QICK configuration to convert frequencies and clock delays.
If running on the QICK, you don't need to use this class - the QickSoc class has all of the same methods.
If running remotely, you may want to initialize a QickConfig from a JSON file.
:param cfg: config dictionary, or path to JSON file
:type cfg: dict or str
"""
def __init__(self, cfg):
if isinstance(cfg,str):
with open(cfg) as f:
self.cfg = json.load(f)
else:
self.cfg = cfg
self._prepare_freq2reg()
def __repr__(self):
return self.description()
def description(self):
"""
Generate a printable description of the QICK configuration.
:return: description
:rtype: str
"""
lines=[]
lines.append("\n\tBoard: " + self.cfg['board'])
lines.append("\n\tGlobal clocks (MHz): DAC fabric %.3f, ADC fabric %.3f, reference %.3f"%(
self.cfg['fs_proc'], self.cfg['adc_fabric_freq'], self.cfg['refclk_freq']))
if isinstance(self,QickSoc):
lines.append("\n\tGenerator switch: %d to %d"%(
self.switch_gen.NSL, self.switch_gen.NMI))
lines.append("\n\tAverager switch: %d to %d"%(
self.switch_avg.NSL, self.switch_avg.NMI))
lines.append("\n\tBuffer switch: %d to %d"%(
self.switch_buf.NSL, self.switch_buf.NMI))
lines.append("\n\t%d DAC channels:"%(len(self.dac_blocks)))
for iCh, (iTile,iBlock,fs) in enumerate(self.dac_blocks):
lines.append("\t%d:\ttile %d, channel %d, fs=%.3f MHz"%(iCh,iTile,iBlock,fs))
lines.append("\n\t%d ADC channels:"%(len(self.adc_blocks)))
for iCh, (iTile,iBlock,fs) in enumerate(self.adc_blocks):
lines.append("\t%d:\ttile %d, channel %d, fs=%.3f MHz"%(iCh,iTile,iBlock,fs))
else:
lines.append("\n\tSampling freqs (MHz): DAC %.3f, ADC %.3f"%(
self.cfg['fs_dac'], self.cfg['fs_adc']))
lines.append("\n\tRefclk multiplier factors: %d (DAC), %d (ADC)"%(
self.fsmult_dac, self.fsmult_adc))
lines.append("\tFrequency resolution step: %.3f Hz"%(
self.fstep_lcm*1e6))
if isinstance(self,QickSoc):
lines.append("\n\t%d signal generators: max length %d samples"%(len(self.gens),
self.gens[0].MAX_LENGTH))
lines.append("\n\t%d readout blocks"%(len(self.readouts)))
lines.append("\n\t%d average+buffer blocks: max length %d samples (averages), %d (decimated buffer)"%(len(self.cfg['avg_bufs']),
self.cfg['avg_bufs'][0]['avg_maxlen'],
self.cfg['avg_bufs'][0]['buf_maxlen']))
lines.append("\n\ttProc: %d words program memory, %d words data memory"%(2**self.tproc.PMEM_N, 2**self.tproc.DMEM_N))
lines.append("\t\tprogram RAM: %d bytes"%(self.tproc.mem.mmio.length))
return "\nQICK configuration:\n"+"\n".join(lines)
def dump_cfg(self):
"""
Generate a JSON description of the QICK configuration.
You can save this string to a file and load it to recreate the QickConfig.
:return: configuration in JSON format
:rtype: str
"""
return json.dumps(self.cfg, indent=4)
def _prepare_freq2reg(self):
# Calculate least common multiple of DAC and ADC sampling frequencies.
# Typically fs_dac = fs_adc*2, so this is just the DAC frequency.
# This is used when converting frequencies to integers.
# clock multipliers from refclk to DAC/ADC - always integer
self.fsmult_dac = round(self.cfg['fs_dac']/self.cfg['refclk_freq'])
self.fsmult_adc = round(self.cfg['fs_adc']/self.cfg['refclk_freq'])
# reg = f/fstep
#fstep_dac = self.fs_proc * mult_dac / 2**b_dac
#fstep_adc = self.fs_proc * mult_adc / 2**b_adc
# Calculate a common fstep_lcm, which is divisible by both the DAC and ADC step sizes.
# We should only use frequencies that are evenly divisible by fstep_lcm.
b_max = max(self.cfg['b_dac'],self.cfg['b_adc'])
mult_lcm = np.lcm(self.fsmult_dac * 2**(b_max - self.cfg['b_dac']),
self.fsmult_adc * 2**(b_max - self.cfg['b_adc']))
self.fstep_lcm = self.cfg['refclk_freq'] * mult_lcm / 2**b_max
# Calculate the integer factors relating fstep_lcm to the DAC and ADC step sizes.
self.regmult_dac = int(2**(b_max-self.cfg['b_dac']) * round(mult_lcm/self.fsmult_dac))
self.regmult_adc = int(2**(b_max-self.cfg['b_adc']) * round(mult_lcm/self.fsmult_adc))
#print(self.fstep_lcm, self.regmult_dac, self.regmult_adc)
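        # Worked example (all numbers are assumptions, not from any
        # particular bitfile): with refclk_freq = 409.6 MHz,
        # fs_dac = 6553.6 MHz (fsmult_dac = 16), fs_adc = 3276.8 MHz
        # (fsmult_adc = 8), and b_dac = b_adc = 32:
        #   b_max = 32, mult_lcm = lcm(16, 8) = 16
        #   fstep_lcm = 409.6 * 16 / 2**32 MHz ~= 1.526e-6 MHz (~1.53 Hz)
        #   regmult_dac = 1, regmult_adc = 2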
def freq2reg(self, f):
"""
Converts frequency in MHz to tProc DAC register value.
:param f: frequency (MHz)
:type f: float
        :return: frequency as a tProc DAC register value
        :rtype: int
"""
k_i = np.int64(f/self.fstep_lcm)
return k_i * self.regmult_dac
def reg2freq(self, r):
"""
Converts frequency from format readable by tProc DAC to MHz.
:param r: frequency in tProc DAC format
:type r: float
:return: Re-formatted frequency in MHz
:rtype: float
"""
return r*self.fstep_lcm/self.regmult_dac
def reg2freq_adc(self, r):
"""
Converts frequency from format readable by tProc ADC to MHz.
:param r: frequency in tProc ADC format
:type r: float
:return: Re-formatted frequency in MHz
:rtype: float
"""
return r*self.fstep_lcm/self.regmult_adc
def adcfreq(self, f):
"""
        Takes a frequency and rounds it to the nearest valid ADC DDS frequency.
        :param f: frequency (MHz)
        :type f: float
        :return: rounded frequency (MHz)
        :rtype: float
"""
return np.round(f/self.fstep_lcm) * self.fstep_lcm
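    # Example (illustrative, using the assumed plan above where
    # fstep_lcm ~= 1.526e-6 MHz): adcfreq(100.0) returns the multiple of
    # fstep_lcm nearest to 100 MHz, so that DAC and ADC DDS frequencies
    # can be matched exactly on the common frequency grid.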
def cycles2us(self, cycles):
"""
Converts tProc clock cycles to microseconds.
:param cycles: Number of tProc clock cycles
:type cycles: int
:return: Number of microseconds
:rtype: float
"""
return cycles/self.cfg['fs_proc']
def us2cycles(self, us):
"""
Converts microseconds to integer number of tProc clock cycles.
        :param us: Number of microseconds
        :type us: float
:return: Number of tProc clock cycles
:rtype: int
"""
return int(us*self.cfg['fs_proc'])
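    # Example (illustrative): with fs_proc = 409.6 MHz (an assumed value),
    # us2cycles(1.0) = int(409.6) = 409 cycles, and
    # cycles2us(409) ~= 0.9985 us; note the conversion truncates rather
    # than rounds.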
class QickSoc(Overlay, QickConfig):
"""
    QickSoc class. This class creates all the objects needed to access the system blocks.
:param bitfile: Name of the bitfile
:type bitfile: str
:param force_init_clks: Whether the board clocks are re-initialized
:type force_init_clks: bool
:param ignore_version: Whether version discrepancies between PYNQ build and firmware build are ignored
:type ignore_version: bool
"""
# The following constants are no longer used. Some of the values may not match the bitfile.
#fs_adc = 384*8 # MHz
#fs_dac = 384*16 # MHz
#pulse_mem_len_IQ = 65536 # samples for I, Q
#ADC_decim_buf_len_IQ = 1024 # samples for I, Q
#ADC_accum_buf_len_IQ = 16384 # samples for I, Q
#tProc_instruction_len_bytes = 8
#tProc_prog_mem_samples = 8000
#tProc_prog_mem_size_bytes_tot = tProc_instruction_len_bytes*tProc_prog_mem_samples
#tProc_data_len_bytes = 4
#tProc_data_mem_samples = 4096
#tProc_data_mem_size_bytes_tot = tProc_data_len_bytes*tProc_data_mem_samples
#tProc_stack_len_bytes = 4
#tProc_stack_samples = 256
#tProc_stack_size_bytes_tot = tProc_stack_len_bytes*tProc_stack_samples
#phase_resolution_bits = 32
#gain_resolution_signed_bits = 16
# Constructor.
def __init__(self, bitfile=None, force_init_clks=False,ignore_version=True, **kwargs):
"""
Constructor method
"""
# Load bitstream.
        if bitfile is None:
Overlay.__init__(self, bitfile_path(), ignore_version=ignore_version, **kwargs)
else:
Overlay.__init__(self, bitfile, ignore_version=ignore_version, **kwargs)
# Configuration dictionary
self.cfg = {}
self.cfg['board'] = os.environ["BOARD"]
# RF data converter (for configuring ADCs and DACs)
self.rf = self.usp_rf_data_converter_0
# Read the config to get a list of enabled ADCs and DACs, and the sampling frequencies.
self.list_rf_blocks(self.ip_dict['usp_rf_data_converter_0']['parameters'])
# Configure PLLs if requested, or if any ADC/DAC is not locked.
if force_init_clks:
self.set_all_clks()
else:
dac_locked = [self.rf.dac_tiles[iTile].PLLLockStatus==2 for iTile in self.dac_tiles]
adc_locked = [self.rf.adc_tiles[iTile].PLLLockStatus==2 for iTile in self.adc_tiles]
if not (all(dac_locked) and all(adc_locked)):
self.set_all_clks()
dac_locked = [self.rf.dac_tiles[iTile].PLLLockStatus==2 for iTile in self.dac_tiles]
adc_locked = [self.rf.adc_tiles[iTile].PLLLockStatus==2 for iTile in self.adc_tiles]
if not (all(dac_locked) and all(adc_locked)):
print("Not all DAC and ADC PLLs are locked. You may want to repeat the initialization of the QickSoc.")
# AXIS Switch to upload samples into Signal Generators.
self.switch_gen = self.axis_switch_gen
# AXIS Switch to read samples from averager.
self.switch_avg = self.axis_switch_avg
# AXIS Switch to read samples from buffer.
self.switch_buf = self.axis_switch_buf
# Signal generators.
self.gens = []
# Readout blocks.
self.readouts = []
# Average + Buffer blocks.
self.avg_bufs = []
# Populate the lists with the registered IP blocks.
for key,val in self.ip_dict.items():
if (val['driver'] == AxisSignalGenV4):
self.gens.append(getattr(self,key))
elif (val['driver'] == AxisReadoutV2):
self.readouts.append(getattr(self,key))
elif (val['driver'] == AxisAvgBuffer):
self.avg_bufs.append(getattr(self,key))
# Sanity check: we should have the same number of signal generators as DACs.
if len(self.dac_blocks) != len(self.gens):
raise RuntimeError("We have %d DACs but %d signal generators."%(len(self.dac_blocks),len(self.gens)))
# Sanity check: we should have the same number of readouts and buffer blocks as ADCs.
if len(self.adc_blocks) != len(self.readouts):
raise RuntimeError("We have %d ADCs but %d readout blocks."%(len(self.adc_blocks),len(self.readouts)))
if len(self.adc_blocks) != len(self.avg_bufs):
raise RuntimeError("We have %d ADCs but %d avg/buffer blocks."%(len(self.adc_blocks),len(self.avg_bufs)))
# Sort the lists by channel number.
        # Typically they are already in order, but we sort to be safe.
self.gens.sort(key=lambda x: x.ch)
self.readouts.sort(key=lambda x: x.ch)
self.avg_bufs.sort(key=lambda x: x.ch)
# Fill the config dictionary with driver parameters.
self.cfg['b_dac'] = 32
self.cfg['b_adc'] = self.readouts[0].B_DDS #typically 32
self.cfg['avg_bufs'] = [{'avg_maxlen':buf.AVG_MAX_LENGTH,
'buf_maxlen':buf.BUF_MAX_LENGTH}
for buf in self.avg_bufs]
# tProcessor, 64-bit instruction, 32-bit registers, x8 channels.
self._tproc = self.axis_tproc64x32_x8_0
self._tproc.configure(self.axi_bram_ctrl_0, self.axi_dma_tproc)
#print(self.description())
self._streamer = DataStreamer(self)
# Initialize the configuration (this will calculate the frequency plan)
QickConfig.__init__(self, self.cfg)
# Configure the drivers.
for gen in self.gens:
gen.configure(self.axi_dma_gen, self.switch_gen)
for readout in self.readouts:
readout.configure(self.fstep_lcm, self.regmult_adc)
for buf in self.avg_bufs:
buf.configure(self.axi_dma_avg, self.switch_avg,
self.axi_dma_buf, self.switch_buf)
@property
def tproc(self):
return self._tproc
@property
def streamer(self):
return self._streamer
def list_rf_blocks(self, rf_config):
"""
Lists the enabled ADCs and DACs and get the sampling frequencies.
XRFdc_CheckBlockEnabled in xrfdc_ap.c is not accessible from the Python interface to the XRFdc driver.
This re-implements that functionality.
"""
hs_adc = rf_config['C_High_Speed_ADC']=='1'
self.dac_tiles = []
self.dac_blocks = []
self.adc_tiles = []
self.adc_blocks = []
dac_fabric_freqs = []
adc_fabric_freqs = []
refclk_freqs = []
for iTile,tile in enumerate(self.rf.dac_tiles):
if rf_config['C_DAC%d_Enable'%(iTile)]!='1':
continue
self.dac_tiles.append(iTile)
dac_fabric_freqs.append(float(rf_config['C_DAC%d_Fabric_Freq'%(iTile)]))
refclk_freqs.append(float(rf_config['C_DAC%d_Refclk_Freq'%(iTile)]))
for iBlock,block in enumerate(tile.blocks):
if rf_config['C_DAC_Slice%d%d_Enable'%(iTile,iBlock)]!='true':
continue
fs = block.BlockStatus['SamplingFreq']*1000
self.dac_blocks.append((iTile,iBlock,fs))
for iTile,tile in enumerate(self.rf.adc_tiles):
if rf_config['C_ADC%d_Enable'%(iTile)]!='1':
continue
self.adc_tiles.append(iTile)
adc_fabric_freqs.append(float(rf_config['C_ADC%d_Fabric_Freq'%(iTile)]))
refclk_freqs.append(float(rf_config['C_ADC%d_Refclk_Freq'%(iTile)]))
for iBlock,block in enumerate(tile.blocks):
if hs_adc:
if iBlock>=2 or rf_config['C_ADC_Slice%d%d_Enable'%(iTile,2*iBlock)]!='true':
continue
else:
if rf_config['C_ADC_Slice%d%d_Enable'%(iTile,iBlock)]!='true':
continue
fs = block.BlockStatus['SamplingFreq']*1000
self.adc_blocks.append((iTile,iBlock,fs))
def get_common_freq(freqs):
"""
Check that all elements of the list are equal, and return the common value.
"""
if len(set(freqs))!=1:
raise RuntimeError("Unexpected frequencies:",freqs)
return freqs[0]
self.cfg['fs_dac'] = get_common_freq([block[2] for block in self.dac_blocks])
self.cfg['fs_adc'] = get_common_freq([block[2] for block in self.adc_blocks])
# Assume the tProc has the same frequency as the DAC fabric.
self.cfg['fs_proc'] = get_common_freq(dac_fabric_freqs)
self.cfg['adc_fabric_freq'] = get_common_freq(adc_fabric_freqs)
self.cfg['refclk_freq'] = get_common_freq(refclk_freqs)
def set_all_clks(self):
"""
Resets all the board clocks
"""
if self.cfg['board']=='ZCU111':
print("resetting clocks:",self.refclk_freq)
xrfclk.set_all_ref_clks(self.refclk_freq)
elif self.cfg['board']=='ZCU216':
            lmk_freq = self.cfg['refclk_freq']
            lmx_freq = self.cfg['refclk_freq']*2
print("resetting clocks:",lmk_freq, lmx_freq)
xrfclk.set_ref_clks(lmk_freq=lmk_freq, lmx_freq=lmx_freq)
def get_decimated(self, ch, address=0, length=None):
"""
Acquires data from the readout decimated buffer
:param ch: ADC channel
:type ch: int
:param address: Address of data
:type address: int
:param length: Buffer transfer length
:type length: int
:return: List of I and Q decimated arrays
:rtype: list
"""
if length is None:
# this default will always cause a RuntimeError
# TODO: remove the default, or pick a better fallback value
length = self.avg_bufs[ch].BUF_MAX_LENGTH
# we must transfer an even number of samples, so we pad the transfer size
transfer_len = length + length%2
data = self.avg_bufs[ch].transfer_buf(address,transfer_len)
# we remove the padding here
return data[:,:length].astype(float)
def get_accumulated(self, ch, address=0, length=None):
"""
Acquires data from the readout accumulated buffer
:param ch: ADC channel
:type ch: int
:param address: Address of data
:type address: int
:param length: Buffer transfer length
:type length: int
:returns:
- di[:length] (:py:class:`list`) - list of accumulated I data
- dq[:length] (:py:class:`list`) - list of accumulated Q data
"""
if length is None:
# this default will always cause a RuntimeError
# TODO: remove the default, or pick a better fallback value
length = self.avg_bufs[ch].AVG_MAX_LENGTH
# we must transfer an even number of samples, so we pad the transfer size
transfer_len = length + length%2
data = self.avg_bufs[ch].transfer_avg(address=address,length=transfer_len)
# we remove the padding here
return data[:,:length]
def configure_readout(self, ch, output, frequency):
"""Configure readout channel output style and frequency
:param ch: Channel to configure
:type ch: int
        :param output: output type, one of 'product', 'dds', 'input'
        :type output: str
        :param frequency: readout frequency
        :type frequency: float
        """
self.readouts[ch].set_out(sel=output)
self.readouts[ch].set_freq(frequency)
def config_avg(self, ch, address=0, length=1, enable=True):
"""Configure and optionally enable accumulation buffer
:param ch: Channel to configure
:type ch: int
:param address: Starting address of buffer
:type address: int
:param length: length of buffer (how many samples to take)
:type length: int
:param enable: True to enable buffer
:type enable: bool
"""
self.avg_bufs[ch].config_avg(address, length)
if enable:
self.enable_avg(ch)
def enable_avg(self, ch):
self.avg_bufs[ch].enable_avg()
def config_buf(self, ch, address=0, length=1, enable=True):
"""Configure and optionally enable decimation buffer
:param ch: Channel to configure
:type ch: int
:param address: Starting address of buffer
:type address: int
:param length: length of buffer (how many samples to take)
:type length: int
:param enable: True to enable buffer
:type enable: bool
"""
self.avg_bufs[ch].config_buf(address, length)
if enable:
self.enable_buf(ch)
def enable_buf(self, ch):
self.avg_bufs[ch].enable_buf()
def get_avg_max_length(self, ch=0):
"""Get accumulation buffer length for channel
:param ch: Channel
:type ch: int
:return: Length of accumulation buffer for channel 'ch'
:rtype: int
"""
return self.cfg['avg_bufs'][ch]['avg_maxlen']
def load_pulse_data(self, ch, idata, qdata, addr):
"""Load pulse data into signal generators
:param ch: Channel
:type ch: int
:param idata: data for ichannel
:type idata: ndarray(dtype=int16)
:param qdata: data for qchannel
:type qdata: ndarray(dtype=int16)
:param addr: address to start data at
:type addr: int
"""
return self.gens[ch-1].load(xin_i=idata, xin_q=qdata, addr=addr)
def set_nyquist(self, ch, nqz):
"""
Sets DAC channel ch to operate in Nyquist zone nqz mode.
Channels are indexed as they are on the tProc outputs: in other words, the first DAC channel is channel 1.
(tProc output 0 is reserved for readout triggers and PMOD outputs)
Channel 1 : connected to Signal Generator V4, which drives DAC 228 CH0.
Channel 2 : connected to Signal Generator V4, which drives DAC 228 CH1.
Channel 3 : connected to Signal Generator V4, which drives DAC 228 CH2.
Channel 4 : connected to Signal Generator V4, which drives DAC 229 CH0.
Channel 5 : connected to Signal Generator V4, which drives DAC 229 CH1.
Channel 6 : connected to Signal Generator V4, which drives DAC 229 CH2.
Channel 7 : connected to Signal Generator V4, which drives DAC 229 CH3.
tiles: DAC 228: 0, DAC 229: 1
channels: CH0: 0, CH1: 1, CH2: 2, CH3: 3
:param ch: DAC channel
:type ch: int
:param nqz: Nyquist zone
:type nqz: int
        :return: the Nyquist zone that was actually set
        :rtype: int
"""
#ch_info={1: (0,0), 2: (0,1), 3: (0,2), 4: (1,0), 5: (1,1), 6: (1, 2), 7: (1,3)}
tile, channel, _ = self.dac_blocks[ch-1]
dac_block=self.rf.dac_tiles[tile].blocks[channel]
dac_block.NyquistZone=nqz
return dac_block.NyquistZone
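    # Example (illustrative): put the DAC driven by tProc output channel 1
    # into the second Nyquist zone:
    #
    #   soc = QickSoc()
    #   soc.set_nyquist(ch=1, nqz=2)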
|
# Python Exercise 115: Create a small modularized system
# that allows registering people by name and age in a plain text file.
# The system has 3 options:
# List, Register, Exit
from ex115.menu.menu import *
from time import sleep
arquivo_diretorio = "./banco_de_dados.txt"
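# Each record is stored as one "name,age" line in the file, e.g. (illustrative):
#   Maria,30
#   Joao,25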
while True:
response = menu()
if response == 1:
        try:
            with open(arquivo_diretorio, "r") as arquivo:
                for linha in arquivo:
                    informacao = linha.split(",")
                    informacao[1] = informacao[1].replace("\n", "")
                    print(f"{informacao[0]:<30} Age: {informacao[1]:>3} years")
            sleep(5)
        except FileNotFoundError:
            print("No people registered yet")
elif response == 2:
        try:
            nome = str(input("Enter the person's name: "))
            idade = int(input("Enter the person's age (in years): "))
            with open(arquivo_diretorio, 'a') as arquivo:
                arquivo.write(f"{nome},{idade}" + "\n")
            print("Registration completed successfully!")
        except ValueError:
            print("Error, invalid input")
elif response == 3:
break
|
#!/usr/bin/env python
"""
Hello world script for Session API (https://www.tropo.com/docs/webapi/sessionapi.htm)
Upon launch, it will trigger a message to be sent via Jabber to the addess specified in
'number'.
"""
from itty import *
from ciscotropowebapi import Tropo, Session
from urllib import urlencode
from urllib2 import urlopen
@post('/index.json')
def index(request):
session = Session(request.body)
t = Tropo()
t.call(to=session.parameters['numberToDial'], network='JABBER')
t.say(session.parameters['message'])
return t.RenderJson()
base_url = 'http://api.tropo.com/1.0/sessions'
token = 'xxxxxxxxxx'
action = 'create'
number = 'username@domain'
message = 'hello from the session API!'
params = urlencode([('action', action), ('token', token), ('numberToDial', number), ('message', message)])
data = urlopen('%s?%s' % (base_url, params)).read()
run_itty(server='wsgiref', host='0.0.0.0', port=8888)
|
from spaceone.core.error import *
class ERROR_SCHEDULE_OPTION(ERROR_INVALID_ARGUMENT):
_message = 'Only one schedule option can be set. (cron | interval | minutes | hours)'
|
# Generated by Django 3.1.7 on 2021-03-24 05:14
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('atd', '0003_uploadfile'),
]
operations = [
migrations.CreateModel(
name='FileUpload',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('overview', models.CharField(blank=True, max_length=255, verbose_name='Overview')),
('file', models.FileField(upload_to='media')),
('uploaded_at', models.DateTimeField(auto_now_add=True)),
],
),
migrations.DeleteModel(
name='UploadFile',
),
]
|
from discord.ext import commands
import discord.utils
def is_owner_check(message):
""""is it me?"""
return message.author.id == '149281074437029890'
def is_owner():
"""is me but a decorator"""
return commands.check(lambda ctx: is_owner_check(ctx.message))
def is_user_check(message, user_id):
"""Is is a user?"""
return message.author.id == user_id
def is_user(user_id):
"""Is user but decorator"""
return commands.check(lambda ctx: is_user_check(ctx.message, user_id))
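# Example usage (illustrative, assuming a discord.ext.commands.Bot named bot;
# IDs are strings here, matching the old discord.py API this file targets):
#
#   @bot.command(pass_context=True)
#   @is_user('149281074437029890')
#   async def secret(ctx):
#       await bot.say('only that user can run this')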
"""
There is now a permissions system!
if you don't meet a permission to do a command, nothing really happens
but if you are the owner or a so called bot admin or a server admin or
if you are the owner (treefroog) you can do any command
"'it just works'
-todd howard"
-micheal scott
"""
def check_permissions(ctx, perms):
"""does some permission checking. I can do anything"""
msg = ctx.message
if is_owner_check(msg):
return True
ch = msg.channel
author = msg.author
resolved = ch.permissions_for(author)
return all(getattr(resolved, name, None) == value for name, value in perms.items())
def role_or_permissions(ctx, check, **perms):
"""checks roles or permissions, since roles can do some stuff"""
if check_permissions(ctx, perms):
return True
ch = ctx.message.channel
author = ctx.message.author
if ch.is_private:
return False # can't have roles in PMs
role = discord.utils.find(check, author.roles)
return role is not None
def mod_or_permissions(**perms):
"""checks if mod, or permissions to do stuff"""
def predicate(ctx):
return role_or_permissions(ctx, lambda r: r.name in ('Bot Mod', 'Bot Admin'), **perms)
return commands.check(predicate)
def admin_or_permissions(**perms):
"""checks if admin, or if permission allows to do stuff"""
def predicate(ctx):
return role_or_permissions(ctx, lambda r: r.name == 'Bot Admin', **perms)
return commands.check(predicate)
def is_in_servers(*server_ids):
"""if is in server"""
def predicate(ctx):
server = ctx.message.server
if server is None:
return False
return server.id in server_ids
    return commands.check(predicate)
|
import asyncio
import os
import shutil
from datetime import datetime
from pathlib import Path
from typing import List
from fastapi import APIRouter, Depends, HTTPException, Response
from fastapi.security.api_key import APIKey
from sqlalchemy.orm import Session
from app import schemas
from app.api.deps import get_api_key, get_db, oauth2_password_bearer_or_api_key
from app.core.config import settings
from app.core.logging import logger
from app.crud import scan as crud
from app.kafka.producer import (send_remove_scan_files_event_to_kafka,
send_scan_event_to_kafka)
from app.models import Scan
from app.schemas.events import RemoveScanFilesEvent
from app.schemas.scan import ScanCreatedEvent
router = APIRouter()
@router.post("", response_model=schemas.Scan)
async def create_scan(
scan: schemas.ScanCreate,
db: Session = Depends(get_db),
api_key: APIKey = Depends(get_api_key),
):
scan = crud.create_scan(db=db, scan=scan)
# See if we have HAADF image for this scan
upload_path = Path(settings.HAADF_IMAGE_UPLOAD_DIR) / f"scan{scan.scan_id}.png"
if upload_path.exists():
# Move it to the right location to be served statically
loop = asyncio.get_event_loop()
await loop.run_in_executor(
None,
shutil.move,
upload_path,
Path(settings.HAADF_IMAGE_STATIC_DIR) / f"{scan.id}.png",
)
# Finally update the haadf path
(_, scan) = crud.update_scan(
db, scan.id, haadf_path=f"{settings.HAADF_IMAGE_URL_PREFIX}/{scan.id}.png"
)
await send_scan_event_to_kafka(
ScanCreatedEvent(**schemas.Scan.from_orm(scan).dict())
)
return scan
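# Note (illustrative): a POST that creates a scan with scan_id=42 picks up a
# pending HAADF upload at {HAADF_IMAGE_UPLOAD_DIR}/scan42.png, moves it under
# HAADF_IMAGE_STATIC_DIR keyed by the database id, and records the public URL
# {HAADF_IMAGE_URL_PREFIX}/{id}.png on the scan.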
@router.get(
"",
response_model=List[schemas.Scan],
dependencies=[Depends(oauth2_password_bearer_or_api_key)],
)
def read_scans(
response: Response,
skip: int = 0,
limit: int = 100,
scan_id: int = -1,
state: schemas.ScanState = None,
created: datetime = None,
has_haadf: bool = None,
db: Session = Depends(get_db),
):
scans = crud.get_scans(
db,
skip=skip,
limit=limit,
scan_id=scan_id,
state=state,
created=created,
has_haadf=has_haadf,
)
count = crud.get_scans_count(
db,
skip=skip,
limit=limit,
scan_id=scan_id,
state=state,
created=created,
has_haadf=has_haadf,
)
response.headers["X-Total-Count"] = str(count)
return scans
@router.get(
"/{id}",
response_model=schemas.Scan,
dependencies=[Depends(oauth2_password_bearer_or_api_key)],
)
def read_scan(response: Response, id: int, db: Session = Depends(get_db)):
db_scan = crud.get_scan(db, id=id)
if db_scan is None:
raise HTTPException(status_code=404, detail="Scan not found")
(prev_scan, next_scan) = crud.get_prev_next_scan(db, id)
if prev_scan is not None:
response.headers["X-Previous-Scan"] = str(prev_scan)
if next_scan is not None:
response.headers["X-Next-Scan"] = str(next_scan)
return db_scan
@router.patch(
"/{id}",
response_model=schemas.Scan,
dependencies=[Depends(oauth2_password_bearer_or_api_key)],
)
async def update_scan(
id: int, payload: schemas.ScanUpdate, db: Session = Depends(get_db)
):
db_scan = crud.get_scan(db, id=id)
if db_scan is None:
raise HTTPException(status_code=404, detail="Scan not found")
(updated, scan) = crud.update_scan(
db,
id,
log_files=payload.log_files,
locations=payload.locations,
notes=payload.notes,
)
if updated:
scan_updated_event = schemas.ScanUpdateEvent(id=id)
if scan.log_files == payload.log_files:
scan_updated_event.log_files = scan.log_files
scan_updated_event.locations = [
schemas.scan.Location.from_orm(l) for l in scan.locations
]
if scan.notes is not None and scan.notes == payload.notes:
scan_updated_event.notes = scan.notes
await send_scan_event_to_kafka(scan_updated_event)
return scan
async def _remove_scan_files(db_scan: Scan, host: str = None):
if host is None:
        compute_hosts = [m.name for m in settings.MACHINES]
        hosts = set([l.host for l in db_scan.locations if l.host not in compute_hosts])
else:
hosts = [host]
for host in hosts:
# Verify that we have scan files on this host
locations = [l for l in db_scan.locations if l.host == host]
if len(locations) == 0:
raise HTTPException(status_code=400, detail="Invalid request")
scan = schemas.Scan.from_orm(db_scan)
await send_remove_scan_files_event_to_kafka(
RemoveScanFilesEvent(scan=scan, host=host)
)
@router.delete("/{id}", dependencies=[Depends(oauth2_password_bearer_or_api_key)])
async def delete_scan(id: int, remove_scan_files: bool, db: Session = Depends(get_db)):
db_scan = crud.get_scan(db, id=id)
if db_scan is None:
raise HTTPException(status_code=404, detail="Scan not found")
if remove_scan_files:
logger.info("Removing scan files.")
await _remove_scan_files(db_scan)
crud.delete_scan(db, id)
haadf_path = Path(settings.HAADF_IMAGE_STATIC_DIR) / f"{id}.png"
logger.info(f"Checking if HAADF image exists: {haadf_path}")
if haadf_path.exists():
logger.info(f"Removing HAADF image: {haadf_path}")
# Remove it
loop = asyncio.get_event_loop()
await loop.run_in_executor(None, os.remove, haadf_path)
@router.put(
"/{id}/remove",
dependencies=[Depends(oauth2_password_bearer_or_api_key)],
)
async def remove_scan_files(id: int, host: str, db: Session = Depends(get_db)):
db_scan = crud.get_scan(db, id=id)
if db_scan is None:
raise HTTPException(status_code=404, detail="Scan not found")
await _remove_scan_files(db_scan, host)
@router.delete(
"/{id}/locations/{location_id}",
dependencies=[Depends(oauth2_password_bearer_or_api_key)],
)
def delete_location(id: int, location_id: int, db: Session = Depends(get_db)):
db_scan = crud.get_scan(db, id=id)
if db_scan is None:
raise HTTPException(status_code=404, detail="Scan not found")
location = crud.get_location(db, id=location_id)
if location is None:
raise HTTPException(status_code=404, detail="Location not found")
crud.delete_location(db, location_id)
@router.delete(
"/{id}/locations",
dependencies=[Depends(oauth2_password_bearer_or_api_key)],
)
async def remove_scan(id: int, host: str, db: Session = Depends(get_db)):
db_scan = crud.get_scan(db, id=id)
if db_scan is None:
raise HTTPException(status_code=404, detail="Scan not found")
crud.delete_locations(db, scan_id=id, host=host)
db_scan = crud.get_scan(db, id=id)
scan_updated_event = schemas.ScanUpdateEvent(id=id)
if db_scan is not None:
scan_updated_event.locations = db_scan.locations
await send_scan_event_to_kafka(scan_updated_event)
|
import unittest
from darwcss.darwcss import CSS, Selector, Style
class TestConf(unittest.TestCase):
def test_conf_inheritance_selector(self):
css = CSS({"darwcss_auto": True})
with css.selector('.home') as selector:
self.assertEqual(selector.meta_cfg, {"darwcss_auto": True})
if __name__ == "__main__":
unittest.main()
|
import logging
from typing import Dict, List, Optional, Tuple
from localstack import config
from localstack.services import install
from localstack.utils.common import (
TMP_THREADS,
ShellCommandThread,
chmod_r,
get_free_tcp_port,
mkdir,
)
from localstack.utils.run import FuncThread
from localstack.utils.serving import Server
LOG = logging.getLogger(__name__)
class KinesisMockServer(Server):
"""
Server abstraction for controlling Kinesis Mock in a separate thread
"""
def __init__(
self,
port: int,
bin_path: str,
latency: str,
host: str = "localhost",
log_level: str = "INFO",
data_dir: Optional[str] = None,
initialize_streams: Optional[str] = None,
) -> None:
self._latency = latency
self._initialize_streams = initialize_streams
self._data_dir = data_dir
self._bin_path = bin_path
self._log_level = log_level
super().__init__(port, host)
def do_start_thread(self) -> FuncThread:
cmd, env_vars = self._create_shell_command()
LOG.debug("starting kinesis process %s with env vars %s", cmd, env_vars)
t = ShellCommandThread(
cmd,
strip_color=True,
env_vars=env_vars,
log_listener=self._log_listener,
auto_restart=True,
)
TMP_THREADS.append(t)
t.start()
return t
def _create_shell_command(self) -> Tuple[List, Dict]:
"""
Helper method for creating kinesis mock invocation command
:return: returns a tuple containing the command list and a dictionary with the environment variables
"""
env_vars = {"KINESIS_MOCK_PLAIN_PORT": self.port, "SHARD_LIMIT": config.KINESIS_SHARD_LIMIT}
latency_params = [
"CREATE_STREAM_DURATION",
"DELETE_STREAM_DURATION",
"REGISTER_STREAM_CONSUMER_DURATION",
"START_STREAM_ENCRYPTION_DURATION",
"STOP_STREAM_ENCRYPTION_DURATION",
"DEREGISTER_STREAM_CONSUMER_DURATION",
"MERGE_SHARDS_DURATION",
"SPLIT_SHARD_DURATION",
"UPDATE_SHARD_COUNT_DURATION",
]
for param in latency_params:
env_vars[param] = self._latency
if self._data_dir:
env_vars["SHOULD_PERSIST_DATA"] = "true"
env_vars["PERSIST_PATH"] = self._data_dir
env_vars["PERSIST_INTERVAL"] = config.KINESIS_MOCK_PERSIST_INTERVAL
env_vars["LOG_LEVEL"] = self._log_level
if self._initialize_streams:
env_vars["INITIALIZE_STREAMS"] = self._initialize_streams
if self._bin_path.endswith(".jar"):
cmd = ["java", "-XX:+UseG1GC", "-jar", self._bin_path]
else:
chmod_r(self._bin_path, 0o777)
cmd = [self._bin_path, "--gc=G1"]
return cmd, env_vars
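    # For example (illustrative values), with latency "500ms", a data dir
    # set, and a jar binary, the invocation resembles:
    #   cmd      = ["java", "-XX:+UseG1GC", "-jar", "/path/to/kinesis-mock.jar"]
    #   env_vars = {"KINESIS_MOCK_PLAIN_PORT": 4566, "SHARD_LIMIT": 100,
    #               "CREATE_STREAM_DURATION": "500ms", ...,
    #               "SHOULD_PERSIST_DATA": "true", "LOG_LEVEL": "INFO"}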
def _log_listener(self, line, **_kwargs):
LOG.info(line.rstrip())
def create_kinesis_mock_server(port=None, persist_path: Optional[str] = None) -> KinesisMockServer:
"""
Creates a new Kinesis Mock server instance. Installs Kinesis Mock on the host first if necessary.
Introspects on the host config to determine server configuration:
config.dirs.data -> if set, the server runs with persistence using the path to store data
config.LS_LOG -> configure kinesis mock log level (defaults to INFO)
config.KINESIS_LATENCY -> configure stream latency (in milliseconds)
config.KINESIS_INITIALIZE_STREAMS -> Initialize the given streams on startup
"""
port = port or get_free_tcp_port()
is_kinesis_mock_installed, kinesis_mock_bin_path = install.get_is_kinesis_mock_installed()
if not is_kinesis_mock_installed:
install.install_kinesis_mock(kinesis_mock_bin_path)
persist_path = (
f"{config.dirs.data}/kinesis" if not persist_path and config.dirs.data else persist_path
)
if persist_path:
mkdir(persist_path)
if config.LS_LOG:
if config.LS_LOG == "warning":
log_level = "WARN"
else:
log_level = config.LS_LOG.upper()
else:
log_level = "INFO"
latency = config.KINESIS_LATENCY + "ms"
initialize_streams = (
config.KINESIS_INITIALIZE_STREAMS if config.KINESIS_INITIALIZE_STREAMS else None
)
server = KinesisMockServer(
port=port,
bin_path=kinesis_mock_bin_path,
log_level=log_level,
latency=latency,
initialize_streams=initialize_streams,
data_dir=persist_path,
)
return server
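# Typical usage (a sketch; Server provides the lifecycle methods):
#
#   server = create_kinesis_mock_server()
#   server.start()
#   # ... use the mock on server.port ...
#   server.shutdown()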
|
from ..deque.deque import Deque, Cell
from .exceptions import InconsistentMaxSize
class DequeuedQueue(Deque):
def __init__(self, max_size):
if max_size <= 0:
            raise InconsistentMaxSize("DequeuedQueue max_size must be greater than 0")
super().__init__(max_size)
def _add(self, data, position):
removed_cell = None
if self.full():
if position == 'head':
removed_cell = self.remove_tail()
else:
removed_cell = self.remove_head()
new_cell = Cell(data)
if self.size == 0:
self._head = new_cell
self._tail = new_cell
elif position == 'head':
self._head.left = new_cell
new_cell.right = self._head
self._head = new_cell
else:
self._tail.right = new_cell
new_cell.left = self._tail
self._tail = new_cell
self._size += 1
return removed_cell
def add_head(self, data):
return self._add(data, 'head')
def add_tail(self, data):
return self._add(data, 'tail')
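# Illustrative behaviour (a sketch, assuming Cell keeps its payload in .data):
#
#   q = DequeuedQueue(max_size=2)
#   q.add_tail(1)            # queue: [1]
#   q.add_tail(2)            # queue: [1, 2]
#   evicted = q.add_tail(3)  # full, so the head is evicted: [2, 3]
#   assert evicted.data == 1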
|
import os
from typing import Generator
from .vector import Colour
def chunk(seq: list[str], n: int) -> Generator[list[str], None, None]:
for i in range(0, len(seq), n):
yield seq[i : i + n]
class Canvas:
def __init__(self, width: int, height: int) -> None:
self.height = height
self.width = width
self.pixels = [[Colour(0, 0, 0) for _ in range(width)] for _ in range(height)]
def write(self, x: int, y: int, colour: Colour) -> None:
if not (0 <= x < self.width and 0 <= y < self.height):
raise IndexError("Canvas write out of range")
self.pixels[y][x] = colour
def get(self, x: int, y: int) -> Colour:
if not (0 <= x < self.width and 0 <= y < self.height):
raise IndexError("Canvas read out of range")
return self.pixels[y][x]
def as_ppm_string(self) -> str:
header = f"P3\n{self.width} {self.height}\n255\n"
body = "\n".join(
"\n".join(
" ".join(line)
for line in chunk(
[
f"{pixel.r*255:.0f} {pixel.g*255:.0f} {pixel.b*255:.0f}"
for pixel in row
],
5,
)
)
for row in self.pixels
)
return header + body + "\n"
def save_ppm(self, path: os.PathLike) -> None:
data = self.as_ppm_string()
with open(path, "w") as f:
f.write(data)
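# Example usage (illustrative):
#
#   canvas = Canvas(4, 2)
#   canvas.write(0, 0, Colour(1.0, 0.0, 0.0))  # colour components in [0, 1]
#   canvas.save_ppm("out.ppm")                 # plain-text P3 PPM, max value 255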
|
default_app_config = '_app.initial_data.apps.InitialDataConfig'
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
import api_pb2 as api__pb2
class ManagerStub(object):
"""For each RPC service, we define mapping to HTTP REST API method.
The mapping includes the URL path, query parameters and request body.
https://cloud.google.com/service-infrastructure/docs/service-management/reference/rpc/google.api#http
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.CreateStudy = channel.unary_unary(
'/api.Manager/CreateStudy',
request_serializer=api__pb2.CreateStudyRequest.SerializeToString,
response_deserializer=api__pb2.CreateStudyReply.FromString,
)
self.GetStudy = channel.unary_unary(
'/api.Manager/GetStudy',
request_serializer=api__pb2.GetStudyRequest.SerializeToString,
response_deserializer=api__pb2.GetStudyReply.FromString,
)
self.GetStudyList = channel.unary_unary(
'/api.Manager/GetStudyList',
request_serializer=api__pb2.GetStudyListRequest.SerializeToString,
response_deserializer=api__pb2.GetStudyListReply.FromString,
)
self.CreateTrial = channel.unary_unary(
'/api.Manager/CreateTrial',
request_serializer=api__pb2.CreateTrialRequest.SerializeToString,
response_deserializer=api__pb2.CreateTrialReply.FromString,
)
self.GetTrials = channel.unary_unary(
'/api.Manager/GetTrials',
request_serializer=api__pb2.GetTrialsRequest.SerializeToString,
response_deserializer=api__pb2.GetTrialsReply.FromString,
)
self.RegisterWorker = channel.unary_unary(
'/api.Manager/RegisterWorker',
request_serializer=api__pb2.RegisterWorkerRequest.SerializeToString,
response_deserializer=api__pb2.RegisterWorkerReply.FromString,
)
self.GetWorkers = channel.unary_unary(
'/api.Manager/GetWorkers',
request_serializer=api__pb2.GetWorkersRequest.SerializeToString,
response_deserializer=api__pb2.GetWorkersReply.FromString,
)
self.UpdateWorkerState = channel.unary_unary(
'/api.Manager/UpdateWorkerState',
request_serializer=api__pb2.UpdateWorkerStateRequest.SerializeToString,
response_deserializer=api__pb2.UpdateWorkerStateReply.FromString,
)
self.GetSuggestions = channel.unary_unary(
'/api.Manager/GetSuggestions',
request_serializer=api__pb2.GetSuggestionsRequest.SerializeToString,
response_deserializer=api__pb2.GetSuggestionsReply.FromString,
)
self.GetShouldStopWorkers = channel.unary_unary(
'/api.Manager/GetShouldStopWorkers',
request_serializer=api__pb2.GetShouldStopWorkersRequest.SerializeToString,
response_deserializer=api__pb2.GetShouldStopWorkersReply.FromString,
)
self.GetMetrics = channel.unary_unary(
'/api.Manager/GetMetrics',
request_serializer=api__pb2.GetMetricsRequest.SerializeToString,
response_deserializer=api__pb2.GetMetricsReply.FromString,
)
self.SetSuggestionParameters = channel.unary_unary(
'/api.Manager/SetSuggestionParameters',
request_serializer=api__pb2.SetSuggestionParametersRequest.SerializeToString,
response_deserializer=api__pb2.SetSuggestionParametersReply.FromString,
)
self.GetSuggestionParameters = channel.unary_unary(
'/api.Manager/GetSuggestionParameters',
request_serializer=api__pb2.GetSuggestionParametersRequest.SerializeToString,
response_deserializer=api__pb2.GetSuggestionParametersReply.FromString,
)
self.GetSuggestionParameterList = channel.unary_unary(
'/api.Manager/GetSuggestionParameterList',
request_serializer=api__pb2.GetSuggestionParameterListRequest.SerializeToString,
response_deserializer=api__pb2.GetSuggestionParameterListReply.FromString,
)
self.SetEarlyStoppingParameters = channel.unary_unary(
'/api.Manager/SetEarlyStoppingParameters',
request_serializer=api__pb2.SetEarlyStoppingParametersRequest.SerializeToString,
response_deserializer=api__pb2.SetEarlyStoppingParametersReply.FromString,
)
self.GetEarlyStoppingParameters = channel.unary_unary(
'/api.Manager/GetEarlyStoppingParameters',
request_serializer=api__pb2.GetEarlyStoppingParametersRequest.SerializeToString,
response_deserializer=api__pb2.GetEarlyStoppingParametersReply.FromString,
)
self.GetEarlyStoppingParameterList = channel.unary_unary(
'/api.Manager/GetEarlyStoppingParameterList',
request_serializer=api__pb2.GetEarlyStoppingParameterListRequest.SerializeToString,
response_deserializer=api__pb2.GetEarlyStoppingParameterListReply.FromString,
)
self.SaveStudy = channel.unary_unary(
'/api.Manager/SaveStudy',
request_serializer=api__pb2.SaveStudyRequest.SerializeToString,
response_deserializer=api__pb2.SaveStudyReply.FromString,
)
self.SaveModel = channel.unary_unary(
'/api.Manager/SaveModel',
request_serializer=api__pb2.SaveModelRequest.SerializeToString,
response_deserializer=api__pb2.SaveModelReply.FromString,
)
self.ReportMetricsLogs = channel.unary_unary(
'/api.Manager/ReportMetricsLogs',
request_serializer=api__pb2.ReportMetricsLogsRequest.SerializeToString,
response_deserializer=api__pb2.ReportMetricsLogsReply.FromString,
)
self.GetSavedStudies = channel.unary_unary(
'/api.Manager/GetSavedStudies',
request_serializer=api__pb2.GetSavedStudiesRequest.SerializeToString,
response_deserializer=api__pb2.GetSavedStudiesReply.FromString,
)
self.GetSavedModels = channel.unary_unary(
'/api.Manager/GetSavedModels',
request_serializer=api__pb2.GetSavedModelsRequest.SerializeToString,
response_deserializer=api__pb2.GetSavedModelsReply.FromString,
)
class ManagerServicer(object):
"""For each RPC service, we define mapping to HTTP REST API method.
The mapping includes the URL path, query parameters and request body.
https://cloud.google.com/service-infrastructure/docs/service-management/reference/rpc/google.api#http
"""
def CreateStudy(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetStudy(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetStudyList(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CreateTrial(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetTrials(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def RegisterWorker(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetWorkers(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def UpdateWorkerState(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetSuggestions(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetShouldStopWorkers(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetMetrics(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SetSuggestionParameters(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetSuggestionParameters(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetSuggestionParameterList(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SetEarlyStoppingParameters(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetEarlyStoppingParameters(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetEarlyStoppingParameterList(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SaveStudy(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SaveModel(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ReportMetricsLogs(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetSavedStudies(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetSavedModels(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_ManagerServicer_to_server(servicer, server):
rpc_method_handlers = {
'CreateStudy': grpc.unary_unary_rpc_method_handler(
servicer.CreateStudy,
request_deserializer=api__pb2.CreateStudyRequest.FromString,
response_serializer=api__pb2.CreateStudyReply.SerializeToString,
),
'GetStudy': grpc.unary_unary_rpc_method_handler(
servicer.GetStudy,
request_deserializer=api__pb2.GetStudyRequest.FromString,
response_serializer=api__pb2.GetStudyReply.SerializeToString,
),
'GetStudyList': grpc.unary_unary_rpc_method_handler(
servicer.GetStudyList,
request_deserializer=api__pb2.GetStudyListRequest.FromString,
response_serializer=api__pb2.GetStudyListReply.SerializeToString,
),
'CreateTrial': grpc.unary_unary_rpc_method_handler(
servicer.CreateTrial,
request_deserializer=api__pb2.CreateTrialRequest.FromString,
response_serializer=api__pb2.CreateTrialReply.SerializeToString,
),
'GetTrials': grpc.unary_unary_rpc_method_handler(
servicer.GetTrials,
request_deserializer=api__pb2.GetTrialsRequest.FromString,
response_serializer=api__pb2.GetTrialsReply.SerializeToString,
),
'RegisterWorker': grpc.unary_unary_rpc_method_handler(
servicer.RegisterWorker,
request_deserializer=api__pb2.RegisterWorkerRequest.FromString,
response_serializer=api__pb2.RegisterWorkerReply.SerializeToString,
),
'GetWorkers': grpc.unary_unary_rpc_method_handler(
servicer.GetWorkers,
request_deserializer=api__pb2.GetWorkersRequest.FromString,
response_serializer=api__pb2.GetWorkersReply.SerializeToString,
),
'UpdateWorkerState': grpc.unary_unary_rpc_method_handler(
servicer.UpdateWorkerState,
request_deserializer=api__pb2.UpdateWorkerStateRequest.FromString,
response_serializer=api__pb2.UpdateWorkerStateReply.SerializeToString,
),
'GetSuggestions': grpc.unary_unary_rpc_method_handler(
servicer.GetSuggestions,
request_deserializer=api__pb2.GetSuggestionsRequest.FromString,
response_serializer=api__pb2.GetSuggestionsReply.SerializeToString,
),
'GetShouldStopWorkers': grpc.unary_unary_rpc_method_handler(
servicer.GetShouldStopWorkers,
request_deserializer=api__pb2.GetShouldStopWorkersRequest.FromString,
response_serializer=api__pb2.GetShouldStopWorkersReply.SerializeToString,
),
'GetMetrics': grpc.unary_unary_rpc_method_handler(
servicer.GetMetrics,
request_deserializer=api__pb2.GetMetricsRequest.FromString,
response_serializer=api__pb2.GetMetricsReply.SerializeToString,
),
'SetSuggestionParameters': grpc.unary_unary_rpc_method_handler(
servicer.SetSuggestionParameters,
request_deserializer=api__pb2.SetSuggestionParametersRequest.FromString,
response_serializer=api__pb2.SetSuggestionParametersReply.SerializeToString,
),
'GetSuggestionParameters': grpc.unary_unary_rpc_method_handler(
servicer.GetSuggestionParameters,
request_deserializer=api__pb2.GetSuggestionParametersRequest.FromString,
response_serializer=api__pb2.GetSuggestionParametersReply.SerializeToString,
),
'GetSuggestionParameterList': grpc.unary_unary_rpc_method_handler(
servicer.GetSuggestionParameterList,
request_deserializer=api__pb2.GetSuggestionParameterListRequest.FromString,
response_serializer=api__pb2.GetSuggestionParameterListReply.SerializeToString,
),
'SetEarlyStoppingParameters': grpc.unary_unary_rpc_method_handler(
servicer.SetEarlyStoppingParameters,
request_deserializer=api__pb2.SetEarlyStoppingParametersRequest.FromString,
response_serializer=api__pb2.SetEarlyStoppingParametersReply.SerializeToString,
),
'GetEarlyStoppingParameters': grpc.unary_unary_rpc_method_handler(
servicer.GetEarlyStoppingParameters,
request_deserializer=api__pb2.GetEarlyStoppingParametersRequest.FromString,
response_serializer=api__pb2.GetEarlyStoppingParametersReply.SerializeToString,
),
'GetEarlyStoppingParameterList': grpc.unary_unary_rpc_method_handler(
servicer.GetEarlyStoppingParameterList,
request_deserializer=api__pb2.GetEarlyStoppingParameterListRequest.FromString,
response_serializer=api__pb2.GetEarlyStoppingParameterListReply.SerializeToString,
),
'SaveStudy': grpc.unary_unary_rpc_method_handler(
servicer.SaveStudy,
request_deserializer=api__pb2.SaveStudyRequest.FromString,
response_serializer=api__pb2.SaveStudyReply.SerializeToString,
),
'SaveModel': grpc.unary_unary_rpc_method_handler(
servicer.SaveModel,
request_deserializer=api__pb2.SaveModelRequest.FromString,
response_serializer=api__pb2.SaveModelReply.SerializeToString,
),
'ReportMetricsLogs': grpc.unary_unary_rpc_method_handler(
servicer.ReportMetricsLogs,
request_deserializer=api__pb2.ReportMetricsLogsRequest.FromString,
response_serializer=api__pb2.ReportMetricsLogsReply.SerializeToString,
),
'GetSavedStudies': grpc.unary_unary_rpc_method_handler(
servicer.GetSavedStudies,
request_deserializer=api__pb2.GetSavedStudiesRequest.FromString,
response_serializer=api__pb2.GetSavedStudiesReply.SerializeToString,
),
'GetSavedModels': grpc.unary_unary_rpc_method_handler(
servicer.GetSavedModels,
request_deserializer=api__pb2.GetSavedModelsRequest.FromString,
response_serializer=api__pb2.GetSavedModelsReply.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'api.Manager', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
class SuggestionStub(object):
# missing associated documentation comment in .proto file
pass
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.GetSuggestions = channel.unary_unary(
'/api.Suggestion/GetSuggestions',
request_serializer=api__pb2.GetSuggestionsRequest.SerializeToString,
response_deserializer=api__pb2.GetSuggestionsReply.FromString,
)
class SuggestionServicer(object):
# missing associated documentation comment in .proto file
pass
def GetSuggestions(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_SuggestionServicer_to_server(servicer, server):
rpc_method_handlers = {
'GetSuggestions': grpc.unary_unary_rpc_method_handler(
servicer.GetSuggestions,
request_deserializer=api__pb2.GetSuggestionsRequest.FromString,
response_serializer=api__pb2.GetSuggestionsReply.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'api.Suggestion', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
class EarlyStoppingStub(object):
# missing associated documentation comment in .proto file
pass
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.GetShouldStopWorkers = channel.unary_unary(
'/api.EarlyStopping/GetShouldStopWorkers',
request_serializer=api__pb2.GetShouldStopWorkersRequest.SerializeToString,
response_deserializer=api__pb2.GetShouldStopWorkersReply.FromString,
)
class EarlyStoppingServicer(object):
# missing associated documentation comment in .proto file
pass
def GetShouldStopWorkers(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_EarlyStoppingServicer_to_server(servicer, server):
rpc_method_handlers = {
'GetShouldStopWorkers': grpc.unary_unary_rpc_method_handler(
servicer.GetShouldStopWorkers,
request_deserializer=api__pb2.GetShouldStopWorkersRequest.FromString,
response_serializer=api__pb2.GetShouldStopWorkersReply.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'api.EarlyStopping', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
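# Usage sketch (not part of the generated module; the address and worker count
# are illustrative assumptions): wiring one of the add_*Servicer_to_server
# helpers above into a grpc server.
def _example_serve_manager(servicer, address='[::]:6789'):
    from concurrent import futures
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    add_ManagerServicer_to_server(servicer, server)
    server.add_insecure_port(address)
    server.start()
    return server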
|
import argparse
import sys
from flowy import SWFWorkflowStarter
def main():
parser = argparse.ArgumentParser()
parser.add_argument("domain")
parser.add_argument("name")
parser.add_argument("version")
parser.add_argument("--task-list")
parser.add_argument("--task-duration", type=int, default=None)
parser.add_argument("--workflow-duration", type=int, default=None)
parser.add_argument("--child-policy", type=str, default=None)
parser.add_argument("--lambda-role", type=str, default=None)
parser.add_argument('args', nargs=argparse.REMAINDER)
args = parser.parse_args()
starter = SWFWorkflowStarter(args.domain, args.name, args.version,
swf_client=None, task_list=args.task_list,
task_duration=args.task_duration,
workflow_duration=args.workflow_duration,
child_policy=args.child_policy,
lambda_role=args.lambda_role)
return not starter(*args.args) # 0 is success
if __name__ == '__main__':
sys.exit(main())
|
import argparse
import copy
from covid19.version import version
def get_configuration(args=None):
default_interval = 1200
default_port = 8080
default_pg_port = 5432
default_pg_database = 'covid19'
parser = argparse.ArgumentParser()
parser.add_argument('--version', action='version', version=f'%(prog)s {version}')
parser.add_argument('--interval', type=int, default=default_interval,
help=f'Time between measurements (default: {default_interval} sec)')
parser.add_argument('--port', type=int, default=default_port,
help=f'Prometheus listener port (default: {default_port})')
parser.add_argument('--debug', action='store_true',
help='Set logging level to debug')
parser.add_argument('--once', action='store_true',
help='Measure once and then terminate')
parser.add_argument('--apikey',
help='API Key')
parser.add_argument('--postgres-host',
help='Postgres DB host')
parser.add_argument('--postgres-port', default=default_pg_port,
help=f'Postgres DB port (default: {default_pg_port})')
parser.add_argument('--postgres-database', default=default_pg_database,
help=f'Postgres DB database name (default: {default_pg_database})')
parser.add_argument('--postgres-user',
help='Postgres DB user name')
parser.add_argument('--postgres-password',
help='Postgres DB password')
parser.add_argument('--pushgateway',
help='URL of Prometheus pushgateway server')
return parser.parse_args(args)
def print_configuration(config):
redacted = copy.deepcopy(config)
if redacted.apikey:
redacted.apikey = '*' * len(redacted.apikey)
if redacted.postgres_password:
redacted.postgres_password = '*' * 12
return ', '.join([f'{key}={val}' for key, val in vars(redacted).items()])
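# Usage sketch (illustrative arguments): print_configuration redacts the API
# key and any Postgres password before rendering the parsed configuration.
def _example_configuration():
    config = get_configuration(['--apikey', 'secret-key', '--once'])
    return print_configuration(config)  # apikey rendered as '**********'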
|
#!/usr/bin/env python
import os
import unittest
import sys
from test_common import TestCommon
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'library'))
from fastly_service import FastlyConfiguration
class TestFastlySettings(TestCommon):
@TestCommon.vcr.use_cassette()
def test_fastly_settings(self):
settings_configuration = self.minimal_configuration.copy()
settings_configuration.update({
'settings': {
'general.default_ttl': 1000
}
})
settings_configuration = FastlyConfiguration(settings_configuration)
service = self.enforcer.apply_configuration(self.FASTLY_TEST_SERVICE, settings_configuration).service
self.assertEqual(service.active_version.configuration.settings.general_default_ttl, 1000)
self.assertEqual(service.active_version.configuration, settings_configuration)
active_version_number = service.active_version.number
service = self.enforcer.apply_configuration(self.FASTLY_TEST_SERVICE, settings_configuration).service
self.assertEqual(service.active_version.number, active_version_number)
if __name__ == '__main__':
unittest.main()
|
from collections import defaultdict
from typing import List, Tuple, Set, Optional
import torch
from torch.utils.data import Dataset
from transformers import BertTokenizer
MAX_SEQ_LEN = 512
def turn2example(
tokenizer: BertTokenizer,
domain: str,
slot: str,
value: str,
context_ids: List[int],
triple_labels: Optional[Set[tuple]] = None,
belief_state: Optional[List[Tuple[str, str, str]]] = None,
dial_id: str = None,
turn_id: int = None,
) -> tuple:
"""Convert turn data to example based on ontology.
Args:
tokenizer: BertTokenizer, see https://huggingface.co/transformers/model_doc/bert.html#berttokenizer
domain: domain of current example
slot: slot of current example
value: value of current example
        context_ids: context token ids in the BERT vocab
triple_labels: set of (domain, slot, value)
belief_state: list of (domain, slot, value)
dial_id: current dialogue id
turn_id: current turn id
Returns:
example, (input_ids, token_type_ids, domain, slot, value, ...)
"""
candidate = domain + "-" + slot + " = " + value
candidate_ids = tokenizer.encode(candidate, add_special_tokens=False)
input_ids = (
[tokenizer.cls_token_id]
+ context_ids
+ [tokenizer.sep_token_id]
+ candidate_ids
+ [tokenizer.sep_token_id]
)
token_type_ids = [0] + [0] * len(context_ids) + [0] + [1] * len(candidate_ids) + [1]
example = (input_ids, token_type_ids, domain, slot, value)
if dial_id is not None:
label = int((domain, slot, value) in triple_labels)
example += (belief_state, label, dial_id, str(turn_id))
return example
class DSTDataset(Dataset):
def __init__(self, examples: List[tuple]):
super(DSTDataset, self).__init__()
if len(examples) > 5:
(
self.input_ids,
self.token_type_ids,
self.domains,
self.slots,
self.values,
self.belief_states,
self.labels,
self.dialogue_idxs,
self.turn_ids,
) = examples
else:
(
self.input_ids,
self.token_type_ids,
self.domains,
self.slots,
self.values,
) = examples
def __getitem__(self, index: int) -> tuple:
if hasattr(self, "labels"):
return (
self.input_ids[index],
self.token_type_ids[index],
self.domains[index],
self.slots[index],
self.values[index],
self.belief_states[index],
self.labels[index],
self.dialogue_idxs[index],
self.turn_ids[index],
)
else:
return (
self.input_ids[index],
self.token_type_ids[index],
self.domains[index],
self.slots[index],
self.values[index],
)
def __len__(self):
return len(self.input_ids)
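# Minimal sketch (illustrative token ids): a five-entry tuple of parallel
# lists is treated as inference data, i.e. without labels.
def _example_dataset():
    ds = DSTDataset(([[101, 102]], [[0, 0]], ["hotel"], ["price"], ["cheap"]))
    return len(ds), ds[0][2]  # (1, 'hotel')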
def collate_fn(examples: List[tuple], mode: str = "train") -> dict:
"""Merge a list of samples to form a mini-batch of Tensor(s)
generate input_id tensor, token_type_id tensor, attention_mask tensor, pad all tensor to the longest
sequence in the batch.
Args:
examples: list of (input_ids, token_type_ids, domain, slot, value, ...)
mode: train, dev, tests, infer
Returns:
batch data
"""
batch_examples = {}
examples = list(zip(*examples))
if mode == "infer":
(
batch_examples["domains"],
batch_examples["slots"],
batch_examples["values"],
) = examples[2:]
else:
(
batch_examples["domains"],
batch_examples["slots"],
batch_examples["values"],
batch_examples["belief_states"],
batch_examples["labels"],
batch_examples["dialogue_idxs"],
batch_examples["turn_ids"],
) = examples[2:]
attention_mask, input_ids_tensor, token_type_ids_tensor = get_bert_input(examples)
data = {
"input_ids": input_ids_tensor,
"token_type_ids": token_type_ids_tensor,
"attention_mask": attention_mask,
}
if mode == "infer":
data.update(
{
"domains": batch_examples["domains"],
"slots": batch_examples["slots"],
"values": batch_examples["values"],
}
)
return data
else:
data.update(
{"labels": torch.tensor(batch_examples["labels"], dtype=torch.long)}
)
if mode != "train":
data.update(
{
"domains": batch_examples["domains"],
"slots": batch_examples["slots"],
"values": batch_examples["values"],
"belief_states": batch_examples["belief_states"],
"dialogue_idxs": batch_examples["dialogue_idxs"],
"turn_ids": batch_examples["turn_ids"],
}
)
return data
def get_bert_input(
examples: List[tuple],
) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]:
"""Convert input list to torch tensor.
Args:
        examples: tuple of parallel lists; examples[0] holds the input_id
            lists and examples[1] the token_type_id lists
Returns:
attention_mask, input_ids_tensor, token_type_ids_tensor
"""
input_ids = examples[0]
token_type_ids = examples[1]
max_seq_len = min(max(len(input_id) for input_id in input_ids), MAX_SEQ_LEN)
input_ids_tensor = torch.zeros((len(input_ids), max_seq_len), dtype=torch.long)
token_type_ids_tensor = torch.zeros_like(input_ids_tensor)
attention_mask = torch.ones_like(input_ids_tensor)
for i, input_id in enumerate(input_ids):
cur_seq_len = len(input_id)
if cur_seq_len <= max_seq_len:
input_ids_tensor[i, :cur_seq_len] = torch.tensor(input_id, dtype=torch.long)
token_type_ids_tensor[i, :cur_seq_len] = torch.tensor(
token_type_ids[i], dtype=torch.long
)
attention_mask[i, cur_seq_len:] = 0
else:
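            # truncate and re-append the [SEP] token id (102 in the BERT vocab)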
input_ids_tensor[i] = torch.tensor(
input_id[: max_seq_len - 1] + [102], dtype=torch.long
)
token_type_ids_tensor[i] = torch.tensor(
token_type_ids[i][:max_seq_len], dtype=torch.long
)
return attention_mask, input_ids_tensor, token_type_ids_tensor
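# Minimal sketch (illustrative ids): shorter sequences are zero-padded to the
# longest one in the batch and the attention mask zeroes the padded positions.
def _example_get_bert_input():
    examples = ([[101, 7, 8, 102], [101, 9, 102]], [[0, 0, 0, 1], [0, 0, 1]])
    attention_mask, input_ids, token_type_ids = get_bert_input(examples)
    return attention_mask.tolist()  # [[1, 1, 1, 1], [1, 1, 1, 0]]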
def rank_values(logits: List[float], preds: List[tuple], top_k: int) -> List[tuple]:
"""Rank domain-slot pair corresponding values.
Args:
logits: prediction corresponding logits
preds: a list of triple labels
top_k: take top k prediction labels
Returns:
        list with the top-1 predicted (domain, slot, value) triple for each
        domain-slot pair
"""
top_k = min(len(preds), top_k)
preds_logits_pair = sorted(zip(preds, logits), key=lambda x: x[1], reverse=True)[
:top_k
]
ranking_dict = defaultdict(list)
for pred, logit in preds_logits_pair:
key = "-".join(pred[:2])
ranking_dict[key].append((pred[-1], logit))
preds = []
for ds, pred_score in ranking_dict.items():
domain, slot = ds.split("-")
value = sorted(pred_score, key=lambda x: x[-1], reverse=True)[0][0]
preds.append((domain, slot, value))
return preds
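# Minimal sketch (illustrative logits): rank_values keeps the best-scoring
# value for each domain-slot pair among the top-k predictions.
def _example_rank_values():
    preds = [("hotel", "price", "cheap"), ("hotel", "price", "expensive")]
    return rank_values([0.2, 0.9], preds, top_k=2)  # [('hotel', 'price', 'expensive')]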
|
# Copyright (C) 2019 Nanyang Wang, Yinda Zhang, Zhuwen Li, Yanwei Fu, Wei Liu, Yu-Gang Jiang, Fudan University
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import numpy as np
def construct_feed_dict(pkl, placeholders):
"""Construct feed dictionary."""
coord = pkl[0]
pool_idx = pkl[4]
faces = pkl[5]
# laplace = pkl[6]
lape_idx = pkl[7]
edges = []
for i in range(1,4):
adj = pkl[i][1]
edges.append(adj[0])
feed_dict = dict()
feed_dict.update({placeholders['features']: coord})
feed_dict.update({placeholders['edges'][i]: edges[i] for i in range(len(edges))})
feed_dict.update({placeholders['faces'][i]: faces[i] for i in range(len(faces))})
feed_dict.update({placeholders['pool_idx'][i]: pool_idx[i] for i in range(len(pool_idx))})
feed_dict.update({placeholders['lape_idx'][i]: lape_idx[i] for i in range(len(lape_idx))})
feed_dict.update({placeholders['support1'][i]: pkl[1][i] for i in range(len(pkl[1]))})
feed_dict.update({placeholders['support2'][i]: pkl[2][i] for i in range(len(pkl[2]))})
feed_dict.update({placeholders['support3'][i]: pkl[3][i] for i in range(len(pkl[3]))})
return feed_dict
|
# Generated by Django 2.2.19 on 2021-06-24 09:54
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('posts', '0004_auto_20210617_1146'),
]
operations = [
migrations.AlterModelOptions(
name='pagehit',
options={'ordering': ['client', '-count']},
),
]
|
import sys
from collections import deque
input = sys.stdin.readline
'''
Using BFS, we can track how the virus spreads second by second.
For each virus we need an array to store the cells it will spread to in the
next unit of time.
'''
# function
def bfs(N, S):
directions = ((1, 0), (-1, 0), (0, 1), (0, -1))
time_table = [[0 for _ in range(N)] for _ in range(N)]
while virus:
r, c, virus_name, time = virus.popleft()
if time >= S: break
for dr, dc in directions:
r2 = r + dr
c2 = c + dc
if not 0 <= r2 < N or not 0 <= c2 < N:
continue
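            # infect empty cells; a lower-numbered virus overwrites a
            # higher-numbered one that arrived in the same time step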
if test_tube[r2][c2] == 0 \
or (test_tube[r2][c2] > virus_name and time_table[r2][c2] == time + 1):
test_tube[r2][c2] = virus_name
time_table[r2][c2] = time + 1
virus.append((r2, c2, virus_name, time + 1))
# input
N, K = map(int, input().split())
test_tube = [list(map(int, input().split())) for _ in range(N)]
S, X, Y = map(int, input().split())
# process
# store the initial virus positions
virus = deque()
for r in range(N):
for c in range(N):
if test_tube[r][c] != 0:
virus.append((r, c, test_tube[r][c], 0))
# spread the viruses
bfs(N, S)
# output
print(test_tube[X - 1][Y - 1]) |
# -*- coding: utf-8 -*-
"""
Example script to serve as starting point for displaying and comparing results of motion correction versus no motion correction.
Prerequisite:
You should have executed the following on your command prompt
./run_simulations_thorax.sh
./run_MCIR_0.sh
./run_MCIR_1.sh
Authors: Kris Thielemans & Harry Tsoumpas
"""
#%% Initial imports
import numpy
import matplotlib.pyplot as plt
import stir
from stirextra import *
import os
#%% go to directory with input files
# adapt this path to your situation (or start everything in the exercises directory)
os.chdir(os.getenv('STIR_exercises_PATH'))
#%% Read in images
MCIR=to_numpy(stir.FloatVoxelsOnCartesianGrid.read_from_file('working_folder/MCIR/MCIR_64.hv'));
noMC=to_numpy(stir.FloatVoxelsOnCartesianGrid.read_from_file('working_folder/noMC/noMC_64.hv'));
Original=to_numpy(stir.FloatVoxelsOnCartesianGrid.read_from_file('EX_simulation/FDG_g1.hv'));
#%% bitmap display of images
# pick central slice
slice=int(Original.shape[0]/2);
fig=plt.figure();
maxforplot=Original.max()/2;
ax=plt.subplot(1,3,1);
plt.imshow(Original[slice,:,:,]);
plt.clim(0,maxforplot);
plt.colorbar();
plt.axis('off');
ax.set_title('Original');
ax=plt.subplot(1,3,2);
plt.imshow(MCIR[slice,:,:,]);
plt.clim(0,maxforplot);
plt.colorbar();
plt.axis('off');
ax.set_title('MCIR');
ax=plt.subplot(1,3,3);
plt.imshow(noMC[slice,:,:,]);
plt.clim(0,maxforplot);
plt.colorbar();
plt.axis('off');
ax.set_title('noMC');
fig.savefig('MCIR_vs_noMC_bitmaps.png')
#%% Display difference image
diff=MCIR-noMC;
fig=plt.figure()
ax=plt.subplot(1,1,1);
plt.imshow(diff[slice,:,:,]);
plt.clim(-maxforplot/50,maxforplot/50)
plt.colorbar();
plt.axis('off');
ax.set_title('diff');
fig.savefig('MCIR_vs_noMC_diff_bitmap.png')
#%% Display central horizontal profiles through the image
# pick a line close to central line
row=int(MCIR.shape[1]/2+3);
fig=plt.figure()
plt.plot(MCIR[slice,row,:],'b');
#plt.hold(True)
plt.plot(noMC[slice,row,:],'c');
plt.legend(('MCIR','noMC'));
#%% bitmap display of motion vectors
# The motion files forward & backward
# First read in motion information from images
ForwardMotion_X=to_numpy(stir.FloatVoxelsOnCartesianGrid.read_from_file('EX_motion/motion_g1d1.hv'));
ForwardMotion_Y=to_numpy(stir.FloatVoxelsOnCartesianGrid.read_from_file('EX_motion/motion_g1d2.hv'));
ForwardMotion_Z=to_numpy(stir.FloatVoxelsOnCartesianGrid.read_from_file('EX_motion/motion_g1d3.hv'));
BackwardMotion_X=to_numpy(stir.FloatVoxelsOnCartesianGrid.read_from_file('EX_motion/motion_inv_g1d1.hv'));
BackwardMotion_Y=to_numpy(stir.FloatVoxelsOnCartesianGrid.read_from_file('EX_motion/motion_inv_g1d2.hv'));
BackwardMotion_Z=to_numpy(stir.FloatVoxelsOnCartesianGrid.read_from_file('EX_motion/motion_inv_g1d3.hv'));
#%% bitmaps showing forward motion images
# In the current demo, they are quite boring: the example motion is just a translation
maxforplot=ForwardMotion_Z.max();
# pick central slice
slice=int(ForwardMotion_X.shape[0]/2);
plt.figure();
ax=plt.subplot(1,3,1);
plt.imshow(ForwardMotion_X[slice,:,:,]);
plt.clim(0,maxforplot)
plt.colorbar();
plt.axis('off');
ax.set_title('Forward_X');
ax=plt.subplot(1,3,2);
plt.imshow(ForwardMotion_Y[slice,:,:,]);
plt.clim(0,maxforplot)
plt.colorbar();
plt.axis('off');
ax.set_title('Forward_Y');
ax=plt.subplot(1,3,3);
plt.imshow(ForwardMotion_Z[slice,:,:,]);
plt.clim(0,maxforplot)
plt.colorbar();
plt.axis('off');
ax.set_title('Forward_Z');
#%% bitmaps showing backward motion images
maxforplot=BackwardMotion_Z.max();
# pick central slice
slice=int(BackwardMotion_X.shape[0]/2);
plt.figure();
ax=plt.subplot(1,3,1);
plt.imshow(BackwardMotion_X[slice,:,:,]);
plt.clim(0,maxforplot)
plt.colorbar();
plt.axis('off');
ax.set_title('Backward_X');
ax=plt.subplot(1,3,2);
plt.imshow(BackwardMotion_Y[slice,:,:,]);
plt.clim(0,maxforplot)
plt.colorbar();
plt.axis('off');
ax.set_title('Backward_Y');
ax=plt.subplot(1,3,3);
plt.imshow(BackwardMotion_Z[slice,:,:,]);
plt.clim(0,maxforplot)
plt.colorbar();
plt.axis('off');
ax.set_title('Backward_Z');
#%% close all plots
plt.close('all')
|
# Solution to [Python If-Else](https://www.hackerrank.com/challenges/py-if-else/problem)
#!/bin/python3
if __name__ == '__main__':
weird = "Weird"
not_weird = "Not Weird"
n = int(input().strip())
odd = (n % 2 == 1)
even = not odd
if odd:
print(weird)
elif even and 2 <= n <= 5:
print(not_weird)
elif even and 6 <= n <= 20:
print(weird)
elif even and n > 20:
print(not_weird) |
'''
Test the TMCL_Slave.
Created on 15.12.2020
@author: LK
'''
from PyTrinamicMicro.TMCL_Slave import TMCL_Slave
from PyTrinamicMicro.connections.virtual_tmcl_interface import virtual_tmcl_interface
from PyTrinamic.TMCL import TMCL_Request, TMCL_Reply, TMCL
from PyTrinamic.modules.TMCM0960.TMCM0960 import TMCM0960
import logging
import re
import struct
VERSION_PATTERN = "^\d\d\d\dV\d\d\d$"
logger = logging.getLogger(__name__)
logger.info("Test TMCL_Slave")
logger.info("Initializing virtual interface.")
host = virtual_tmcl_interface()
res = virtual_tmcl_interface()
# Add virtual commands to interface
host.send_request_only(TMCL_Request(1, TMCL.COMMANDS["GET_FIRMWARE_VERSION"], TMCM0960.ENUMs.VERSION_FORMAT_ASCII, 0, 0))
host.send_request_only(TMCL_Request(1, TMCL.COMMANDS["GET_FIRMWARE_VERSION"], TMCM0960.ENUMs.VERSION_FORMAT_BINARY, 0, 0))
host.send_request_only(TMCL_Request(1, TMCL.COMMANDS["GET_FIRMWARE_VERSION"], TMCM0960.ENUMs.VERSION_FORMAT_BUILD, 0, 0))
host.send_request_only(TMCL_Request(1, TMCL.COMMANDS["TMCL_UF0"], 0, 0, 0)) # stop
slave = TMCL_Slave()
c = 0
while(not(slave.status.stop)):
assert (c < 4), "TMCL_Slave did not stop on stop command"
if(host.request_available()):
logger.debug("Request available.")
request = host.receive_request()
if(not(slave.filter(request))):
continue
logger.debug("Request for this slave detected.")
reply = slave.handle_request(request)
res.send_reply(reply)
c += 1
version_str = ""
if(res.data_available()):
reply = res.receive_reply()
version_str = reply.versionString()
assert re.match(VERSION_PATTERN, version_str), "Invalid version string"
else:
assert False, "Not enough replies"
if(res.data_available()):
reply = res.receive_reply()
version = struct.unpack("BBBB", struct.pack(">I", reply.value))
assert ((version[0] == int(version_str[0:2])) and (version[1] == int(version_str[2:4]))
and (version[2] == int(version_str[5:6])) and (version[3] == int(version_str[6:8]))), "Binary version does not match version string"
else:
assert False, "Not enough replies"
if(res.data_available()):
reply = res.receive_reply()
else:
assert False, "Not enough replies"
logger.info("Test completed successfully.")
|
from django.contrib import admin
from .models import Reference
@admin.register(Reference)
class ReferenceAdmin(admin.ModelAdmin):
list_display = ['title', 'description', 'link', 'author', 'publish']
|
import os
import logging
from flask import Flask, render_template, Response, send_from_directory, request, current_app
flask_app = Flask(__name__)
logging.basicConfig()
log = logging.getLogger(__name__)
@flask_app.route("/")
def main():
return render_template('main.html', title='Inventory')
@flask_app.route("/small")
def small():
return render_template('small.html', title='pyDashie')
@flask_app.route("/dashboard/<dashlayout>/")
def custom_layout(dashlayout):
return render_template('%s.html'%dashlayout, title='pyDashie')
@flask_app.route("/assets/application.js")
def javascripts():
if not hasattr(current_app, 'javascripts'):
import coffeescript
scripts = [
'assets/javascripts/jquery.js',
'assets/javascripts/es5-shim.js',
'assets/javascripts/d3.v2.min.js',
'assets/javascripts/batman.js',
'assets/javascripts/batman.jquery.js',
'assets/javascripts/jquery.gridster.js',
'assets/javascripts/jquery.leanModal.min.js',
#'assets/javascripts/dashing.coffee',
'assets/javascripts/dashing.gridster.coffee',
'assets/javascripts/jquery.knob.js',
'assets/javascripts/rickshaw.min.js',
'assets/javascripts/dashing-chartjs.coffee',
#'assets/javascripts/application.coffee',
'assets/javascripts/app.js',
#'widgets/clock/clock.coffee',
'widgets/number/number.coffee',
'widgets/linechart/linechart.coffee',
]
nizzle = True
if not nizzle:
scripts = ['assets/javascripts/application.js']
output = []
for path in scripts:
output.append('// JS: %s\n' % path)
if '.coffee' in path:
log.info('Compiling Coffee for %s ' % path)
contents = coffeescript.compile_file(path)
else:
f = open(path)
contents = f.read()
f.close()
output.append(contents)
if nizzle:
f = open('/tmp/foo.js', 'w')
for o in output:
print(o, end="", file=f)
f.close()
f = open('/tmp/foo.js', 'rb')
output = f.read()
f.close()
current_app.javascripts = output
else:
current_app.javascripts = ''.join(output)
return Response(current_app.javascripts, mimetype='application/javascript')
@flask_app.route('/assets/application.css')
def application_css():
scripts = [
'assets/stylesheets/application.css',
]
output = ''
for path in scripts:
output = output + open(path).read()
return Response(output, mimetype='text/css')
@flask_app.route('/assets/images/<path:filename>')
def send_static_img(filename):
directory = os.path.join('assets', 'images')
return send_from_directory(directory, filename)
@flask_app.route('/views/<widget_name>.html')
def widget_html(widget_name):
html = '%s.html' % widget_name
path = os.path.join('widgets', widget_name, html)
if os.path.isfile(path):
f = open(path)
contents = f.read()
f.close()
return contents
import queue
class EventsManager:
def __init__(self):
self.events_queue = {}
self.last_events = {}
self.using_events = True
self.MAX_QUEUE_LENGTH = 20
self.stopped = False
events_manager = EventsManager()
@flask_app.route('/events')
def events():
if events_manager.using_events:
event_stream_port = request.environ['REMOTE_PORT']
current_event_queue = queue.Queue()
events_manager.events_queue[event_stream_port] = current_event_queue
current_app.logger.info('New Client %s connected. Total Clients: %s' %
(event_stream_port, len(events_manager.events_queue)))
#Start the newly connected client off by pushing the current last events
for event in events_manager.last_events.values():
current_event_queue.put(event)
return Response(pop_queue(current_event_queue), mimetype='text/event-stream')
return Response(events_manager.last_events.values(), mimetype='text/event-stream')
def pop_queue(current_event_queue):
while not events_manager.stopped:
try:
data = current_event_queue.get(timeout=0.1)
yield data
except queue.Empty:
#This makes the server quit nicely - previously the queue threads would block and never exit.
# This makes it keep checking for dead application
pass
def purge_streams():
    big_queues = [port for port, q in events_manager.events_queue.items()
                  if q.qsize() > events_manager.MAX_QUEUE_LENGTH]
    for big_queue in big_queues:
        current_app.logger.info('Client %s is stale. Disconnecting. Total Clients: %s' %
                                (big_queue, len(events_manager.events_queue)))
        del events_manager.events_queue[big_queue]
def close_stream(*args, **kwargs):
event_stream_port = args[2][1]
del events_manager.events_queue[event_stream_port]
log.info('Client %s disconnected. Total Clients: %s' % (event_stream_port, len(events_manager.events_queue)))
def run_sample_app():
import socketserver
socketserver.BaseServer.handle_error = close_stream
import app
app.run(flask_app, events_manager)
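# Usage sketch (widget id and payload are illustrative assumptions): pushing a
# server-sent event to every connected client queue; pop_queue above streams
# whatever strings land in these queues.
def _example_push_event(widget_id, body):
    import json
    event = 'data: %s\n\n' % json.dumps({'id': widget_id, 'body': body})
    events_manager.last_events[widget_id] = event
    for q in events_manager.events_queue.values():
        q.put(event)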
if __name__ == "__main__":
run_sample_app()
|
"""
Extractor module
"""
from nltk.tokenize import sent_tokenize
from .base import Pipeline
from .questions import Questions
from .tokenizer import Tokenizer
class Extractor(Pipeline):
"""
Class that uses an extractive question-answering model to extract content from a given text context.
"""
def __init__(self, embeddings, path, quantize=False, gpu=False, model=None, tokenizer=None):
"""
Builds a new extractor.
Args:
embeddings: embeddings model
path: path to qa model
quantize: True if model should be quantized before inference, False otherwise.
gpu: if gpu inference should be used (only works if GPUs are available)
model: optional existing pipeline model to wrap
tokenizer: Tokenizer class
"""
# Embeddings model
self.embeddings = embeddings
# QA Pipeline
self.pipeline = Questions(path, quantize, gpu, model)
        # Tokenizer class; use the default method if not set
self.tokenizer = tokenizer if tokenizer else Tokenizer
def __call__(self, queue, texts):
"""
Extracts answers to input questions. This method runs queries against a list of text, finds the top n best matches
and uses that as the question context. A question-answering model is then run against the context for the input question,
with the answer returned.
Args:
queue: input queue (name, query, question, snippet)
texts: list of text
Returns:
list of (name, answer)
"""
# Execute embeddings query
results = self.query([query for _, query, _, _ in queue], texts)
# Build question-context pairs
names, questions, contexts, snippets = [], [], [], []
for x, (name, _, question, snippet) in enumerate(queue):
# Build context using top n best matching segments
topn = sorted(results[x], key=lambda y: y[2], reverse=True)[:3]
context = " ".join([text for _, text, _ in sorted(topn, key=lambda y: y[0])])
names.append(name)
questions.append(question)
contexts.append(context)
snippets.append(snippet)
# Run qa pipeline and return answers
return self.answers(names, questions, contexts, snippets)
def query(self, queries, texts):
"""
Executes the extractor embeddings query. Returns results sorted by best match.
Args:
queries: list of embedding queries to run
texts: list of text
Returns:
list of (id, text, score)
"""
# Tokenize text
segments, tokenlist = [], []
for text in texts:
tokens = self.tokenizer.tokenize(text)
if tokens:
segments.append(text)
tokenlist.append(tokens)
        # Add index id to segments to preserve ordering after filters
segments = list(enumerate(segments))
# Build question-context pairs
results = []
for query in queries:
# Get list of required and prohibited tokens
must = [token.strip("+") for token in query.split() if token.startswith("+") and len(token) > 1]
mnot = [token.strip("-") for token in query.split() if token.startswith("-") and len(token) > 1]
# Tokenize search query
query = self.tokenizer.tokenize(query)
# List of matches
matches = []
# Get list of (id, score) - sorted by highest score
scores = self.embeddings.similarity(query, tokenlist)
for x, score in scores:
# Get segment text
text = segments[x][1]
# Add result if:
                #   - all required tokens are present or there are no required tokens, AND
                #   - no prohibited tokens are present or there are no prohibited tokens
if (not must or all([token.lower() in text.lower() for token in must])) and (
not mnot or all([token.lower() not in text.lower() for token in mnot])
):
matches.append(segments[x] + (score,))
# Add query matches sorted by highest score
results.append(matches)
return results
def answers(self, names, questions, contexts, snippets):
"""
Executes QA pipeline and formats extracted answers.
Args:
names: question identifiers/names
questions: questions
contexts: question context
snippets: flags to enable answer snippets per answer
Returns:
list of (name, answer)
"""
results = []
# Run qa pipeline
answers = self.pipeline(questions, contexts)
# Extract and format answer
for x, answer in enumerate(answers):
# Resolve snippet if necessary
if answer and snippets[x]:
answer = self.snippet(contexts[x], answer)
results.append((names[x], answer))
return results
def snippet(self, context, answer):
"""
Extracts text surrounding the answer within context.
Args:
context: full context
answer: answer within context
Returns:
text surrounding answer as a snippet
"""
# Searches for first sentence to contain answer
if answer:
for x in sent_tokenize(context):
if answer in x:
return x
return answer
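# Minimal sketch of the +/- token syntax handled by Extractor.query above:
# '+' marks required tokens, '-' prohibited ones (illustrative query string).
def _example_query_tokens(query="+wildfire -drill smoke"):
    must = [token.strip("+") for token in query.split() if token.startswith("+") and len(token) > 1]
    mnot = [token.strip("-") for token in query.split() if token.startswith("-") and len(token) > 1]
    return must, mnot  # (['wildfire'], ['drill'])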
|
"""Dialog to ask user for base class and class name
@copyright: 2016-2018 Dietmar Schwertberger
@license: MIT (see LICENSE.txt) - THIS PROGRAM COMES WITH NO WARRANTY
"""
import wx
import common, compat
class WindowDialog(wx.Dialog):
# dialog for builder function
parent = parent_property = None # used by StandaloneOrChildDialog
def __init__(self, klass, base_classes=None, message="Select Class", toplevel=True):
pos = wx.GetMousePosition()
wx.Dialog.__init__(self, common.main, -1, _(message), pos)
self.standalone = self.base = None
if base_classes:
# base class
self.base = wx.RadioBox(self, -1, ("base class"), choices=base_classes, style=wx.RA_VERTICAL)
self.base.SetSelection(0)
self.number = 1
self.class_names = set( common.app_tree.get_all_class_names() )
self.toplevel_names = set( common.app_tree.get_toplevel_class_names() )
self.toplevel = toplevel # if this is True, the name must be unique, as the generated class will have it
# class
self._klass = klass
if common.app_tree.app.language.lower() != 'xrc':
klass = self.get_next_class_name(klass)
self.klass = wx.TextCtrl(self, -1, klass)
self.klass.Bind(wx.EVT_TEXT, self.on_text) # for validation
        # frame or standalone? used by the derived class below, and only for menu bars and tool bars
if self.parent_property and self.parent:
# self.parent must be set by the derived class in this case; here we check whether it is set already
choices = ["Add to %s '%s'"%(self.parent.base[2:], self.parent.name),
"Standalone"]
self.standalone = wx.RadioBox(self, -1, ("Type"), choices=choices, style=wx.RA_VERTICAL)
self.standalone.Bind(wx.EVT_RADIOBOX, self.on_standalone)
if self.parent.properties[self.parent_property].value:
self.standalone.SetSelection(1)
self.standalone.Disable()
else:
self.standalone.SetSelection(0)
self.klass.Disable()
# layout
szr = wx.BoxSizer(wx.VERTICAL)
if self.standalone: szr.Add(self.standalone, 0, wx.ALL|wx.EXPAND, 5)
if self.base: szr.Add(self.base, 0, wx.ALL|wx.EXPAND, 5)
hszr = wx.BoxSizer(wx.HORIZONTAL)
hszr.Add(wx.StaticText(self, -1, _("class"),), 0, wx.ALIGN_CENTRE_VERTICAL|wx.ALIGN_RIGHT|wx.EXPAND|wx.ALL, 3)
hszr.Add(self.klass, 2)
szr.Add(hszr, 0, wx.EXPAND)
# buttons
btnbox = wx.BoxSizer(wx.HORIZONTAL)
self.btnOK = btnOK = wx.Button(self, wx.ID_OK, _('OK'))
btnCANCEL = wx.Button(self, wx.ID_CANCEL, _('Cancel'))
btnbox.Add(btnOK, 0, wx.ALL, 3)
btnbox.Add(btnCANCEL, 0, wx.ALL, 3)
btnOK.SetFocus()
szr.Add(btnbox, 0, wx.ALL|wx.ALIGN_CENTER, 3)
self.SetAutoLayout(True)
self.SetSizer(szr)
szr.Fit(self)
def get_next_class_name(self, name):
#names = [c.widget.klass for c in common.app_tree.root.children or []]
        if name not in self.class_names: return name
while True:
ret = '%s%d'%(name,self.number)
            if ret not in self.class_names: return ret
self.number += 1
def get_next_name(self, name):
names = common.app_tree.get_all_names()
names = [n for n in names if n.startswith(name)]
        if name not in names: return name
while True:
ret = '%s_%d'%(name,self.number)
            if ret not in names: return ret
self.number += 1
def on_text(self, event):
import re
validation_re = re.compile(r'^[a-zA-Z_]+[\w:.0-9-]*$')
name = event.GetString()
OK = bool( validation_re.match( name ) )
if not OK:
self.klass.SetBackgroundColour(wx.RED)
compat.SetToolTip(self.klass, "Class name not valid")
else:
#if name in [c.widget.klass for c in common.app_tree.root.children or []]:
if self.toplevel and name in self.toplevel_names:
self.klass.SetBackgroundColour( wx.RED )
compat.SetToolTip(self.klass, "Class name already in use for toplevel window")
OK = False
elif name in self.class_names:
# if the class name is in use already, indicate in yellow
self.klass.SetBackgroundColour( wx.Colour(255, 255, 0, 255) )
compat.SetToolTip(self.klass, "Class name not unique")
if self.toplevel and name in self.toplevel_names: OK = False
else:
self.klass.SetBackgroundColour( compat.wx_SystemSettings_GetColour(wx.SYS_COLOUR_WINDOW) )
compat.SetToolTip(self.klass, "")
self.klass.Refresh()
self.btnOK.Enable(OK)
event.Skip()
def on_standalone(self, event):
self.klass.Enable(event.Selection==1)
event.Skip()
def show(self):
"returns values if user has hit OK, None otherwise"
res = self.ShowModal()
if res != wx.ID_OK: return None
if self.standalone and self.standalone.GetSelection()==0:
return True
klass = self.klass.GetValue()
if not self.base:
return klass
base = self.base.GetStringSelection()
return klass, base
class StandaloneOrChildDialog(WindowDialog):
# for menu and tool bars
def __init__(self, klass, message, parent, parent_property):
self.parent = parent
self.parent_property = parent_property
WindowDialog.__init__(self, klass, None, message, True)
|
import numpy as np
from copy import deepcopy
from pycqed.measurement.waveform_control.block import Block
from pycqed.measurement.waveform_control import sequence
from pycqed.measurement.waveform_control import pulsar as ps
from pycqed.measurement.pulse_sequences.single_qubit_tek_seq_elts import \
sweep_pulse_params, add_preparation_pulses, pulse_list_list_seq
from pycqed.measurement.pulse_sequences.multi_qubit_tek_seq_elts import \
generate_mux_ro_pulse_list
import logging
log = logging.getLogger(__name__)
def get_pulse_dict_from_pars(pulse_pars):
'''
Returns a dictionary containing pulse_pars for all the primitive pulses
based on a single set of pulse_pars.
    This function deepcopies the pulse parameters, preventing accidental
    edits to the input dictionary.
input args:
pulse_pars: dictionary containing pulse_parameters
return:
pulses: dictionary of pulse_pars dictionaries
'''
pi_amp = pulse_pars['amplitude']
pi2_amp = pulse_pars['amplitude']*pulse_pars['amp90_scale']
pulses = {'I': deepcopy(pulse_pars),
'X180': deepcopy(pulse_pars),
'mX180': deepcopy(pulse_pars),
'X90': deepcopy(pulse_pars),
'mX90': deepcopy(pulse_pars),
'Y180': deepcopy(pulse_pars),
'mY180': deepcopy(pulse_pars),
'Y90': deepcopy(pulse_pars),
'mY90': deepcopy(pulse_pars)}
pulses['I']['amplitude'] = 0
pulses['mX180']['amplitude'] = -pi_amp
pulses['X90']['amplitude'] = pi2_amp
pulses['mX90']['amplitude'] = -pi2_amp
pulses['Y180']['phase'] = 90
pulses['mY180']['phase'] = 90
pulses['mY180']['amplitude'] = -pi_amp
pulses['Y90']['amplitude'] = pi2_amp
pulses['Y90']['phase'] = 90
pulses['mY90']['amplitude'] = -pi2_amp
pulses['mY90']['phase'] = 90
return pulses
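# Usage sketch (illustrative parameter values): deriving the primitive pulse
# dictionary from a single set of drive parameters.
def _example_pulse_dict():
    pulses = get_pulse_dict_from_pars({'amplitude': 0.5, 'amp90_scale': 0.5})
    return pulses['mX90']['amplitude'], pulses['Y90']['phase']  # (-0.25, 90)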
def Ramsey_with_flux_pulse_meas_seq(thetas, qb, X90_separation, verbose=False,
upload=True, return_seq=False,
cal_points=False):
'''
Performs a Ramsey with interleaved Flux pulse
Timings of sequence
<----- |fluxpulse|
|X90| ------------------- |X90| --- |RO|
sweep phase
timing of the flux pulse relative to the center of the first X90 pulse
Args:
thetas: numpy array of phase shifts for the second pi/2 pulse
        qb: qubit object (must have the methods get_operation_dict(),
            get_drive_pars(), etc.)
        X90_separation: float (separation of the two pi/2 pulses for Ramsey)
verbose: bool
upload: bool
return_seq: bool
Returns:
if return_seq:
seq: qcodes sequence
el_list: list of pulse elements
else:
seq_name: string
'''
raise NotImplementedError(
'Ramsey_with_flux_pulse_meas_seq has not been '
'converted to the latest waveform generation code and can not be used.')
qb_name = qb.name
operation_dict = qb.get_operation_dict()
pulse_pars = qb.get_drive_pars()
RO_pars = qb.get_RO_pars()
seq_name = 'Measurement_Ramsey_sequence_with_Flux_pulse'
seq = sequence.Sequence(seq_name)
el_list = []
pulses = get_pulse_dict_from_pars(pulse_pars)
flux_pulse = operation_dict["flux "+qb_name]
# Used for checking dynamic phase compensation
# if flux_pulse['amplitude'] != 0:
# flux_pulse['basis_rotation'] = {qb_name: -80.41028958782647}
flux_pulse['ref_point'] = 'end'
X90_2 = deepcopy(pulses['X90'])
X90_2['pulse_delay'] = X90_separation - flux_pulse['pulse_delay'] \
- X90_2['nr_sigma']*X90_2['sigma']
X90_2['ref_point'] = 'start'
for i, theta in enumerate(thetas):
X90_2['phase'] = theta*180/np.pi
if cal_points and (i == (len(thetas)-4) or i == (len(thetas)-3)):
el = multi_pulse_elt(i, station, [RO_pars])
elif cal_points and (i == (len(thetas)-2) or i == (len(thetas)-1)):
flux_pulse['amplitude'] = 0
el = multi_pulse_elt(i, station,
[pulses['X90'], flux_pulse, X90_2, RO_pars])
else:
el = multi_pulse_elt(i, station,
[pulses['X90'], flux_pulse, X90_2, RO_pars])
el_list.append(el)
seq.append_element(el, trigger_wait=True)
if upload:
station.pulsar.program_awgs(seq, *el_list, verbose=verbose)
if return_seq:
return seq, el_list
else:
return seq_name
def fluxpulse_scope_sequence(
delays, freqs, qb_name, operation_dict, cz_pulse_name,
ro_pulse_delay=None, cal_points=None, prep_params=None, upload=True):
'''
    Performs an X180 pulse on top of a flux pulse
Timings of sequence
| ---------- |X180| ---------------------------- |RO|
| --- | --------- fluxpulse ---------- |
<- delay ->
:param ro_pulse_delay: Can be 'auto' to start out the readout after
the end of the flux pulse or a delay in seconds to start a fixed
amount of time after the drive pulse. If not provided or set to
None, a default fixed delay of 100e-9 is used.
'''
if prep_params is None:
prep_params = {}
if ro_pulse_delay is None:
ro_pulse_delay = 100e-9
seq_name = 'Fluxpulse_scope_sequence'
ge_pulse = deepcopy(operation_dict['X180 ' + qb_name])
ge_pulse['name'] = 'FPS_Pi'
ge_pulse['element_name'] = 'FPS_Pi_el'
flux_pulse = deepcopy(operation_dict[cz_pulse_name])
flux_pulse['name'] = 'FPS_Flux'
flux_pulse['ref_pulse'] = 'FPS_Pi'
flux_pulse['ref_point'] = 'middle'
flux_pulse_delays = -np.asarray(delays) - flux_pulse.get(
'buffer_length_start', 0)
ro_pulse = deepcopy(operation_dict['RO ' + qb_name])
ro_pulse['name'] = 'FPS_Ro'
ro_pulse['ref_pulse'] = 'FPS_Pi'
ro_pulse['ref_point'] = 'end'
ro_pulse['pulse_delay'] = ro_pulse_delay
if ro_pulse_delay == 'auto':
ro_pulse['ref_point'] = 'middle'
ro_pulse['pulse_delay'] = \
flux_pulse['pulse_length'] - np.min(delays) + \
flux_pulse.get('buffer_length_end', 0) + \
flux_pulse.get('trans_length', 0)
pulses = [ge_pulse, flux_pulse, ro_pulse]
swept_pulses = sweep_pulse_params(
pulses, {'FPS_Flux.pulse_delay': flux_pulse_delays})
swept_pulses_with_prep = \
[add_preparation_pulses(p, operation_dict, [qb_name], **prep_params)
for p in swept_pulses]
seq = pulse_list_list_seq(swept_pulses_with_prep, seq_name, upload=False)
if cal_points is not None:
# add calibration segments
seq.extend(cal_points.create_segments(operation_dict, **prep_params))
seq.repeat_ro(f"RO {qb_name}", operation_dict)
log.debug(seq)
if upload:
ps.Pulsar.get_instance().program_awgs(seq)
return seq, np.arange(seq.n_acq_elements()), freqs
def fluxpulse_amplitude_sequence(amplitudes,
freqs,
qb_name,
operation_dict,
cz_pulse_name,
delay=None,
cal_points=None,
prep_params=None,
upload=True):
'''
    Performs an X180 pulse on top of a flux pulse
Timings of sequence
| ---------- |X180| ------------------------ |RO|
| --- | --------- fluxpulse ---------- |
'''
if prep_params is None:
prep_params = {}
seq_name = 'Fluxpulse_amplitude_sequence'
ge_pulse = deepcopy(operation_dict['X180 ' + qb_name])
ge_pulse['name'] = 'FPA_Pi'
ge_pulse['element_name'] = 'FPA_Pi_el'
flux_pulse = deepcopy(operation_dict[cz_pulse_name])
flux_pulse['name'] = 'FPA_Flux'
flux_pulse['ref_pulse'] = 'FPA_Pi'
flux_pulse['ref_point'] = 'middle'
if delay is None:
delay = flux_pulse['pulse_length'] / 2
flux_pulse['pulse_delay'] = -flux_pulse.get('buffer_length_start',
0) - delay
ro_pulse = deepcopy(operation_dict['RO ' + qb_name])
ro_pulse['name'] = 'FPA_Ro'
ro_pulse['ref_pulse'] = 'FPA_Pi'
ro_pulse['ref_point'] = 'middle'
ro_pulse['pulse_delay'] = flux_pulse['pulse_length'] - delay + \
flux_pulse.get('buffer_length_end', 0) + \
flux_pulse.get('trans_length', 0)
pulses = [ge_pulse, flux_pulse, ro_pulse]
swept_pulses = sweep_pulse_params(pulses,
{'FPA_Flux.amplitude': amplitudes})
swept_pulses_with_prep = \
[add_preparation_pulses(p, operation_dict, [qb_name], **prep_params)
for p in swept_pulses]
seq = pulse_list_list_seq(swept_pulses_with_prep, seq_name, upload=False)
if cal_points is not None:
# add calibration segments
seq.extend(cal_points.create_segments(operation_dict, **prep_params))
log.debug(seq)
if upload:
ps.Pulsar.get_instance().program_awgs(seq)
return seq, np.arange(seq.n_acq_elements()), freqs
def T2_freq_sweep_seq(amplitudes,
qb_name,
operation_dict,
cz_pulse_name,
flux_lengths,
phases,
cal_points=None,
upload=True):
    '''
    Ramsey-type T2 measurement: an X90 pulse, a flux pulse that shifts the
    qubit frequency, then a second, phase-swept X90 pulse and readout.
    Timings of sequence
    | ---|X90| --------------------- |X90| --- |RO|
    | ---------| ----- fluxpulse ----- |
    '''
len_amp = len(amplitudes)
len_flux = len(flux_lengths)
len_phase = len(phases)
amplitudes = np.repeat(amplitudes, len_flux * len_phase)
flux_lengths = np.tile(np.repeat(flux_lengths, len_phase), len_amp)
phases = np.tile(phases, len_flux * len_amp)
seq_name = 'T2_freq_sweep_seq'
ge_pulse = deepcopy(operation_dict['X90 ' + qb_name])
ge_pulse['name'] = 'DF_X90'
ge_pulse['element_name'] = 'DF_X90_el'
flux_pulse = deepcopy(operation_dict[cz_pulse_name])
flux_pulse['name'] = 'DF_Flux'
flux_pulse['ref_pulse'] = 'DF_X90'
flux_pulse['ref_point'] = 'end'
flux_pulse['pulse_delay'] = 0 #-flux_pulse.get('buffer_length_start', 0)
ge_pulse2 = deepcopy(operation_dict['X90 ' + qb_name])
ge_pulse2['name'] = 'DF_X90_2'
ge_pulse2['ref_pulse'] = 'DF_Flux'
ge_pulse2['ref_point'] = 'end'
ge_pulse2['pulse_delay'] = 0
ge_pulse2['element_name'] = 'DF_X90_el'
ro_pulse = deepcopy(operation_dict['RO ' + qb_name])
ro_pulse['name'] = 'DF_Ro'
ro_pulse['ref_pulse'] = 'DF_X90_2'
ro_pulse['ref_point'] = 'end'
ro_pulse['pulse_delay'] = 0
pulses = [ge_pulse, flux_pulse, ge_pulse2, ro_pulse]
swept_pulses = sweep_pulse_params(
pulses, {
'DF_Flux.amplitude': amplitudes,
'DF_Flux.pulse_length': flux_lengths,
'DF_X90_2.phase': phases
})
seq = pulse_list_list_seq(swept_pulses, seq_name, upload=False)
if cal_points is not None:
# add calibration segments
seq.extend(cal_points.create_segments(operation_dict))
seq.repeat_ro('RO ' + qb_name, operation_dict)
if upload:
ps.Pulsar.get_instance().program_awgs(seq)
return seq, np.arange(seq.n_acq_elements())
def T1_freq_sweep_seq(amplitudes,
qb_name,
operation_dict,
cz_pulse_name,
flux_lengths,
cal_points=None,
upload=True,
prep_params=None):
'''
    Performs an X180 pulse before changing the qubit frequency with the flux
Timings of sequence
| ---|X180| ------------------------------|RO|
| --------| --------- fluxpulse ---------- |
'''
if prep_params is None:
prep_params = {}
len_amp = len(amplitudes)
amplitudes = np.repeat(amplitudes, len(flux_lengths))
flux_lengths = np.tile(flux_lengths, len_amp)
seq_name = 'T1_freq_sweep_sequence'
ge_pulse = deepcopy(operation_dict['X180 ' + qb_name])
ge_pulse['name'] = 'DF_Pi'
ge_pulse['element_name'] = 'DF_Pi_el'
flux_pulse = deepcopy(operation_dict[cz_pulse_name])
flux_pulse['name'] = 'DF_Flux'
flux_pulse['ref_pulse'] = 'DF_Pi'
flux_pulse['ref_point'] = 'end'
flux_pulse['pulse_delay'] = 0 #-flux_pulse.get('buffer_length_start', 0)
ro_pulse = deepcopy(operation_dict['RO ' + qb_name])
ro_pulse['name'] = 'DF_Ro'
ro_pulse['ref_pulse'] = 'DF_Flux'
ro_pulse['ref_point'] = 'end'
ro_pulse['pulse_delay'] = flux_pulse.get('buffer_length_end', 0)
pulses = [ge_pulse, flux_pulse, ro_pulse]
swept_pulses = sweep_pulse_params(pulses, {
'DF_Flux.amplitude': amplitudes,
'DF_Flux.pulse_length': flux_lengths
})
swept_pulses_with_prep = \
[add_preparation_pulses(p, operation_dict, [qb_name], **prep_params)
for p in swept_pulses]
seq = pulse_list_list_seq(swept_pulses_with_prep, seq_name, upload=False)
if cal_points is not None:
# add calibration segments
seq.extend(cal_points.create_segments(operation_dict, **prep_params))
seq.repeat_ro('RO ' + qb_name, operation_dict)
if upload:
ps.Pulsar.get_instance().program_awgs(seq)
return seq, np.arange(seq.n_acq_elements())
def add_suffix(operation_list, suffix):
return [op + suffix for op in operation_list] |
from pymilvus_orm import Index
from pymilvus_orm.types import DataType
from pymilvus_orm.default_config import DefaultConfig
import sys
sys.path.append("..")
from check.param_check import *
from check.func_check import *
from utils.util_log import test_log as log
from common.common_type import *
def index_catch():
def wrapper(func):
def inner_wrapper(*args, **kwargs):
try:
return func(*args, **kwargs), True
except Exception as e:
log.error("[Index API Exception]%s: %s" % (str(func), str(e)))
return e, False
return inner_wrapper
return wrapper
@index_catch()
def func_req(_list, **kwargs):
if isinstance(_list, list):
func = _list[0]
if callable(func):
arg = []
if len(_list) > 1:
for a in _list[1:]:
arg.append(a)
return func(*arg, **kwargs)
return False, False
class ApiIndex:
index = None
def index_init(self, collection, field_name, index_params, name="", check_res=None, check_params=None, **kwargs):
""" In order to distinguish the same name of index """
func_name = sys._getframe().f_code.co_name
res, check = func_req([Index, collection, field_name, index_params, name], **kwargs)
self.index = res if check is True else None
check_result = CheckFunc(res, func_name, check_res, check_params, collection=collection, field_name=field_name,
index_params=index_params, name=name, **kwargs).run()
return res, check_result
def name(self, check_res=None, check_params=None):
func_name = sys._getframe().f_code.co_name
res, check = func_req([self.index.name])
check_result = CheckFunc(res, func_name, check_res, check_params).run()
return res, check_result
def params(self, check_res=None, check_params=None):
func_name = sys._getframe().f_code.co_name
res, check = func_req([self.index.params])
check_result = CheckFunc(res, func_name, check_res, check_params).run()
return res, check_result
def collection_name(self, check_res=None, check_params=None):
func_name = sys._getframe().f_code.co_name
res, check = func_req([self.index.collection_name])
check_result = CheckFunc(res, func_name, check_res, check_params).run()
return res, check_result
def field_name(self, check_res=None, check_params=None):
func_name = sys._getframe().f_code.co_name
res, check = func_req([self.index.field_name])
check_result = CheckFunc(res, func_name, check_res, check_params).run()
return res, check_result
def drop(self, check_res=None, check_params=None, **kwargs):
func_name = sys._getframe().f_code.co_name
res, check = func_req([self.index.drop], **kwargs)
check_result = CheckFunc(res, func_name, check_res, check_params, **kwargs).run()
return res, check_result
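# Minimal sketch (illustrative function): index_catch converts a raised
# exception into an (exception, False) pair instead of propagating it.
@index_catch()
def _example_failing_call():
    raise ValueError("boom")
# _example_failing_call() -> (ValueError('boom'), False)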
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: Apache-2.0
# SPDX-FileCopyrightText: © 2021 Massachusetts Institute of Technology.
# SPDX-FileCopyrightText: © 2021 Lee McCuller <[email protected]>
# NOTICE: authors should document their contributions concisely in NOTICE
# with details inline in source files, comments, and docstrings.
"""
"""
import os
import re
from wavestate.bunch import Bunch
import subprocess
RE_item = re.compile(r'^(\s*)(\w+)::(.*)::(.*)$')
def pytest_collection_parse(fname):
# mapping of packages to modules
packages = {None: []}
# mapping of modules to tests
modules = {}
tests = {}
current_package = None
current_module = None
last_indent = ""
with open(fname, "r") as f:
for line in f.readlines():
line = line.rstrip()
m = RE_item.match(line)
if m:
indent = m.group(1)
indent_diff = len(indent) - len(last_indent)
if indent_diff < -4:
current_module = None
current_package = None
elif indent_diff < -2:
if current_module is None:
current_package = None
else:
current_module = None
last_indent = indent
itemtype = m.group(2)
itemname = m.group(3)
itempath = m.group(4)
if itemtype == 'Package':
current_package = itemname
packages[current_package] = []
elif itemtype == 'Module':
current_module = itemname
packages[current_package].append(current_module)
modules[current_module] = []
fpath, fmod = os.path.split(itemname)
modname, modext = os.path.splitext(fmod)
tests[modname] = itempath
elif itemtype == 'Function':
current_function = itemname
modules[current_module].append(current_function)
else:
raise RuntimeError("Unrecognized Group Type")
if current_module is None:
raise RuntimeError("Didn't parse any modules! Did the format change?")
return Bunch(
packages = packages,
modules = modules,
tests = tests,
)
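# Usage sketch (the collection format is assumed from RE_item above; file
# contents and paths are illustrative).
def _example_parse(fname="/tmp/collect.txt"):
    with open(fname, "w") as f:
        f.write("Package::pkg::/src/pkg\n")
        f.write("  Module::test_mod.py::/src/pkg/test_mod.py\n")
        f.write("    Function::test_one::/src/pkg/test_mod.py\n")
    return pytest_collection_parse(fname)  # Bunch with one package/module/test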
|
"""
Information about individual spreadsheets as they're used in the spreadsheet
visualizer.
"""
from nest_py.core.data_types.tablelike_schema import TablelikeSchema
COLLECTION_NAME = 'ssviz_spreadsheets'
def generate_schema():
schema = TablelikeSchema(COLLECTION_NAME)
schema.add_foreignid_attribute('file_id')
schema.add_boolean_attribute('is_file_samples_as_rows')
# the sample names from the spreadsheet, ordered as they appear in the file
schema.add_categoric_list_attribute('sample_names', valid_values=None)
# the feature names from the spreadsheet, ordered as they appear in the file
schema.add_categoric_list_attribute('feature_names', valid_values=None)
# the feature types from the spreadsheet, ordered as they appear in the file
schema.add_categoric_list_attribute('feature_types', \
valid_values=['numeric', 'categoric', 'other'])
schema.add_int_list_attribute('feature_nan_counts')
schema.add_int_list_attribute('sample_nan_counts')
schema.add_index(['file_id'])
return schema
|
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RUnits(RPackage):
"""Measurement Units for R Vectors
Support for measurement units in R vectors, matrices and arrays: automatic
propagation, conversion, derivation and simplification of units; raising
errors in case of unit incompatibility. Compatible with the POSIXct, Date
and difftime classes. Uses the UNIDATA udunits library and unit database
for unit compatibility checking and conversion. Documentation about
'units' is provided in the paper by Pebesma, Mailund & Hiebert (2016,
<doi:10.32614/RJ-2016-061>), included in this package as a vignette; see
'citation("units")' for details."""
homepage = "https://github.com/edzer/units/"
url = "https://cloud.r-project.org/src/contrib/units_0.4-6.tar.gz"
list_url = "https://cloud.r-project.org/src/contrib/Archive/units"
version('0.6-7', sha256='3f73a62bafdbe0f93bbf00ac4b1adb8f919dd04649ff8f1d007f2986e35cb7e5')
version('0.6-3', sha256='03de88d9dcfe80d22dd3813413f33657c576aed24a8091dbfc7f68602020a64f')
version('0.6-2', sha256='5e286775d0712c8e15b6ae3a533d4c4349b0f6410c2d9d897ca519c3d0e5f170')
version('0.4-6', sha256='db383c9b7ec221a5da29a2ddf4f74f9064c44ea2102ea7e07cc1cc5bb30fa1ef')
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', when='@:0.5-1', type=('build', 'run'))
depends_on('udunits', when='@0.6-0:')
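# Usage sketch: with this recipe on Spack's repo path the package builds via
#   spack install r-units
# and a version can be pinned as r-units@0.6-7 (illustrative choice).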
|
# import codecs
import codecs
import binascii
s = b'I love python.'
# Using codecs.decode() method
gfg = codecs.decode(s)
print(gfg)
print(binascii.crc32(b"hello world"))
# Or, in two pieces:
crc = binascii.crc32(b"hello")
crc = binascii.crc32(b" world", crc)
print('crc32 = {:#010x}'.format(crc)) |
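# Both computations above yield the same checksum; for b"hello world" the
# printed value is crc32 = 0x0d4a1185.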
class Animal(object):
# weight
# color
def __init__(self,weight=None, color=None):
print("Initializing...")
self.weight = weight
self.color = color
def __str__(self):
return "Animal( weight: {}, color: {})".format(self.weight, self.color)
class Mammal(Animal):
def __init__(self, weight = None, color = None, furry = None):
super().__init__(weight, color)
self.furry = furry
def __str__(self):
return "Mammal({}, {})".format(self.weight,self.furry)
print("Creating animal")
a = Mammal(30,"red","full")
print(a, a.weight)
b = Animal(40)
print(b)
a.wheels = 24
print(a.wheels)
|
import os
import numpy as np
from netCDF4 import Dataset
from compliance_checker.ioos import (
IOOS0_1Check,
IOOS1_1Check,
IOOS1_2_PlatformIDValidator,
IOOS1_2Check,
NamingAuthorityValidator,
)
from compliance_checker.tests import BaseTestCase
from compliance_checker.tests.helpers import MockTimeSeries, MockVariable
from compliance_checker.tests.resources import STATIC_FILES
from compliance_checker.tests.test_cf import get_results
class TestIOOS0_1(BaseTestCase):
"""
Tests for the IOOS Inventory Metadata v0.1
"""
def setUp(self):
# Use the NCEI Gold Standard Point dataset for IOOS checks
self.ds = self.load_dataset(STATIC_FILES["ncei_gold_point_1"])
self.ioos = IOOS0_1Check()
def test_cc_meta(self):
assert self.ioos._cc_spec == "ioos"
assert self.ioos._cc_spec_version == "0.1"
def test_global_attributes(self):
"""
Tests that all global attributes checks are working
"""
        # Create an empty dataset that writes to /dev/null. This acts as a
        # temporary, in-memory netCDF file that never gets written to disk.
nc_obj = Dataset(os.devnull, "w", diskless=True)
self.addCleanup(nc_obj.close)
results = self.ioos.check_global_attributes(nc_obj)
for result in results:
self.assert_result_is_bad(result)
attrs = [
"acknowledgement",
"publisher_email",
"institution",
"publisher_name",
"Conventions",
]
for attr in attrs:
setattr(nc_obj, attr, "test")
results = self.ioos.check_global_attributes(nc_obj)
for result in results:
self.assert_result_is_good(result)
def test_variable_attributes(self):
"""
Tests that the platform variable attributes check is working
"""
        # Create an empty dataset that writes to /dev/null. This acts as a
        # temporary, in-memory netCDF file that never gets written to disk.
nc_obj = Dataset(os.devnull, "w", diskless=True)
self.addCleanup(nc_obj.close)
# The dataset needs at least one variable to check that it's missing
# all the required attributes.
nc_obj.createDimension("time", 1)
nc_obj.createVariable("platform", "S1", ())
platform = nc_obj.variables["platform"]
results = self.ioos.check_variable_attributes(nc_obj)
for result in results:
self.assert_result_is_bad(result)
platform.long_name = "platform"
platform.short_name = "platform"
platform.source = "glider"
platform.ioos_name = "urn:ioos:station:glos:leorgn"
platform.wmo_id = "1234"
platform.comment = "test"
results = self.ioos.check_variable_attributes(nc_obj)
for result in results:
self.assert_result_is_good(result)
def test_variable_units(self):
"""
Tests that the variable units test is working
"""
        # This check tests that the units attribute is present on EVERY variable.
        # Create an empty dataset that writes to /dev/null. This acts as a
        # temporary, in-memory netCDF file that never gets written to disk.
nc_obj = Dataset(os.devnull, "w", diskless=True)
self.addCleanup(nc_obj.close)
# The dataset needs at least one variable to check that it's missing
# all the required attributes.
nc_obj.createDimension("time", 1)
nc_obj.createVariable("sample_var", "d", ("time",))
sample_var = nc_obj.variables["sample_var"]
results = self.ioos.check_variable_units(nc_obj)
self.assert_result_is_bad(results)
sample_var.units = "m"
sample_var.short_name = "sample_var"
results = self.ioos.check_variable_units(nc_obj)
self.assert_result_is_good(results)
def test_altitude_units(self):
"""
Tests that the altitude variable units test is working
"""
results = self.ioos.check_altitude_units(self.ds)
self.assert_result_is_good(results)
# Now test an nc file with a 'z' variable without units
        # Create an empty dataset that writes to /dev/null. This acts as a
        # temporary, in-memory netCDF file that never gets written to disk.
nc_obj = Dataset(os.devnull, "w", diskless=True)
self.addCleanup(nc_obj.close)
# The dataset needs at least one variable to check that it's missing
# all the required attributes.
nc_obj.createDimension("time", 1)
nc_obj.createVariable("z", "d", ("time",))
z = nc_obj.variables["z"]
z.short_name = "sample_var"
results = self.ioos.check_variable_units(nc_obj)
self.assert_result_is_bad(results)
class TestIOOS1_1(BaseTestCase):
"""
Tests for the compliance checker implementation of IOOS Metadata Profile
for NetCDF, Version 1.1
"""
def setUp(self):
# Use the IOOS 1_1 dataset for testing
self.ds = self.load_dataset(STATIC_FILES["ioos_gold_1_1"])
self.ioos = IOOS1_1Check()
def test_cc_meta(self):
assert self.ioos._cc_spec == "ioos"
assert self.ioos._cc_spec_version == "1.1"
def test_required_attributes(self):
"""
Tests that required attributes test is working properly
"""
results = self.ioos.check_high(self.ds)
for result in results:
self.assert_result_is_good(result)
    def test_recommended_attributes(self):
"""
Tests that recommended attributes test is working properly
"""
results = self.ioos.check_recommended(self.ds)
for result in results:
self.assert_result_is_good(result)
def test_bad_platform_variables(self):
"""
Tests that the platform variable attributes check is working
"""
        # Create an empty dataset that writes to /dev/null. This acts as a
        # temporary, in-memory netCDF file that never gets written to disk.
nc_obj = Dataset(os.devnull, "w", diskless=True)
self.addCleanup(nc_obj.close)
# The dataset needs at least one variable to check that it's missing
# all the required attributes.
nc_obj.createDimension("time", 1)
nc_obj.platform = "platform"
# global attribute 'platform' points to variable that does not exist in dataset
results = self.ioos.check_platform_variables(nc_obj)
for result in results:
self.assert_result_is_bad(result)
def test_good_platform_variables(self):
"""
Tests that the platform variable attributes check is working
"""
results = self.ioos.check_platform_variables(self.ds)
for result in results:
self.assert_result_is_good(result)
def test_bad_geophysical_vars_fill_value(self):
"""
Tests that the geophysical variable _FillValue check is working
"""
        # Create an empty dataset that writes to /dev/null. This acts as a
        # temporary, in-memory netCDF file that never gets written to disk.
nc_obj = Dataset(os.devnull, "w", diskless=True)
self.addCleanup(nc_obj.close)
# The dataset needs at least one variable to check that it's missing
# all the required attributes.
nc_obj.createDimension("time", 1)
nc_obj.createVariable("sample_var", "d", ("time",))
# Define some variable attributes but don't specify _FillValue
sample_var = nc_obj.variables["sample_var"]
sample_var.units = "m"
sample_var.short_name = "temp"
        # Missing _FillValue should cause the check to fail
results = self.ioos.check_geophysical_vars_fill_value(nc_obj)
for result in results:
self.assert_result_is_bad(result)
def test_good_geophysical_vars_fill_value(self):
"""
Tests that the geophysical variable _FillValue check is working
"""
results = self.ioos.check_geophysical_vars_fill_value(self.ds)
for result in results:
self.assert_result_is_good(result)
    def test_bad_geophysical_vars_standard_name(self):
        """
        Tests that the geophysical variable standard_name check is working
        """
        # Create an empty dataset that writes to /dev/null. This acts as a
        # temporary, in-memory netCDF file that never gets written to disk.
nc_obj = Dataset(os.devnull, "w", diskless=True)
self.addCleanup(nc_obj.close)
# The dataset needs at least one variable to check that it's missing
# all the required attributes.
nc_obj.createDimension("time", 1)
nc_obj.createVariable("sample_var", "d", ("time",))
        # Define some variable attributes but don't specify standard_name
sample_var = nc_obj.variables["sample_var"]
sample_var.units = "m"
sample_var.short_name = "temp"
        # Missing standard_name should cause the check to fail
results = self.ioos.check_geophysical_vars_standard_name(nc_obj)
for result in results:
self.assert_result_is_bad(result)
def test_good_geophysical_vars_standard_name(self):
"""
        Tests that the geophysical variable standard_name check is working
"""
results = self.ioos.check_geophysical_vars_standard_name(self.ds)
for result in results:
self.assert_result_is_good(result)
def test_bad_units(self):
"""
Tests that the valid units check is working
"""
        # Create an empty dataset that writes to /dev/null. This acts as a
        # temporary, in-memory netCDF file that never gets written to disk.
nc_obj = Dataset(os.devnull, "w", diskless=True)
self.addCleanup(nc_obj.close)
# The dataset needs at least one variable to check that it's missing
# all the required attributes.
nc_obj.createDimension("time", 1)
nc_obj.createVariable("temperature", "d", ("time",))
        # Define some variable attributes, including deliberately invalid units
sample_var = nc_obj.variables["temperature"]
sample_var.units = "degC" # Not valid units
sample_var.short_name = "temp"
        # Invalid units should cause the check to fail
        results = self.ioos.check_units(nc_obj)
for result in results:
self.assert_result_is_bad(result)
def test_good_units(self):
"""
Tests that the valid units check is working
"""
results = self.ioos.check_units(self.ds)
for result in results:
self.assert_result_is_good(result)
class TestIOOS1_2(BaseTestCase):
"""
Tests for the compliance checker implementation of IOOS Metadata Profile
    for NetCDF, Version 1.2
"""
def setUp(self):
self.ioos = IOOS1_2Check()
def test_check_geophysical_vars_have_attrs(self):
# create geophysical variable
ds = MockTimeSeries() # time, lat, lon, depth
temp = ds.createVariable("temp", np.float64, dimensions=("time",))
# should fail here
results = self.ioos.check_geophysical_vars_have_attrs(ds)
scored, out_of, messages = get_results(results)
self.assertLess(scored, out_of)
# set the necessary attributes
ds = MockTimeSeries(default_fill_value=9999999999.0) # time, lat, lon, depth
temp = ds.createVariable(
"temp", np.float64, fill_value=9999999999.0
) # _FillValue
temp.setncattr("missing_value", 9999999999.0)
temp.setncattr("standard_name", "sea_surface_temperature")
temp.setncattr(
"standard_name_url",
"http://cfconventions.org/Data/cf-standard-names/64/build/cf-standard-name-table.html",
)
temp.setncattr("units", "degree_C")
temp.setncattr("platform", "myPlatform")
results = self.ioos.check_geophysical_vars_have_attrs(ds)
scored, out_of, messages = get_results(results)
self.assertEqual(scored, out_of)
def test_check_geospatial_vars_have_attrs(self):
# create geophysical variable
ds = MockTimeSeries() # time, lat, lon, depth
temp = ds.createVariable("temp", np.float64, dimensions=("time",))
# should fail here
results = self.ioos.check_geospatial_vars_have_attrs(ds)
scored, out_of, messages = get_results(results)
self.assertLess(scored, out_of)
# should pass - default_fill_value sets _FillValue attr
ds = MockTimeSeries(default_fill_value=9999999999.0) # time, lat, lon, depth
ds.variables["time"].setncattr("standard_name", "time")
ds.variables["time"].setncattr(
"standard_name_url",
"http://cfconventions.org/Data/cf-standard-names/64/build/cf-standard-name-table.html",
)
ds.variables["time"].setncattr("units", "hours since 1970-01-01T00:00:00")
ds.variables["time"].setncattr("missing_value", 9999999999.0)
results = self.ioos.check_geospatial_vars_have_attrs(ds)
scored, out_of, messages = get_results(results)
self.assertEqual(scored, out_of)
def test_check_contributor_role_and_vocabulary(self):
ds = MockTimeSeries() # time, lat, lon, depth
# no contributor_role or vocab, fail both
results = self.ioos.check_contributor_role_and_vocabulary(ds)
self.assertFalse(all(r.value for r in results))
# bad contributor_role and vocab
ds.setncattr("contributor_role", "bad")
ds.setncattr("contributor_role_vocabulary", "bad")
results = self.ioos.check_contributor_role_and_vocabulary(ds)
self.assertFalse(all(r.value for r in results))
# good role, bad vocab
ds.setncattr("contributor_role", "contributor")
results = self.ioos.check_contributor_role_and_vocabulary(ds)
self.assertTrue(results[0].value)
self.assertEqual(results[0].msgs, [])
self.assertFalse(results[1].value)
# bad role, good vocab
ds.setncattr("contributor_role", "bad")
ds.setncattr(
"contributor_role_vocabulary",
"http://vocab.nerc.ac.uk/collection/G04/current/",
)
results = self.ioos.check_contributor_role_and_vocabulary(ds)
self.assertFalse(results[0].value)
self.assertTrue(results[1].value)
self.assertEqual(results[1].msgs, [])
# good role, good vocab
ds.setncattr("contributor_role", "contributor")
ds.setncattr(
"contributor_role_vocabulary",
"http://vocab.nerc.ac.uk/collection/G04/current/",
)
results = self.ioos.check_contributor_role_and_vocabulary(ds)
self.assertTrue(results[0].value)
self.assertEqual(results[0].msgs, [])
self.assertTrue(results[1].value)
self.assertEqual(results[1].msgs, [])
ds.setncattr("contributor_role", "resourceProvider")
ds.setncattr(
"contributor_role_vocabulary",
"https://www.ngdc.noaa.gov/wiki/index.php?title=ISO_19115_and_19115-2_CodeList_Dictionaries#CI_RoleCode",
)
results = self.ioos.check_contributor_role_and_vocabulary(ds)
self.assertTrue(results[0].value)
self.assertEqual(results[0].msgs, [])
self.assertTrue(results[1].value)
self.assertEqual(results[1].msgs, [])
def test_check_creator_and_publisher_type(self):
"""
Checks the creator_type and publisher_type global attributes with
the following values:
Empty: Valid, defaults to "person" when not specified, which is
contained in the list of valid values.
Bad values: Invalid, not contained in list of valid values.
Good values: Valid, contained in list.
"""
ds = MockTimeSeries()
# values which are not set/specified default to person, which is valid
result_list = self.ioos.check_creator_and_publisher_type(ds)
self.assertTrue(all(res.value for res in result_list))
# create invalid values for attribute
ds.setncattr("creator_type", "PI")
ds.setncattr("publisher_type", "Funder")
result_list = self.ioos.check_creator_and_publisher_type(ds)
err_regex = (
r"^If specified, \w+_type must be in value list "
r"\(\['group', 'institution', 'person', 'position'\]\)$"
)
for res in result_list:
self.assertFalse(res.value)
self.assertRegex(res.msgs[0], err_regex)
# good values
ds.setncattr("creator_type", "person")
ds.setncattr("publisher_type", "institution")
result_list = self.ioos.check_creator_and_publisher_type(ds)
self.assertTrue(all(res.value for res in result_list))
def test_check_gts_ingest_global(self):
ds = MockTimeSeries() # time, lat, lon, depth
# no gts_ingest_requirements, should pass
result = self.ioos.check_gts_ingest_global(ds)
self.assertTrue(result.value)
self.assertEqual(result.msgs, [])
# passing value
ds.setncattr("gts_ingest", "true")
result = self.ioos.check_gts_ingest_global(ds)
self.assertTrue(result.value)
self.assertEqual(result.msgs, [])
ds.setncattr("gts_ingest", "false")
result = self.ioos.check_gts_ingest_global(ds)
self.assertTrue(result.value)
ds.setncattr("gts_ingest", "notgood")
result = self.ioos.check_gts_ingest_global(ds)
self.assertFalse(result.value)
def test_check_gts_ingest_requirements(self):
ds = MockTimeSeries() # time, lat, lon, depth
# NOTE: this check will always have a "failing" result; see
# https://github.com/ioos/compliance-checker/issues/759#issuecomment-625356938
# and subsequent discussion
# no gts_ingest_requirements, should pass
result = self.ioos.check_gts_ingest_requirements(ds)
self.assertFalse(result.value)
# flag for ingest, no variables flagged - default pass
ds.setncattr("gts_ingest", "true")
result = self.ioos.check_gts_ingest_requirements(ds)
self.assertFalse(result.value)
# give one variable the gts_ingest attribute
# no standard_name or ancillary vars, should fail
ds.variables["time"].setncattr("gts_ingest", "true")
result = self.ioos.check_gts_ingest_requirements(ds)
self.assertFalse(result.value)
# no ancillary vars, should fail
ds.variables["time"].setncattr("gts_ingest", "true")
ds.variables["time"].setncattr("standard_name", "time")
result = self.ioos.check_gts_ingest_requirements(ds)
self.assertFalse(result.value)
self.assertIn(
"The following variables did not qualify for NDBC/GTS Ingest: time\n",
result.msgs,
)
# set ancillary var with bad standard name
tmp = ds.createVariable("tmp", np.byte, ("time",))
tmp.setncattr("standard_name", "bad")
ds.variables["time"].setncattr("ancillary_variables", "tmp")
result = self.ioos.check_gts_ingest_requirements(ds)
self.assertFalse(result.value)
self.assertIn(
"The following variables did not qualify for NDBC/GTS Ingest: time\n",
result.msgs,
)
# good ancillary var standard name, time units are bad
tmp.setncattr("standard_name", "aggregate_quality_flag")
ds.variables["time"].setncattr("units", "bad since bad")
result = self.ioos.check_gts_ingest_requirements(ds)
self.assertFalse(result.value)
self.assertIn(
"The following variables did not qualify for NDBC/GTS Ingest: time\n",
result.msgs,
)
# good ancillary var stdname, good units, pass
tmp.setncattr("standard_name", "aggregate_quality_flag")
ds.variables["time"].setncattr("units", "seconds since 1970-01-01T00:00:00Z")
result = self.ioos.check_gts_ingest_requirements(ds)
self.assertFalse(result.value)
self.assertIn(
"The following variables qualified for NDBC/GTS Ingest: time\n", result.msgs
)
def test_check_instrument_variables(self):
ds = MockTimeSeries() # time, lat, lon, depth
# no instrument variable, should pass
results = self.ioos.check_instrument_variables(ds)
scored, out_of, messages = get_results(results)
self.assertEqual(scored, out_of)
temp = ds.createVariable("temp", np.float64, dimensions=("time",))
temp.setncattr("cf_role", "timeseries")
temp.setncattr("standard_name", "sea_surface_temperature")
temp.setncattr("units", "degree_C")
temp.setncattr("axis", "Y")
temp.setncattr("instrument", "myInstrument")
temp[:] = 45.0
instr = ds.createVariable("myInstrument", np.float64, dimensions=("time",))
# give instrument variable with component
instr.setncattr("component", "someComponent")
results = self.ioos.check_instrument_variables(ds)
scored, out_of, messages = get_results(results)
self.assertEqual(scored, out_of)
# give discriminant
instr.setncattr("discriminant", "someDiscriminant")
results = self.ioos.check_instrument_variables(ds)
scored, out_of, messages = get_results(results)
self.assertEqual(scored, out_of)
# bad component
instr.setncattr("component", 45)
results = self.ioos.check_instrument_variables(ds)
scored, out_of, messages = get_results(results)
self.assertLess(scored, out_of)
def test_check_wmo_platform_code(self):
ds = MockTimeSeries() # time, lat, lon, depth
# no wmo_platform_code, pass
result = self.ioos.check_wmo_platform_code(ds)
self.assertTrue(result.value)
self.assertEqual(result.msgs, [])
# valid code
ds.setncattr("wmo_platform_code", "12345")
result = self.ioos.check_wmo_platform_code(ds)
self.assertTrue(result.value)
# valid code
ds.setncattr("wmo_platform_code", "7654321")
result = self.ioos.check_wmo_platform_code(ds)
self.assertTrue(result.value)
# alphanumeric, valid
ds.setncattr("wmo_platform_code", "abcd1")
result = self.ioos.check_wmo_platform_code(ds)
self.assertTrue(result.value)
# invalid length, fail
ds.setncattr("wmo_platform_code", "123")
result = self.ioos.check_wmo_platform_code(ds)
self.assertFalse(result.value)
# alphanumeric len 7, fail
ds.setncattr("wmo_platform_code", "1a2b3c7")
result = self.ioos.check_wmo_platform_code(ds)
self.assertFalse(result.value)
def test_check_standard_name(self):
ds = MockTimeSeries() # time, lat, lon, depth
# no standard names
results = self.ioos.check_standard_name(ds)
scored, out_of, messages = get_results(results)
self.assertLess(scored, out_of)
# give standard names to all variables
ds.variables["time"].setncattr("standard_name", "time")
ds.variables["lon"].setncattr("standard_name", "longitude")
ds.variables["lat"].setncattr("standard_name", "latitude")
ds.variables["depth"].setncattr("standard_name", "depth")
results = self.ioos.check_standard_name(ds)
scored, out_of, messages = get_results(results)
self.assertEqual(scored, out_of)
# add a QARTOD variable, no standard name - should fail
qr = ds.createVariable("depth_qc", np.byte)
qr.setncattr("flag_meanings", "blah")
results = self.ioos.check_standard_name(ds)
scored, out_of, messages = get_results(results)
self.assertLess(scored, out_of)
# bad standard name
qr.setncattr("standard_name", "blah")
results = self.ioos.check_standard_name(ds)
scored, out_of, messages = get_results(results)
self.assertLess(scored, out_of)
# good standard name
qr.setncattr("standard_name", "spike_test_quality_flag")
results = self.ioos.check_standard_name(ds)
scored, out_of, messages = get_results(results)
self.assertEqual(scored, out_of)
def test_naming_authority_validation(self):
test_attr_name = "naming_authority"
validator = NamingAuthorityValidator()
# check URL - should pass
self.assertTrue(validator.validate(test_attr_name, "https://ioos.us")[0])
# check reverse DNS - should pass
self.assertTrue(validator.validate(test_attr_name, "edu.ucar.unidata")[0])
# email address is neither of the above, so should fail
bad_result = validator.validate(test_attr_name, "[email protected]")
self.assertFalse(bad_result[0])
self.assertEqual(
bad_result[1],
[
"naming_authority should either be a URL or a "
'reversed DNS name (e.g "edu.ucar.unidata")'
],
)
def test_platform_id_validation(self):
attn = "platform_id"
attv = "alphaNum3R1C"
v = IOOS1_2_PlatformIDValidator()
self.assertTrue(v.validate(attn, attv)[0])
attv = "alpha"
v = IOOS1_2_PlatformIDValidator()
self.assertTrue(v.validate(attn, attv)[0])
attv = "311123331112"
v = IOOS1_2_PlatformIDValidator()
self.assertTrue(v.validate(attn, attv)[0])
attv = "---fail---"
v = IOOS1_2_PlatformIDValidator()
self.assertFalse(v.validate(attn, attv)[0])
def test_check_platform_cf_role(self):
"""
Check that cf_role inside platform variables only allows certain
values, namely "profile_id", "timeseries_id", or "trajectory_id"
"""
ds = MockTimeSeries()
plat_var = ds.createVariable("platform", np.int8, ())
ds.variables["depth"].platform = "platform"
self.ioos.setup(ds)
results = self.ioos.check_platform_variable_cf_role(ds)
# don't set attribute, should raise error about attribute not
# existing
self.assertEqual(len(results), 1)
score, out_of = results[0].value
self.assertLess(score, out_of)
# set to invalid value
plat_var.setncattr("cf_role", "bad_value")
        results = self.ioos.check_platform_variable_cf_role(ds)
        score, out_of = results[0].value
        self.assertLess(score, out_of)
expected_vals = {"profile_id", "timeseries_id", "trajectory_id"}
expect_msg = (
'Platform variable "platform" must have a cf_role attribute '
"with one of the values {}".format(sorted(expected_vals))
)
self.assertEqual(results[0].msgs, [expect_msg])
# set to valid value
plat_var.setncattr("cf_role", "timeseries_id")
results = self.ioos.check_platform_variable_cf_role(ds)
score, out_of = results[0].value
self.assertEqual(score, out_of)
def test_check_platform_global(self):
ds = MockTimeSeries() # time, lat, lon, depth
# no global attr, fail
self.assertFalse(self.ioos.check_platform_global(ds).value)
# bad global attr, fail
ds.setncattr("platform", "bad value")
self.assertFalse(self.ioos.check_platform_global(ds).value)
# another bad value
ds.setncattr("platform", " bad")
self.assertFalse(self.ioos.check_platform_global(ds).value)
# good value
ds.setncattr("platform", "single_string")
res = self.ioos.check_platform_global(ds)
self.assertTrue(res.value)
self.assertEqual(res.msgs, [])
def test_check_single_platform(self):
ds = MockTimeSeries() # time, lat, lon, depth
# no global attr but also no platform variables, should pass
result = self.ioos.check_single_platform(ds)
self.assertTrue(result.value)
self.assertEqual(result.msgs, [])
# give platform global, no variables, fail
ds.setncattr("platform", "buoy")
result = self.ioos.check_single_platform(ds)
self.assertFalse(result.value)
# global platform, one platform variable, pass
temp = ds.createVariable("temp", "d", ("time"))
temp.setncattr("platform", "platform_var")
plat = ds.createVariable("platform_var", np.byte)
result = self.ioos.check_single_platform(ds)
self.assertTrue(result.value)
self.assertEqual(result.msgs, [])
# two platform variables, fail
temp2 = ds.createVariable("temp2", "d", ("time"))
temp2.setncattr("platform", "platform_var2")
plat = ds.createVariable("platform_var2", np.byte)
result = self.ioos.check_single_platform(ds)
self.assertFalse(result.value)
# no global attr, one variable, fail
ds = MockTimeSeries() # time, lat, lon, depth
temp = ds.createVariable("temp", "d", ("time"))
temp.setncattr("platform", "platform_var")
plat = ds.createVariable("platform_var", np.byte)
result = self.ioos.check_single_platform(ds)
self.assertFalse(result.value)
def test_check_cf_dsg(self):
ds = MockTimeSeries() # time, lat, lon, depth
ds.setncattr("platform", "single_string")
# correct cf_role & featureType, pass
ds.setncattr("featureType", "profile")
ds.createDimension("profile", 1)
temp = ds.createVariable("temp", "d", ("time"))
temp.setncattr("platform", "platform_var")
plat = ds.createVariable("platform_var", np.byte)
cf_role_var = ds.createVariable("cf_role_var", np.byte, ("profile",))
cf_role_var.setncattr("cf_role", "timeseries_id")
results = self.ioos.check_cf_dsg(ds)
self.assertTrue(all(r.value for r in results))
self.assertTrue(all(r.msgs == [] for r in results))
# correct featureType, incorrect cf_role var dimension
ds = MockTimeSeries() # time, lat, lon, depth
ds.setncattr("featureType", "trajectoryprofile")
ds.createDimension("trajectory", 2) # should only be 1
temp = ds.createVariable("temp", "d", ("time"))
temp.setncattr("platform", "platform_var")
plat = ds.createVariable("platform_var", np.byte)
cf_role_var = ds.createVariable("cf_role_var", np.byte, ("trajectory",))
cf_role_var.setncattr("cf_role", "trajectory_id")
results = self.ioos.check_cf_dsg(ds)
self.assertFalse(results[0].value)
# featureType==timeSeries, cf_role=timeseries_id
ds = MockTimeSeries()
ds.setncattr("featureType", "timeSeries")
ds.createDimension("station", 1)
temp = ds.createVariable("temp", "d", ("time"))
temp.setncattr("platform", "platform_var")
plat = ds.createVariable("platform_var", np.byte)
cf_role_var = ds.createVariable("cf_role_var", np.byte, ("station",))
cf_role_var.setncattr("cf_role", "timeseries_id")
results = self.ioos.check_cf_dsg(ds)
# check should pass with no results
self.assertEqual(results, [])
# featureType==timeSeriesProfile, cf_role==timeseries_id, dim 1, pass
ds = MockTimeSeries()
ds.setncattr("featureType", "timeSeriesProfile")
ds.createDimension("station", 1)
temp = ds.createVariable("temp", "d", ("time"))
temp.setncattr("platform", "platform_var")
plat = ds.createVariable("platform_var", np.byte)
cf_role_var = ds.createVariable("cf_role_var", np.byte, ("station",))
cf_role_var.setncattr("cf_role", "timeseries_id")
results = self.ioos.check_cf_dsg(ds)
self.assertEqual(results, [])
# featureType==timeSeriesProfile, cf_role==timeseries_id, dim 2, fail
ds = MockTimeSeries()
ds.setncattr("platform", "platform")
ds.setncattr("featureType", "timeSeriesProfile")
ds.createDimension("station", 2)
temp = ds.createVariable("temp", "d", ("time"))
temp.setncattr("platform", "platform_var")
plat = ds.createVariable("platform_var", np.byte)
cf_role_var = ds.createVariable("cf_role_var", np.byte, ("station",))
cf_role_var.setncattr("cf_role", "timeseries_id")
results = self.ioos.check_cf_dsg(ds)
self.assertFalse(results[0].value)
# featureType==trajectory, cf_role==trajectory_id, dim 1, pass
ds = MockTimeSeries()
ds.setncattr("featureType", "trajectory")
ds.createDimension("trajectory", 1)
temp = ds.createVariable("temp", "d", ("time"))
temp.setncattr("platform", "platform_var")
plat = ds.createVariable("platform_var", np.byte)
cf_role_var = ds.createVariable("cf_role_var", np.byte, ("trajectory",))
cf_role_var.setncattr("cf_role", "trajectory_id")
results = self.ioos.check_cf_dsg(ds)
self.assertEqual(results, [])
# featureType==trajectory, cf_role==trajectory, dim 2, fail
ds = MockTimeSeries()
ds.setncattr("featureType", "trajectory")
ds.createDimension("trajectory", 2)
temp = ds.createVariable("temp", "d", ("time"))
temp.setncattr("platform", "platform_var")
plat = ds.createVariable("platform_var", np.byte)
cf_role_var = ds.createVariable("cf_role_var", np.byte, ("trajectory",))
cf_role_var.setncattr("cf_role", "trajectory_id")
results = self.ioos.check_cf_dsg(ds)
self.assertFalse(results[0].value)
# featureType==trajectoryProfile, cf_role==trajectory_id, dim 1, pass
ds = MockTimeSeries()
ds.setncattr("featureType", "trajectoryProfile")
ds.createDimension("trajectoryprof", 1)
temp = ds.createVariable("temp", "d", ("time"))
temp.setncattr("platform", "platform_var")
plat = ds.createVariable("platform_var", np.byte)
cf_role_var = ds.createVariable("cf_role_var", np.byte, ("trajectoryprof",))
cf_role_var.setncattr("cf_role", "trajectory_id")
results = self.ioos.check_cf_dsg(ds)
self.assertEqual(results, [])
# featureType==trajectoryProfile, cf_role==trajectory_id, dim 2, fail
ds = MockTimeSeries()
ds.setncattr("featureType", "trajectoryProfile")
ds.createDimension("trajectoryprof", 2)
temp = ds.createVariable("temp", "d", ("time"))
temp.setncattr("platform", "platform_var")
plat = ds.createVariable("platform_var", np.byte)
cf_role_var = ds.createVariable("cf_role_var", np.byte, ("trajectoryprof",))
cf_role_var.setncattr("cf_role", "trajectory_id")
results = self.ioos.check_cf_dsg(ds)
self.assertFalse(results[0].value)
# featureType==profile, cf_role==profile_id, dim 1, pass
ds = MockTimeSeries()
ds.setncattr("featureType", "profile")
ds.createDimension("prof", 1)
temp = ds.createVariable("temp", "d", ("time"))
temp.setncattr("platform", "platform_var")
plat = ds.createVariable("platform_var", np.byte)
cf_role_var = ds.createVariable("cf_role_var", np.byte, ("prof",))
cf_role_var.setncattr("cf_role", "profile_id")
results = self.ioos.check_cf_dsg(ds)
self.assertEqual(results, [])
# featureType==profile, cf_role==profile_id, dim 2, fail
ds = MockTimeSeries()
ds.setncattr("featureType", "profile")
ds.createDimension("prof", 2)
temp = ds.createVariable("temp", "d", ("time"))
temp.setncattr("platform", "platform_var")
plat = ds.createVariable("platform_var", np.byte)
cf_role_var = ds.createVariable("cf_role_var", np.byte, ("prof",))
cf_role_var.setncattr("cf_role", "profile_id")
results = self.ioos.check_cf_dsg(ds)
self.assertFalse(results[0].value)
# featureType==point -- do nothing
ds = MockTimeSeries()
ds.setncattr("featureType", "point")
ds.createDimension("blah", 2)
temp = ds.createVariable("temp", "d", ("time"))
temp.setncattr("platform", "platform_var")
plat = ds.createVariable("platform_var", np.byte)
cf_role_var = ds.createVariable("cf_role_var", np.byte, ("blah",))
cf_role_var.setncattr("cf_role", "profile_id")
results = self.ioos.check_cf_dsg(ds)
self.assertEqual(results, [])
def test_check_platform_vocabulary(self):
ds = MockTimeSeries() # time, lat, lon, depth
ds.setncattr("platform_vocabulary", "http://google.com")
result = self.ioos.check_platform_vocabulary(ds)
self.assertTrue(result.value)
self.assertEqual(result.msgs, [])
ds.setncattr("platform_vocabulary", "bad")
self.assertFalse(self.ioos.check_platform_vocabulary(ds).value)
def test_check_qartod_variables_flags(self):
ds = MockTimeSeries() # time, lat, lon, depth
# no QARTOD variables
results = self.ioos.check_qartod_variables_flags(ds)
scored, out_of, messages = get_results(results)
self.assertEqual(scored, out_of)
# QARTOD variable without flag_values, flag_meanings (fail)
qr = ds.createVariable("depth_qc", np.byte)
qr.setncattr("standard_name", "spike_test_quality_flag")
results = self.ioos.check_qartod_variables_flags(ds)
self.assertTrue(not any(r.value for r in results)) # all False
        # QARTOD variable with flag_values, without flag_meanings
qr.setncattr("flag_values", np.array([0, 1, 2], dtype=np.byte))
results = self.ioos.check_qartod_variables_flags(ds)
self.assertEqual(results[0].value[0], results[0].value[1]) # should pass
self.assertFalse(results[1].value) # still fail
        # QARTOD variable with both flag_values and flag_meanings
qr.setncattr("flag_meanings", "x y z") # alphanumeric, space-separated
results = self.ioos.check_qartod_variables_flags(ds)
self.assertEqual(results[0].value[0], results[0].value[1]) # pass
self.assertEqual(results[1].value[0], results[1].value[1]) # pass
        # length of flag_values array not equal to length of flag_meanings
qr.setncattr("flag_values", np.array([0, 1], dtype=np.byte))
results = self.ioos.check_qartod_variables_flags(ds)
self.assertLess(results[0].value[0], results[0].value[1]) # should fail
self.assertEqual(results[1].value[0], results[1].value[1]) # pass
# flag_values right length, wrong type
qr.setncattr("flag_values", np.array([0, 1, 2], dtype=np.float64))
results = self.ioos.check_qartod_variables_flags(ds)
self.assertLess(results[0].value[0], results[0].value[1]) # should fail
self.assertEqual(results[1].value[0], results[1].value[1]) # pass
def test_check_qartod_variables_references(self):
ds = MockTimeSeries() # time, lat, lon, depth
# no QARTOD variables
results = self.ioos.check_qartod_variables_references(ds)
scored, out_of, messages = get_results(results)
self.assertEqual(scored, out_of)
# QARTOD variable without references (fail)
qr = ds.createVariable("depth_qc", np.byte)
qr.setncattr("flag_meanings", "blah")
qr.setncattr("standard_name", "spike_test_quality_flag")
results = self.ioos.check_qartod_variables_references(ds)
self.assertFalse(all(r.value for r in results))
# QARTOD variable with references (pass)
qr.setncattr("references", "http://services.cormp.org/quality.php")
results = self.ioos.check_qartod_variables_references(ds)
self.assertTrue(all(r.value for r in results))
self.assertEqual(results[0].msgs, []) # only one Result to test
# QARTOD variable with bad references (fail)
qr.setncattr(
"references", r"p9q384ht09q38@@####???????////??//\/\/\/\//\/\74ht"
)
results = self.ioos.check_qartod_variables_references(ds)
self.assertFalse(all(r.value for r in results))
def test_check_ioos_ingest(self):
ds = MockTimeSeries()
# no value, pass
res = self.ioos.check_ioos_ingest(ds)
self.assertTrue(res.value)
self.assertEqual(res.msgs, [])
# value false
ds.setncattr("ioos_ingest", "false")
self.assertTrue(self.ioos.check_ioos_ingest(ds).value)
# value true
ds.setncattr("ioos_ingest", "true")
self.assertTrue(self.ioos.check_ioos_ingest(ds).value)
# case insensitive
ds.setncattr("ioos_ingest", "True")
self.assertTrue(self.ioos.check_ioos_ingest(ds).value)
ds.setncattr("ioos_ingest", "False")
self.assertTrue(self.ioos.check_ioos_ingest(ds).value)
# anything else fails
ds.setncattr("ioos_ingest", "badval")
self.assertFalse(self.ioos.check_ioos_ingest(ds).value)
ds.setncattr("ioos_ingest", 0)
self.assertFalse(self.ioos.check_ioos_ingest(ds).value)
def test_vertical_dimension(self):
# MockTimeSeries has a depth variable, with axis of 'Z', units of 'm',
# and positive = 'down'
nc_obj = MockTimeSeries()
result = self.ioos.check_vertical_coordinates(nc_obj)[0]
self.assertEqual(*result.value)
nc_obj.variables["depth"].positive = "upwards"
result = self.ioos.check_vertical_coordinates(nc_obj)[0]
self.assertNotEqual(*result.value)
nc_obj.variables["depth"].positive = "up"
result = self.ioos.check_vertical_coordinates(nc_obj)[0]
self.assertEqual(*result.value)
# test units
nc_obj.variables["depth"].units = "furlong"
result = self.ioos.check_vertical_coordinates(nc_obj)[0]
expected_msg = (
"depth's units attribute furlong is not equivalent to "
"one of ('meter', 'inch', 'foot', 'yard', "
"'US_survey_foot', 'mile', 'fathom')"
)
self.assertEqual(result.msgs[0], expected_msg)
self.assertNotEqual(*result.value)
accepted_units = (
"meter",
"meters",
"inch",
"foot",
"yard",
"mile",
"miles",
"US_survey_foot",
"US_survey_feet",
"fathom",
"fathoms",
"international_inch",
"international_inches",
"international_foot",
"international_feet",
"international_yard",
"international_yards",
"international_mile",
"international_miles",
"inches",
"in",
"feet",
"ft",
"yd",
"mi",
)
for units in accepted_units:
nc_obj.variables["depth"].units = units
result = self.ioos.check_vertical_coordinates(nc_obj)[0]
self.assertEqual(*result.value)
|
# Copyright (C) 2019 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
import unittest
import mock
from ggrc.query import builder
class TestQueryHelper(unittest.TestCase):
def test_expression_keys(self):
""" test expression keys function
Make sure it works with:
empty query
simple query
complex query
invalid complex query
"""
# pylint: disable=protected-access
# needed for testing protected function inside the query helper
query = mock.MagicMock()
helper = builder.QueryHelper(query)
expressions = [
(set(), {}),
(set(["key_1"]), {
"left": "key_1",
"op": {"name": "="},
"right": "",
}),
(set(["key_1", "key_2"]), {
"left": {
"left": "key_2",
"op": {"name": "="},
"right": "",
},
"op": {"name": "AND"},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
(set(), {
"left": {
"left": "5",
"op": {"name": "="},
"right": "",
},
"op": {"name": "="},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
]
for expected_result, expression in expressions:
self.assertEqual(expected_result, helper._expression_keys(expression))
|
import os.path
from random import randint
class XkcdPasswordGenerator:
"""generates xkcd-inspired passwords"""
def __init__(self, word_list):
self.word_list = word_list
def generate(self, words=4, separator=' '):
result = [self.random_word() for _ in range(words)]
if separator is not None:
return separator.join(result)
else:
return result
def random_word(self):
return self.word_list[randint(0, (len(self.word_list) - 1))]
if __name__ == '__main__':
with open('dictionary.csv') as f:
        generator = XkcdPasswordGenerator(f.read().split(','))
print(generator.generate())
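        # Sketch: for security-sensitive passwords, the stdlib `secrets` module
        # (backed by the OS CSPRNG) is preferable to `random`:
        import secrets
        print(' '.join(secrets.choice(generator.word_list) for _ in range(4)))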
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-05-04 08:40
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('mygallery', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='image',
options={'ordering': ['image_name']},
),
migrations.AlterModelOptions(
name='location',
options={},
),
migrations.AlterField(
model_name='image',
name='image_category',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mygallery.Category'),
),
migrations.AlterField(
model_name='image',
name='image_description',
field=models.TextField(),
),
migrations.AlterField(
model_name='image',
name='image_location',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mygallery.Location'),
),
]
|
from django.conf.urls import url
from rest_framework import routers
from talentmap_api.fsbid.views import reference as views
router = routers.SimpleRouter()
urlpatterns = [
url(r'^dangerpay/$', views.FSBidDangerPayView.as_view(), name='FSBid-danger-pay'),
url(r'^cycles/$', views.FSBidCyclesView.as_view(), name='FSBid-cycles'),
url(r'^bureaus/$', views.FSBidBureausView.as_view(), name='FSBid-bureaus'),
url(r'^differentialrates/$', views.FSBidDifferentialRatesView.as_view(), name='FSBid-differential-rates'),
url(r'^grades/$', views.FSBidGradesView.as_view(), name='FSBid-grades'),
url(r'^languages/$', views.FSBidLanguagesView.as_view(), name='FSBid-languages'),
url(r'^tourofduties/$', views.FSBidTourOfDutiesView.as_view(), name='FSBid-tour-of-duties'),
url(r'^codes/$', views.FSBidCodesView.as_view(), name='FSBid-skill-codes'),
url(r'^cones/$', views.FSBidConesView.as_view(), name='FSBid-cones'),
url(r'^locations/$', views.FSBidLocationsView.as_view(), name='FSBid-locations'),
url(r'^classifications/$', views.FSBidClassificationsView.as_view(), name='FSBid-classifications'),
url(r'^postindicators/$', views.FSBidPostIndicatorsView.as_view(), name='FSBid-post-indicators'),
url(r'^unaccompaniedstatuses/$', views.FSBidUnaccompaniedStatusView.as_view(), name='FSBid-unaccompanied-statuses'),
url(r'^commuterposts/$', views.FSBidCommuterPostsView.as_view(), name='FSBid-commuter-posts'),
]
urlpatterns += router.urls
|
'''Sample module showing the creation of blaze arrays'''
from __future__ import absolute_import, division, print_function
import sys
import numpy as np
import blaze
try:
import tables as tb
except ImportError:
print("This example requires PyTables to run.")
sys.exit()
def print_section(a_string, level=0):
spacing = 2 if level == 0 else 1
underline = ['=', '-', '~', ' '][min(level,3)]
    print('%s%s\n%s' % ('\n' * spacing,
                        a_string,
                        underline * len(a_string)))
fname = "sample.h5"
print_section('building basic hdf5 files')
# Create a simple HDF5 file
a1 = np.array([[1, 2, 3], [4, 5, 6]], dtype="int32")
a2 = np.array([[1, 2, 3], [3, 2, 1]], dtype="int64")
t1 = np.array([(1, 2, 3), (3, 2, 1)], dtype="i4,i8,f8")
with tb.open_file(fname, "w") as f:
f.create_array(f.root, 'a1', a1)
f.create_table(f.root, 't1', t1)
f.create_group(f.root, 'g')
f.create_array(f.root.g, 'a2', a2)
print("Created HDF5 file with the next contents:\n%s" % str(f))
print_section('opening and handling datasets in hdf5 files')
# Open a homogeneous dataset there
ddesc = blaze.HDF5_DDesc(fname, datapath="/a1", mode="r")
a = blaze.array(ddesc)
# Print it
print("/a1 contents:", a)
# Print the datashape
print("datashape for /a1:", a.dshape)
# Open another homogeneous dataset there
ddesc = blaze.HDF5_DDesc(fname, datapath="/g/a2", mode="r")
a = blaze.array(ddesc)
# Print it
print("/g/a2 contents:", a)
# Print the datashape
print("datashape for /g/a2:", a.dshape)
# Now, get a heterogeneous dataset
ddesc = blaze.HDF5_DDesc(fname, datapath="/t1", mode="r")
t = blaze.array(ddesc)
# Print it
print("/t1 contents:", t)
# Print the datashape
print("datashape for /t1:", t.dshape)
# Finally, get rid of the sample file
ddesc.remove()
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('heritage', '0012_auto_20160107_0107'),
]
operations = [
migrations.AddField(
model_name='asset',
name='interview',
field=models.ForeignKey(to='heritage.Interview', default=1),
preserve_default=False,
),
migrations.AlterField(
model_name='asset',
name='session',
field=models.ForeignKey(null=True, blank=True, to='heritage.Session'),
),
]
|
# ---
# jupyter:
# jupytext:
# formats: ipynb,py:percent
# text_representation:
# extension: .py
# format_name: percent
# format_version: '1.3'
# jupytext_version: 1.12.0
# kernelspec:
# display_name: ODC
# language: python
# name: odc
# ---
# %% [markdown]
# # Access Sentinel 2 Data from AWS
#
# https://registry.opendata.aws/sentinel-2-l2a-cogs/
# %%
import odc.ui
import yaml
from odc.algo import to_rgba
from odc.stac import stac2ds, stac_load
from pystac_client import Client
# %%
cfg = """---
"*":
warnings: ignore # Disable warnings about duplicate common names
sentinel-s2-l2a-cogs:
assets:
'*':
data_type: uint16
nodata: 0
unit: '1'
SCL:
data_type: uint8
nodata: 0
unit: '1'
visual:
data_type: uint8
nodata: 0
unit: '1'
aliases: # Alias -> Canonical Name
red: B04
green: B03
blue: B02
"""
cfg = yaml.load(cfg, Loader=yaml.CSafeLoader)
catalog = Client.open("https://earth-search.aws.element84.com/v0")
# %% [markdown]
# ## Find STAC Items to Load
# %%
km2deg = 1.0 / 111
x, y = (113.887, -25.843) # Center point of a query
r = 100 * km2deg
query = catalog.search(
collections=["sentinel-s2-l2a-cogs"],
datetime="2021-09-16",
limit=10,
bbox=(x - r, y - r, x + r, y + r),
)
items = list(query.get_items())
print(f"Found: {len(items):d} datasets")
# %% [markdown]
# ## Construct Dask Dataset
#
# Note that even though there are 9 STAC Items on input, there is only one timeslice on output. This is because of `groupby="solar_day"`. With that setting `stac_load` will place all items that occurred on the same day (as adjusted for the timezone) into one image plane.
# %%
# crs = "epsg:32749" # native UTM projection in the query region
crs = "epsg:3857" # Since we will plot it on a map we need to use 3857 projection
zoom = 2 ** 5 # overview level 5
xx = stac_load(
items,
output_crs=crs,
resolution=(-10 * zoom, 10 * zoom),
chunks={},
groupby="solar_day",
measurements=["red", "green", "blue"],
stac_cfg=cfg,
)
xx
# %% [markdown]
# ## Convert to RGB and load
# %%
# %%time
rgba = to_rgba(xx, clamp=(1, 3000))
_rgba = rgba.compute()
# %% [markdown]
# ## Display Image on a map
# %%
dss = list(stac2ds(items, cfg))
_map = odc.ui.show_datasets(dss, style={"fillOpacity": 0.1}, scroll_wheel_zoom=True)
ovr = odc.ui.mk_image_overlay(_rgba)
_map.add_layer(ovr)
_map
# %% [markdown]
# --------------------------------------------------------------
|
"""The Blackholes AstroCatalog.
"""
# flake8: noqa --- ignore imported but unused flake8 warnings
import os
import glob
from . import main
# from . import production
_PATH_BLACKHOLES = os.path.join(os.path.dirname(__file__), "")
PATH_BH_SCHEMA = os.path.join(_PATH_BLACKHOLES, "schema", "")
|
from core import *
#\item[something] has no enumeration
#[TAG] changes arguments to [] type
"""
<ol class="enumeration" type="1">
<li value="(O1)">This is item three.</li>
<li value="(O2)">This is item fifty.</li>
<li value="(O3)">
Something Aweakkjlk
<ol class="enumeration" type="1">
<li value="">This is item three.</li>
<li value="(A5)">This is item fifty.</li>
<li value="(A6)">This is item one hundred.</li>
</ol>
</li>
</ol>
"""
class ItemizeItem(Element):
def __init__(self,modifiable_content,parent,label = "•"):
super().__init__("",parent)
self.label = label
self.children = [Undefined(label,self),Undefined(modifiable_content,self)]
def label_name(self):
        return self.label  # self.children[0].to_string()
def to_string(self):
out = "<li value='"+self.children[0].to_string()+"'>"
self.children = self.children[1:]
for child in self.children:
out += child.to_string()
out += "</li>"
return out
@staticmethod
def position(input):
return position_of(input,"\\item")
@staticmethod
def split_and_create(input,parent):
pre,content = split_on_next(input,"\\item")
if "\\item" in content:
content,post = split_on_next(content,"\\item")
post = "\\item" + post
else:
post = ""
label = ""
if first_char_brace(content,"["):
label,content = split_on_first_brace(content,"[","]")
elem_out = ItemizeItem(content,parent,label)
return pre,elem_out,post
class Itemize(Element):
current_index = 0
def __init__(self,modifiable_content,parent):
super().__init__(modifiable_content,parent)
def to_string(self):
out = "</p><ol class='enumeration'>"
for child in self.children:
out += child.to_string()
out += "</ol><p>"
return out
@staticmethod
def position(input):
return position_of(input,"\\begin{itemize}")
@staticmethod
def split_and_create(input,parent):
pre,content,post = begin_end_split(input,"\\begin{itemize}","\\end{itemize}")
elem_out = Itemize(content,parent)
elem_out.expand([ItemizeItem])
return pre,elem_out,post
class EnumerationItem(Element):
def __init__(self,modifiable_content,parent,label = None):
super().__init__("",parent)
self.label = ""
if label is None:
enumeration = self.search_class(Enumeration)
self.label = enumeration.generate_item_label()
else:
self.label = label
self.children = [Undefined(self.label,self),Undefined(modifiable_content,self)]
def label_name(self):
return self.label
def to_string(self):
out = "<li value='"+self.children[0].to_string()+"'>"
self.children = self.children[1:]
for child in self.children:
out += child.to_string()
out += "</li>"
return out
@staticmethod
def position(input):
return position_of(input,"\\item")
@staticmethod
def split_and_create(input,parent):
pre,content = split_on_next(input,"\\item")
if "\\item" in content:
content,post = split_on_next(content,"\\item")
post = "\\item" + post
else:
post = ""
label = None
if first_char_brace(content,"["):
label,content = split_on_first_brace(content,"[","]")
elem_out = EnumerationItem(content,parent,label)
return pre,elem_out,post
def enum_style_roman(index):
    roman = ["i","ii","iii","iv","v","vi","vii","viii","ix","x"]
    return roman[index]
def enum_style_Roman(index):
    roman = ["i","ii","iii","iv","v","vi","vii","viii","ix","x"]
    return roman[index].upper()
def enum_style_arabic(index):
return str(index + 1)
def enum_style_alph(index):
lowercase = 'abcdefghijklmnopqrstuvwxyz'
return lowercase[index]
def enum_style_Alph(index):
uppercase = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
return uppercase[index]
def enum_style_empty(index):
return ""
enum_styles = {"\\roman":enum_style_roman,"\\Roman":enum_style_Roman,"\\arabic":enum_style_arabic,"\\alph":enum_style_alph,"\\Alph":enum_style_Alph}
class Enumeration(Element):
current_index = 0
def __init__(self,modifiable_content,parent,style_func,left,right):
super().__init__(modifiable_content,parent)
self.style_func,self.left,self.right = style_func,left,right
def to_string(self):
out = "</p><ol class='enumeration'>"
for child in self.children:
out += child.to_string()
out += "</ol><p>"
return out
@staticmethod
def position(input):
return position_of(input,"\\begin{enumerate}")
@staticmethod
def split_and_create(input,parent):
pre,content,post = begin_end_split(input,"\\begin{enumerate}","\\end{enumerate}")
style_func = None
left = ""
right = ""
if first_char_brace(content,"["):
options,content = split_on_first_brace(content,"[","]")
options_pre,options_post = split_on_next(options,"label")
options_post = remove_empty_at_begin(options_post)
            options_post = options_post[1:]  # remove '='
options_label = ""
#print("options_post",options_post)
if first_char_brace(options_post,"{"):
options_label,options_post = split_on_first_brace(options_post)
tmp = options_post.split(",")[0]
options_label = options_label + tmp
for style in enum_styles.keys():
if style in options_label:
style_func = enum_styles[style]
tmp = options_label.split(style)
left = tmp[0]
_,right = split_on_next(tmp[1],"*")
break
if style_func is None:
style_func = enum_style_empty
left = options_label
else:
style_func = enum_style_arabic
right = "."
left = left.replace("\\sffamily","")
left = left.replace("{","")
left = left.replace("}","")
right = right.replace("\\sffamily","")
right = right.replace("{","")
right = right.replace("}","")
elem_out = Enumeration(content,parent,style_func,left,right)
elem_out.expand([EnumerationItem])
return pre,elem_out,post
def generate_item_label(self):
self.current_index = self.current_index + 1
return self.left + self.style_func(self.current_index-1)+ self.right
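# Minimal sketch: exercise the numbering-style helpers in isolation (they
# mirror LaTeX's \roman, \Roman, \arabic, \alph and \Alph counters).
if __name__ == "__main__":
    for name, func in sorted(enum_styles.items()):
        print(name, [func(i) for i in range(3)])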
|
#!/usr/bin/python
import fileinput
import os
try:
with fileinput.FileInput('./env/lib/python3.8/site-packages/firebase/__init__.py', inplace=True, backup='.bak') as file:
for line in file:
print(line.replace('.async', '.assync'), end='')
with fileinput.FileInput('./env/lib/python3.8/site-packages/firebase/firebase.py', inplace=True, backup='.bak') as file:
for line in file:
print(line.replace('.async', '.assync'), end='')
    os.rename('./env/lib/python3.8/site-packages/firebase/async.py', './env/lib/python3.8/site-packages/firebase/assync.py')
except OSError:
    print('Already Fixed')
|
from typing import List, TypedDict
from google.appengine.ext import ndb
from backend.common.consts.api_version import ApiMajorVersion
from backend.common.models.event import Event
from backend.common.models.location import Location
from backend.common.queries.dict_converters.converter_base import ConverterBase
class DummyModel(ndb.Model):
int_prop = ndb.IntegerProperty()
class DummyDict(TypedDict):
int_val: int
class DummyConverter(ConverterBase):
SUBVERSIONS = {
ApiMajorVersion.API_V3: 0,
}
@classmethod
def _convert_list(
cls, model_list: List[DummyModel], version: ApiMajorVersion
) -> List[DummyDict]:
return list(map(cls.converter_v3, model_list))
@classmethod
def converter_v3(cls, model: DummyModel) -> DummyDict:
return {
"int_val": model.int_prop,
}
def test_convert_single() -> None:
converted = DummyConverter(DummyModel(int_prop=604)).convert(ApiMajorVersion.API_V3)
assert converted == {"int_val": 604}
def test_convert_multi() -> None:
converted = DummyConverter(
[DummyModel(int_prop=254), DummyModel(int_prop=604)]
).convert(ApiMajorVersion.API_V3)
assert converted == [{"int_val": 254}, {"int_val": 604}]
def test_convert_none() -> None:
converted = DummyConverter(None).convert(ApiMajorVersion.API_V3)
assert converted is None
def test_constructLocation_v3() -> None:
location = Location(
name="Test",
formatted_address="1234 Street St., San Jose, CA 95120, USA",
lat_lng=ndb.GeoPt(30, 40),
city="San Jose",
state_prov="California",
state_prov_short="CA",
country="United States",
country_short="US",
place_details={},
)
event = Event(
normalized_location=location,
)
converted = ConverterBase.constructLocation_v3(event)
assert converted["location_name"] == location.name
assert converted["city"] == location.city
assert converted["state_prov"] == location.state_prov_short
assert converted["country"] == location.country_short_if_usa
assert converted["lat"] == location.lat_lng.lat
assert converted["lng"] == location.lat_lng.lon
|
cidade = str(input('Enter a city name: ')).lower().strip()
print(cidade[:5] == 'santo')
#separado = cidade.split()
#print('santo' in separado[0])
|