repo_name (string, 5-92 chars) | path (string, 4-232 chars) | copies (19 classes) | size (string, 4-7 chars) | content (string, 721-1.04M chars) | license (15 classes) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51-99.9) | line_max (int64, 15-997) | alpha_frac (float64, 0.25-0.97) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---
MadsJensen/malthe_alpha_project | make_inverse_operator.py | 1 | 1501 | # -*- coding: utf-8 -*-
"""
Created on Mon Aug 31 10:00:32 2015
@author: mje
"""
import mne
from mne.minimum_norm import (make_inverse_operator, apply_inverse,
write_inverse_operator)
import socket
import numpy as np
import matplotlib.pyplot as plt
# Setup paths and prepare raw data
hostname = socket.gethostname()
if hostname == "Wintermute":
data_path = "/home/mje/mnt/caa/scratch/"
n_jobs = 1
else:
data_path = "/projects/MINDLAB2015_MEG-CorticalAlphaAttention/scratch/"
n_jobs = 1
subjects_dir = data_path + "fs_subjects_dir/"
fname_fwd = data_path + '0001-fwd.fif'
fname_cov = data_path + '0001-cov.fif'
fname_evoked = data_path + "0001_p_03_filter_ds_ica-mc_raw_tsss-ave.fif"
snr = 1.0
lambda2 = 1.0 / snr ** 2
# Load data
evoked = mne.read_evokeds(fname_evoked, condition=0, baseline=(None, 0))
forward_meeg = mne.read_forward_solution(fname_fwd, surf_ori=True)
noise_cov = mne.read_cov(fname_cov)
# Restrict forward solution as necessary for MEG
forward_meg = mne.pick_types_forward(forward_meeg, meg=True, eeg=False)
# Alternatively, you can just load a forward solution that is restricted
# make an M/EEG, MEG-only, and EEG-only inverse operators
inverse_operator_meg = make_inverse_operator(evoked.info, forward_meg,
noise_cov,
loose=0.2, depth=0.8)
write_inverse_operator('0001-meg-oct-6-inv.fif',
inverse_operator_meg)
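
# --- Added usage sketch (an assumption, not part of the original script): the
# --- file imports `apply_inverse` and `plt` but never uses them; this shows
# --- how the operator built above would typically be applied. The dSPM method
# --- choice and the plotting calls are illustrative.
stc = apply_inverse(evoked, inverse_operator_meg, lambda2, method="dSPM")
plt.plot(stc.times, stc.data[::100, :].T)  # plot every 100th source time course
plt.xlabel("time (s)")
plt.ylabel("dSPM value")
plt.show()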
| mit | 4,213,506,564,462,215,000 | 30.93617 | 75 | 0.655563 | false |
tyler274/Recruitment-App | recruit_app/ia/views.py | 1 | 1374 | from flask import Blueprint, render_template, flash, redirect, request, url_for, current_app
from flask_security.decorators import login_required
from flask_security import current_user
from recruit_app.ia.managers import IaManager
from recruit_app.user.eve_api_manager import EveApiManager
from recruit_app.ia.forms import SubmitIssueForm
blueprint = Blueprint("ia", __name__, url_prefix='/ia', static_folder="../static")
@blueprint.route("/submit_issue", methods=['GET', 'POST'])
@login_required
def submit_issue():
    # Check if user is in Karmafleet (98370861)
    if not EveApiManager.check_if_character_is_in_corp(int(current_user.main_character_id), 98370861):
        flash('You are not a current KarmaFleet member.', 'error')
        return redirect(url_for('public.home'))

    form = SubmitIssueForm()

    # Display form if GET, submit if POST
    if request.method == 'POST':
        if form.validate_on_submit():
            # Do the submission
            if IaManager.submit_issue(current_user, form.subject.data, form.body.data, form.logs.data):
                flash('Issue submitted successfully.', 'info')
            else:
                flash('Error submitting issue. Please try again later.', 'error')
            return redirect(url_for('public.home'))

    # GET
    return render_template('ia/submit_issue.html', form=form) | bsd-3-clause | -1,894,737,977,002,553,900 | 40.666667 | 103 | 0.676128 | false |
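
# --- Added sketch (assumption; not part of the repository row above): a minimal
# --- WTForms definition consistent with how `submit_issue` uses the form, i.e.
# --- it reads `subject`, `body`, and `logs` and calls `validate_on_submit()`.
# --- Field types and validators are guesses, hence the distinct name.
from flask_wtf import FlaskForm
from wtforms import StringField, TextAreaField
from wtforms.validators import DataRequired

class SubmitIssueFormSketch(FlaskForm):
    subject = StringField('Subject', validators=[DataRequired()])
    body = TextAreaField('Body', validators=[DataRequired()])
    logs = TextAreaField('Logs')  # optional supporting log output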
menegazzo/travispy | travispy/entities/repo.py | 2 | 2715 | from ._stateful import Stateful
class Repo(Stateful):
    '''
    :ivar str slug:
        Repository slug.

    :ivar str description:
        Description on |github|.

    :ivar int last_build_id:
        Build ID of the last executed build.

    :ivar str last_build_number:
        Build number of the last executed build.

    :ivar str last_build_state:
        Build state of the last executed build.

    :ivar str last_build_duration:
        Build duration of the last executed build.

    :ivar str last_build_started_at:
        Build start time of the last executed build.

    :ivar str last_build_finished_at:
        Build finish time of the last executed build.

    :ivar str github_language:
        Language on |github|.

    :ivar bool active:
        Whether or not the repository is active on |travisci|.
    '''

    __slots__ = [
        'slug',
        'description',
        'last_build_id',
        'last_build_number',
        'last_build_state',
        'last_build_duration',
        'last_build_started_at',
        'last_build_finished_at',
        'last_build_language',
        'github_language',
        'active',
    ]

    @property
    def state(self):
        '''
        :class:`.Repo` state is given through ``last_build_state``.

        .. seealso:: :class:`.Stateful` for ``state`` full documentation.
        '''
        return self.last_build_state

    @property
    def last_build(self):
        '''
        :rtype: :class:`.Build`
        :returns:
            A :class:`.Build` object with information related to current ``last_build_id``.
        '''
        from .build import Build
        return self._load_one_lazy_information(Build, 'last_build_id')

    @classmethod
    def find_one(cls, session, entity_id, **kwargs):
        result = super(Repo, cls).find_one(session, entity_id, **kwargs)
        return result

    def _set_hook(self, flag):
        response = self._session.put(
            self._session.uri + '/hooks/{}'.format(self.id),
            json={"hook": {"active": flag}},
        )
        result = response.status_code == 200
        if result:
            self.active = flag
        return result

    def disable(self):
        '''
        Disable Travis CI for the repository.

        :rtype: bool
        :returns:
            ``True`` if the API call was successful.
            ``False`` if the API call was unsuccessful.
        '''
        return self._set_hook(False)

    def enable(self):
        '''
        Enable Travis CI for the repository.

        :rtype: bool
        :returns:
            ``True`` if the API call was successful.
            ``False`` if the API call was unsuccessful.
        '''
        return self._set_hook(True)
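

# --- Added usage sketch (an assumption, not part of the row above): how the
# --- enable/disable hooks and the lazy `last_build` property are typically
# --- reached through the TravisPy client. Requires a real GitHub token to run.
from travispy import TravisPy

def toggle_repo_sketch(github_token, slug='menegazzo/travispy'):
    t = TravisPy.github_auth(github_token)  # authenticate against Travis CI
    repo = t.repo(slug)                     # fetch the Repo entity defined above
    print(repo.last_build_state)            # property delegated to last_build_state
    repo.disable()                          # PUT /hooks/{id} with active=False
    return repo.enable()                    # re-enable; True on HTTP 200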
| gpl-3.0 | -575,740,151,106,898,900 | 24.857143 | 91 | 0.559116 | false |
daspecster/google-cloud-python | resource_manager/unit_tests/test__http.py | 1 | 2784 | # Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import mock
class TestConnection(unittest.TestCase):

    @staticmethod
    def _get_target_class():
        from google.cloud.resource_manager._http import Connection

        return Connection

    def _make_one(self, *args, **kw):
        return self._get_target_class()(*args, **kw)

    def test_build_api_url_no_extra_query_params(self):
        conn = self._make_one(object())
        URI = '/'.join([
            conn.API_BASE_URL,
            conn.API_VERSION,
            'foo',
        ])
        self.assertEqual(conn.build_api_url('/foo'), URI)

    def test_build_api_url_w_extra_query_params(self):
        from six.moves.urllib.parse import parse_qsl
        from six.moves.urllib.parse import urlsplit

        conn = self._make_one(object())
        uri = conn.build_api_url('/foo', {'bar': 'baz'})
        scheme, netloc, path, qs, _ = urlsplit(uri)
        self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL)
        self.assertEqual(path,
                         '/'.join(['', conn.API_VERSION, 'foo']))
        parms = dict(parse_qsl(qs))
        self.assertEqual(parms['bar'], 'baz')

    def test_extra_headers(self):
        from google.cloud import _http as base_http
        from google.cloud.resource_manager import _http as MUT

        http = mock.Mock(spec=['request'])
        response = mock.Mock(status=200, spec=['status'])
        data = b'brent-spiner'
        http.request.return_value = response, data
        client = mock.Mock(_http=http, spec=['_http'])
        conn = self._make_one(client)
        req_data = 'req-data-boring'
        result = conn.api_request(
            'GET', '/rainbow', data=req_data, expect_json=False)
        self.assertEqual(result, data)

        expected_headers = {
            'Content-Length': str(len(req_data)),
            'Accept-Encoding': 'gzip',
            base_http.CLIENT_INFO_HEADER: MUT._CLIENT_INFO,
            'User-Agent': conn.USER_AGENT,
        }
        expected_uri = conn.build_api_url('/rainbow')
        http.request.assert_called_once_with(
            body=req_data,
            headers=expected_headers,
            method='GET',
            uri=expected_uri,
        )
| apache-2.0 | 7,192,150,940,954,311,000 | 33.37037 | 74 | 0.609195 | false |
isarn/isarn-sketches-spark | python/isarnproject/sketches/spark/tdigest.py | 1 | 14993 | import sys
import random
import itertools as it
from bisect import bisect_left, bisect_right
from pyspark.sql.types import UserDefinedType, StructField, StructType, \
    ArrayType, DoubleType, IntegerType
from pyspark.sql.column import Column, _to_java_column, _to_seq
from pyspark.context import SparkContext

__all__ = ['tdigestIntUDF', 'tdigestLongUDF', 'tdigestFloatUDF', 'tdigestDoubleUDF',
           'tdigestMLVecUDF', 'tdigestMLLibVecUDF',
           'tdigestIntArrayUDF', 'tdigestLongArrayUDF', 'tdigestFloatArrayUDF', 'tdigestDoubleArrayUDF',
           'tdigestReduceUDF', 'tdigestArrayReduceUDF',
           'TDigest']
def tdigestIntUDF(col, compression=0.5, maxDiscrete=0):
    """
    Return a UDF for aggregating a column of integer data.

    :param col: name of the column to aggregate
    :param compression: T-Digest compression parameter (default 0.5)
    :param maxDiscrete: maximum unique discrete values to store before reverting to
        continuous (default 0)
    """
    sc = SparkContext._active_spark_context
    tdapply = sc._jvm.org.isarnproject.sketches.spark.tdigest.functions.tdigestIntUDF(
        compression, maxDiscrete).apply
    return Column(tdapply(_to_seq(sc, [col], _to_java_column)))


def tdigestLongUDF(col, compression=0.5, maxDiscrete=0):
    """
    Return a UDF for aggregating a column of long integer data.

    :param col: name of the column to aggregate
    :param compression: T-Digest compression parameter (default 0.5)
    :param maxDiscrete: maximum unique discrete values to store before reverting to
        continuous (default 0)
    """
    sc = SparkContext._active_spark_context
    tdapply = sc._jvm.org.isarnproject.sketches.spark.tdigest.functions.tdigestLongUDF(
        compression, maxDiscrete).apply
    return Column(tdapply(_to_seq(sc, [col], _to_java_column)))


def tdigestFloatUDF(col, compression=0.5, maxDiscrete=0):
    """
    Return a UDF for aggregating a column of (single precision) float data.

    :param col: name of the column to aggregate
    :param compression: T-Digest compression parameter (default 0.5)
    :param maxDiscrete: maximum unique discrete values to store before reverting to
        continuous (default 0)
    """
    sc = SparkContext._active_spark_context
    tdapply = sc._jvm.org.isarnproject.sketches.spark.tdigest.functions.tdigestFloatUDF(
        compression, maxDiscrete).apply
    return Column(tdapply(_to_seq(sc, [col], _to_java_column)))


def tdigestDoubleUDF(col, compression=0.5, maxDiscrete=0):
    """
    Return a UDF for aggregating a column of double float data.

    :param col: name of the column to aggregate
    :param compression: T-Digest compression parameter (default 0.5)
    :param maxDiscrete: maximum unique discrete values to store before reverting to
        continuous (default 0)
    """
    sc = SparkContext._active_spark_context
    tdapply = sc._jvm.org.isarnproject.sketches.spark.tdigest.functions.tdigestDoubleUDF(
        compression, maxDiscrete).apply
    return Column(tdapply(_to_seq(sc, [col], _to_java_column)))


def tdigestMLVecUDF(col, compression=0.5, maxDiscrete=0):
    """
    Return a UDF for aggregating a column of ML Vector data.

    :param col: name of the column to aggregate
    :param compression: T-Digest compression parameter (default 0.5)
    :param maxDiscrete: maximum unique discrete values to store before reverting to
        continuous (default 0)
    """
    sc = SparkContext._active_spark_context
    tdapply = sc._jvm.org.isarnproject.sketches.spark.tdigest.functions.tdigestMLVecUDF(
        compression, maxDiscrete).apply
    return Column(tdapply(_to_seq(sc, [col], _to_java_column)))


def tdigestMLLibVecUDF(col, compression=0.5, maxDiscrete=0):
    """
    Return a UDF for aggregating a column of MLLib Vector data.

    :param col: name of the column to aggregate
    :param compression: T-Digest compression parameter (default 0.5)
    :param maxDiscrete: maximum unique discrete values to store before reverting to
        continuous (default 0)
    """
    sc = SparkContext._active_spark_context
    tdapply = sc._jvm.org.isarnproject.sketches.spark.tdigest.functions.tdigestMLLibVecUDF(
        compression, maxDiscrete).apply
    return Column(tdapply(_to_seq(sc, [col], _to_java_column)))


def tdigestIntArrayUDF(col, compression=0.5, maxDiscrete=0):
    """
    Return a UDF for aggregating a column of integer-array data.

    :param col: name of the column to aggregate
    :param compression: T-Digest compression parameter (default 0.5)
    :param maxDiscrete: maximum unique discrete values to store before reverting to
        continuous (default 0)
    """
    sc = SparkContext._active_spark_context
    tdapply = sc._jvm.org.isarnproject.sketches.spark.tdigest.functions.tdigestIntArrayUDF(
        compression, maxDiscrete).apply
    return Column(tdapply(_to_seq(sc, [col], _to_java_column)))


def tdigestLongArrayUDF(col, compression=0.5, maxDiscrete=0):
    """
    Return a UDF for aggregating a column of long-integer array data.

    :param col: name of the column to aggregate
    :param compression: T-Digest compression parameter (default 0.5)
    :param maxDiscrete: maximum unique discrete values to store before reverting to
        continuous (default 0)
    """
    sc = SparkContext._active_spark_context
    tdapply = sc._jvm.org.isarnproject.sketches.spark.tdigest.functions.tdigestLongArrayUDF(
        compression, maxDiscrete).apply
    return Column(tdapply(_to_seq(sc, [col], _to_java_column)))


def tdigestFloatArrayUDF(col, compression=0.5, maxDiscrete=0):
    """
    Return a UDF for aggregating a column of (single-precision) float array data.

    :param col: name of the column to aggregate
    :param compression: T-Digest compression parameter (default 0.5)
    :param maxDiscrete: maximum unique discrete values to store before reverting to
        continuous (default 0)
    """
    sc = SparkContext._active_spark_context
    tdapply = sc._jvm.org.isarnproject.sketches.spark.tdigest.functions.tdigestFloatArrayUDF(
        compression, maxDiscrete).apply
    return Column(tdapply(_to_seq(sc, [col], _to_java_column)))


def tdigestDoubleArrayUDF(col, compression=0.5, maxDiscrete=0):
    """
    Return a UDF for aggregating a column of double array data.

    :param col: name of the column to aggregate
    :param compression: T-Digest compression parameter (default 0.5)
    :param maxDiscrete: maximum unique discrete values to store before reverting to
        continuous (default 0)
    """
    sc = SparkContext._active_spark_context
    tdapply = sc._jvm.org.isarnproject.sketches.spark.tdigest.functions.tdigestDoubleArrayUDF(
        compression, maxDiscrete).apply
    return Column(tdapply(_to_seq(sc, [col], _to_java_column)))


def tdigestReduceUDF(col, compression=0.5, maxDiscrete=0):
    """
    Return a UDF for aggregating a column of t-digests.

    :param col: name of the column to aggregate
    :param compression: T-Digest compression parameter (default 0.5)
    :param maxDiscrete: maximum unique discrete values to store before reverting to
        continuous (default 0)
    """
    sc = SparkContext._active_spark_context
    tdapply = sc._jvm.org.isarnproject.sketches.spark.tdigest.functions.tdigestReduceUDF(
        compression, maxDiscrete).apply
    return Column(tdapply(_to_seq(sc, [col], _to_java_column)))


def tdigestArrayReduceUDF(col, compression=0.5, maxDiscrete=0):
    """
    Return a UDF for aggregating a column of t-digest vectors.

    :param col: name of the column to aggregate
    :param compression: T-Digest compression parameter (default 0.5)
    :param maxDiscrete: maximum unique discrete values to store before reverting to
        continuous (default 0)
    """
    sc = SparkContext._active_spark_context
    tdapply = sc._jvm.org.isarnproject.sketches.spark.tdigest.functions.tdigestArrayReduceUDF(
        compression, maxDiscrete).apply
    return Column(tdapply(_to_seq(sc, [col], _to_java_column)))
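

# --- Added usage sketch (an assumption, not part of the original module): how
# --- the UDFs above are driven from a SparkSession. Column name `x` and the
# --- sample data are illustrative; kept commented out so importing this module
# --- stays side-effect free.
# spark = SparkSession.builder.getOrCreate()
# df = spark.createDataFrame([(float(i),) for i in range(1000)], ['x'])
# agg = df.agg(tdigestDoubleUDF('x', compression=0.5).alias('td'))
# td = agg.first()['td']        # a TDigest instance (see class below)
# print(td.cdfInverse(0.5))     # approximate median of column x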
class TDigestUDT(UserDefinedType):
    @classmethod
    def sqlType(cls):
        return StructType([
            StructField("compression", DoubleType(), False),
            StructField("maxDiscrete", IntegerType(), False),
            StructField("cent", ArrayType(DoubleType(), False), False),
            StructField("mass", ArrayType(DoubleType(), False), False)])

    @classmethod
    def module(cls):
        return "isarnproject.sketches.udt.tdigest"

    @classmethod
    def scalaUDT(cls):
        return "org.apache.spark.isarnproject.sketches.udtdev.TDigestUDT"

    def simpleString(self):
        return "tdigest"

    def serialize(self, obj):
        if isinstance(obj, TDigest):
            return (obj.compression, obj.maxDiscrete, obj._cent, obj._mass)
        else:
            raise TypeError("cannot serialize %r of type %r" % (obj, type(obj)))

    def deserialize(self, datum):
        return TDigest(datum[0], datum[1], datum[2], datum[3])
class TDigest(object):
    """
    A T-Digest sketch of a cumulative numeric distribution.

    This is a "read-only" python mirror of org.isarnproject.sketches.java.TDigest which supports
    all cdf and sampling methods, but does not currently support update with new data. It is
    assumed to have been produced with a t-digest aggregating UDF, also exposed in this package.
    """

    # Because this is a value and not a function, TDigestUDT has to be defined above,
    # and in the same file.
    __UDT__ = TDigestUDT()

    def __init__(self, compression, maxDiscrete, cent, mass):
        self.compression = float(compression)
        self.maxDiscrete = int(maxDiscrete)
        assert self.compression > 0.0, "compression must be > 0"
        assert self.maxDiscrete >= 0, "maxDiscrete must be >= 0"
        self._cent = [float(v) for v in cent]
        self._mass = [float(v) for v in mass]
        assert len(self._mass) == len(self._cent), "cluster mass and cent must have same dimension"
        self.nclusters = len(self._cent)
        # Current implementation is "read only" so we can just store cumulative sum here.
        # To support updating, 'csum' would need to become a Fenwick tree array
        self._csum = list(it.accumulate(self._mass))

    def __repr__(self):
        return "TDigest(%s, %s, %s, %s)" % \
            (repr(self.compression), repr(self.maxDiscrete), repr(self._cent), repr(self._mass))

    def mass(self):
        """
        Total mass accumulated by this TDigest
        """
        if len(self._csum) == 0: return 0.0
        return self._csum[-1]

    def size(self):
        """
        Number of clusters in this TDigest
        """
        return len(self._cent)

    def isEmpty(self):
        """
        Returns True if this TDigest is empty, False otherwise
        """
        return len(self._cent) == 0

    def __reduce__(self):
        return (self.__class__, (self.compression, self.maxDiscrete, self._cent, self._mass, ))

    def _lmcovj(self, m):
        assert self.nclusters >= 2
        assert (m >= 0.0) and (m <= self.mass())
        return bisect_left(self._csum, m)

    def _rmcovj(self, m):
        assert self.nclusters >= 2
        assert (m >= 0.0) and (m <= self.mass())
        return bisect_right(self._csum, m) - 1

    def _rcovj(self, x):
        return bisect_right(self._cent, x) - 1

    # emulates behavior from isarn java TDigest, which computes
    # cumulative sum via a Fenwick tree
    def _ftSum(self, j):
        if (j < 0): return 0.0
        if (j >= self.nclusters): return self.mass()
        return self._csum[j]

    def cdf(self, xx):
        """
        Return CDF(x) of a numeric value x, with respect to this TDigest CDF sketch.
        """
        x = float(xx)
        j1 = self._rcovj(x)
        if (j1 < 0): return 0.0
        if (j1 >= self.nclusters - 1): return 1.0
        j2 = j1 + 1
        c1 = self._cent[j1]
        c2 = self._cent[j2]
        tm1 = self._mass[j1]
        tm2 = self._mass[j2]
        s = self._ftSum(j1 - 1)
        d1 = 0.0 if (j1 == 0) else tm1 / 2.0
        m1 = s + d1
        m2 = m1 + (tm1 - d1) + (tm2 if (j2 == self.nclusters - 1) else tm2 / 2.0)
        m = m1 + (x - c1) * (m2 - m1) / (c2 - c1)
        return min(m2, max(m1, m)) / self.mass()

    def cdfInverse(self, qq):
        """
        Given a value q on [0,1], return the value x such that CDF(x) = q.
        Returns NaN for any q > 1 or < 0, or if this TDigest is empty.
        """
        q = float(qq)
        if (q < 0.0) or (q > 1.0): return float('nan')
        if (self.nclusters == 0): return float('nan')
        if (self.nclusters == 1): return self._cent[0]
        if (q == 0.0): return self._cent[0]
        if (q == 1.0): return self._cent[self.nclusters - 1]
        m = q * self.mass()
        j1 = self._rmcovj(m)
        j2 = j1 + 1
        c1 = self._cent[j1]
        c2 = self._cent[j2]
        tm1 = self._mass[j1]
        tm2 = self._mass[j2]
        s = self._ftSum(j1 - 1)
        d1 = 0.0 if (j1 == 0) else tm1 / 2.0
        m1 = s + d1
        m2 = m1 + (tm1 - d1) + (tm2 if (j2 == self.nclusters - 1) else tm2 / 2.0)
        x = c1 + (m - m1) * (c2 - c1) / (m2 - m1)
        return min(c2, max(c1, x))

    def cdfDiscrete(self, xx):
        """
        Return CDF(x) for a numeric value x, assuming the sketch is representing a
        discrete distribution.
        """
        x = float(xx)
        j = self._rcovj(x)
        return self._ftSum(j) / self.mass()

    def cdfDiscreteInverse(self, qq):
        """
        Given a value q on [0,1], return the value x such that CDF(x) = q, assuming
        the sketch is representing a discrete distribution.
        Returns NaN for any q > 1 or < 0, or if this TDigest is empty.
        """
        q = float(qq)
        if (q < 0.0) or (q > 1.0): return float('nan')
        if self.nclusters == 0: return float('nan')
        if self.nclusters == 1: return self._cent[0]
        m = q * self.mass()
        j = self._lmcovj(m)
        return self._cent[j]

    def samplePDF(self):
        """
        Return a random sampling from the sketched distribution, using inverse
        transform sampling, assuming a continuous distribution.
        """
        return self.cdfInverse(random.random())

    def samplePMF(self):
        """
        Return a random sampling from the sketched distribution, using inverse
        transform sampling, assuming a discrete distribution.
        """
        return self.cdfDiscreteInverse(random.random())

    def sample(self):
        """
        Return a random sampling from the sketched distribution, using inverse
        transform sampling, assuming a discrete distribution if the number of
        TDigest clusters is <= maxDiscrete, and a continuous distribution otherwise.
        """
        if self.nclusters <= self.maxDiscrete:
            return self.cdfDiscreteInverse(random.random())
        return self.cdfInverse(random.random())
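

# --- Added sketch (an assumption, not part of the original module): building a
# --- TDigest by hand to exercise the read-only query API above. Real digests
# --- normally come out of the aggregating UDFs.
def _tdigest_query_sketch():
    td = TDigest(compression=0.5, maxDiscrete=0,
                 cent=[1.0, 2.0, 3.0, 4.0], mass=[1.0, 1.0, 1.0, 1.0])
    assert td.size() == 4 and td.mass() == 4.0
    print(td.cdf(2.5))          # CDF interpolated between clusters 2.0 and 3.0
    print(td.cdfInverse(0.5))   # approximate median via inverse CDF
    print(td.cdfDiscrete(2.0))  # CDF treating clusters as discrete points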
| apache-2.0 | -8,566,851,161,304,078,000 | 38.981333 | 106 | 0.649503 | false |
ntuecon/pubfin | economy/agents.py | 1 | 5147 | '''
Created on Apr 16, 2018
@author: Hendrik Rommeswinkel
'''
import numpy as np
from scipy.optimize import minimize
from utility import Utility,Profit,GoodFactorUtility,CESUtility
from technology import LinearTechnology
from technology import DRSTechnology
class Agent(object):
    '''
    An agent contains an objective. When asked to optimize(), the agent maximizes the objective given constraints and bounds on the variables.
    '''

    def __init__(self, objective=Utility(), env=dict()):
        '''
        Constructor
        '''
        self.objective = objective
        # Perhaps the env should instead be an argument to optimize()?
        # In case an environment is provided, use this environment
        self.env = env
        # The problemsize needs to be manually rewritten in case it is not equal to 1
        self.problemsize = 1

    def optimize(self, bounds=None, constraints=()):
        # The env tells us how large the dimension of the initial guess has to be
        x0 = np.ones(self.problemsize)
        opt = minimize(self.objective, x0, args=self.env, method='SLSQP', bounds=bounds,
                       constraints=constraints + self.env.get('constraints', ()))
        if opt['success'] == 0:
            # If the optimization problem could not be solved, we need to raise an error.
            raise ValueError("Optimization problem could not be solved.")
        return opt['x']


class Consumer(Agent):
    '''
    A consumer is an agent who has a utility function as the objective and no internal constraints.
    Setting env is required as there are both goods and factors to be chosen.
    Constraints for the consumer need to be supplied by the economy.
    '''

    def __init__(self, objective=Utility(), env=dict()):
        '''
        Constructor
        '''
        self.env = {'constraints': (), }
        self.env.update(env)
        self.problemsize = len(self.env['goods']) + len(self.env['factors'])
        self.objective = objective

    def consume(self, c, env=dict()):
        # We update the env by env keys already in self.env
        env.update(self.env)
        if not 'transfers' in env:
            env['transfers'] = np.zeros(self.problemsize)
            print "No transfers found"
        return self.objective(c + env['transfers'])

    def optimize(self, bounds=None, constraints=(), env=dict()):
        x0 = np.ones(self.problemsize)
        env.update(self.env)
        opt = minimize(self.consume, x0, args=(env,), method='SLSQP', bounds=bounds,
                       constraints=constraints + env['constraints'])
        if opt['success'] == 0:
            raise ValueError("Optimization problem could not be solved.")
        print opt
        return opt['x']


class Producer(Agent):
    '''
    A producer is an agent who has a technology as a constraint and maximizes payoffs.
    The economy needs to supply prices.
    '''

    def __init__(self, objective=Profit(), technology=None, env=None):
        '''
        Constructor
        '''
        self.objective = objective
        if technology == None:
            technology = DRSTechnology(env)
        self.constraints = [{'type': 'ineq', 'fun': technology}]
        # In case an environment is provided, use this environment
        self.env = env
        self.problemsize = len(self.env['goods']) + len(self.env['factors'])
        '''
        For a producer, optimization is slightly different since for a linear technology the optimum is not unique.
        '''
        if isinstance(technology, LinearTechnology):
            raise ValueError("No support for linear technologies yet")
        else:
            pass


class Government(Agent):
    '''
    The government maximizes a social welfare function. We assume a utilitarian SWF.
    '''

    def __init__(self, objective, env=None):
        '''
        Constructor
        '''
        self.objective = objective
        self.constraints = {}
        # In case an environment is provided, use this environment
        self.env = env
        # The problem size for the government is the number of consumers among whom to do lump-sum transfers.
        # We only need to redistribute a single good lump-sum for a well-behaved problem. More generally, we could redistribute all goods lump-sum.
        self.problemsize = len(self.env['consumers'])


conradsPreferences = {'scale': -1,
                      'shift': 0,
                      'elasticity': -2,
                      'exponent': .5,
                      'weights': np.array([2, 1, 1, 1, 1])
                      }

environment = {'goods': [1, 2, 3, 4, 5], 'factors': [], 'transfers': np.array([0, .3, 0, 0, 0])}

conrad = Consumer(objective=CESUtility(conradsPreferences), env=environment)

conradsConstraint = ({'type': 'eq',
                      'fun': lambda x: np.sum(x) - 7},)

result = conrad.optimize(constraints=conradsConstraint)
print result
# Results are numerically quite imprecise. In the above example, in principle the consumption of
# goods 2 and 3 should differ only by .3 | bsd-3-clause | 151,333,219,942,449,920 | 37.914729 | 146 | 0.608316 | false |
otraczyk/gsevol-web | bindings/urec.py | 1 | 1651 | # -*- coding: utf-8 -*-
"""Bindings related to unrooted recocilation.
Using `fasturec` and `gsevol`.
"""
import tempfile
from collections import defaultdict
from bindings.base import launch
from bindings import gsevol as Gse
from bindings.utils import wrap_in_tempfile
def launch_fasturec(params, timeout=300, stdin=None, *args, **kwargs):
    return launch(['lib/fasturec/fasturec'], params, timeout, stdin, *args, **kwargs)


def launch_urec(params, timeout=300, stdin=None, *args, **kwargs):
    return launch(['lib/urec/urec'], params, timeout, stdin, *args, **kwargs)


def draw_unrooted(gene, species, cost, options=''):
    assert cost in ("DL", "D", "L", "DC", "RF"), "Wrong cost function: %s" % cost
    fu_command = ['-g %s' % gene, '-s %s' % species, '-bX%s' % cost]
    fu_output = launch_fasturec(fu_command)
    fu_out_file = wrap_in_tempfile(fu_output)
    # Fasturec and gsevol outputs interfere and damage the picture if it's
    # printed to stdout.
    tmp = tempfile.NamedTemporaryFile()
    gse_command = ['-dSz', '-C arrowlength=0.4;scale=2;outputfile="%s"; %s' % (tmp.name, options)]
    Gse.launch(gse_command, stdin=fu_out_file)
    gse_output = tmp.read()
    return gse_output


def optimal_rootings(gene, species, cost):
    """Generate a list of representations of optimal rootings of a gene tree
    for a given cost function (default=...?).
    """
    cost_to_opt = defaultdict(lambda: "a5", {
        "DL": "a8",
    })
    command = ['-g %s' % gene, '-s %s' % species, '-b', '-%s' % cost_to_opt[cost]]
    output = launch_urec(command)
    rootings = [r.strip() for r in output.strip().split('\n')]
    return rootings
| mit | -6,543,209,536,789,756,000 | 38.309524 | 98 | 0.651726 | false |
codesociety/friartuck | friartuck/Robinhood/trade_history_downloader.py | 1 | 2942 | """
MIT License
Copyright (c) 2017 Code Society
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import json
import csv
import shelve
from Robinhood import Robinhood
def get_symbol_from_instrument_url(rb_client, url, db):
    instrument = {}
    if url in db:
        instrument = db[url]
    else:
        db[url] = fetch_json_by_url(rb_client, url)
        instrument = db[url]
    return instrument['symbol']


def fetch_json_by_url(rb_client, url):
    return rb_client.session.get(url).json()


def order_item_info(order, rb_client, db):
    # side: .side, price: .average_price, shares: .cumulative_quantity, instrument: .instrument, date: .last_transaction_at
    symbol = get_symbol_from_instrument_url(rb_client, order['instrument'], db)
    return {
        'side': order['side'],
        'price': order['average_price'],
        'shares': order['cumulative_quantity'],
        'symbol': symbol,
        'date': order['last_transaction_at'],
        'state': order['state']
    }


def get_all_history_orders(rb_client):
    orders = []
    past_orders = rb_client.order_history()
    orders.extend(past_orders['results'])
    while past_orders['next']:
        print("{} order fetched".format(len(orders)))
        next_url = past_orders['next']
        past_orders = fetch_json_by_url(rb_client, next_url)
        orders.extend(past_orders['results'])
    print("{} order fetched".format(len(orders)))
    return orders


rb = Robinhood()
# !!!!!! Change the username and password; be careful when pasting this code anywhere public.
rb.login(username="name", password="pass")
past_orders = get_all_history_orders(rb)

instruments_db = shelve.open('instruments.db')
orders = [order_item_info(order, rb, instruments_db) for order in past_orders]
keys = ['side', 'symbol', 'shares', 'price', 'date', 'state']
with open('orders.csv', 'w') as output_file:
    dict_writer = csv.DictWriter(output_file, keys)
    dict_writer.writeheader()
    dict_writer.writerows(orders) | mit | -2,211,343,214,784,098,300 | 35.320988 | 124 | 0.704623 | false |
kevinjqiu/mockingjay | mockingjay/matcher.py | 1 | 2117 | import abc
import base64
import re
class StringOrPattern(object):
    """
    A decorator object that wraps a string or a regex pattern so that it can
    be compared against another string either literally or using the pattern.
    """

    def __init__(self, subject):
        self.subject = subject

    def __eq__(self, other_str):
        if isinstance(self.subject, re._pattern_type):
            return self.subject.search(other_str) is not None
        else:
            return self.subject == other_str

    def __hash__(self):
        return self.subject.__hash__()


class Matcher(object):
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def assert_request_matched(self, request):
        """
        Assert that the request matched the spec in this matcher object.
        """


class HeaderMatcher(Matcher):
    """
    Matcher for the request's header.

    :param key: the name of the header
    :param value: the value of the header
    """

    def __init__(self, key, value):
        self.key = key
        self.value = StringOrPattern(value)

    def assert_request_matched(self, request):
        assert request.headers.get(self.key) == self.value


class ContentTypeMatcher(HeaderMatcher):
    """
    Matcher for the request's content type
    """

    def __init__(self, content_type):
        super(ContentTypeMatcher, self).__init__('content-type', content_type)


class BasicAuthUserMatcher(HeaderMatcher):
    """
    Matcher for the request's basic auth user
    """

    def __init__(self, user, password):
        value = "%s:%s" % (user, '' if not password else password)
        self.key = 'authorization'
        # expect an exact match
        # therefore, not wrapping it in StringOrPattern
        self.value = 'Basic %s' % base64.b64encode(value)


class BodyMatcher(Matcher):
    """
    Matcher for the request body.

    :param body: can either be a string or a :class:`_sre.SRE_Pattern`: object
    """

    def __init__(self, body):
        self.body = StringOrPattern(body)

    def assert_request_matched(self, request):
        assert request.body == self.body | bsd-3-clause | 7,529,588,538,709,815,000 | 25.4625 | 78 | 0.629192 | false |
dougalsutherland/py-sdm | sdm/mp_utils.py | 1 | 5214 | '''
Some convenience methods for use with multiprocessing.Pool.
'''
from __future__ import division, print_function
from contextlib import contextmanager
import itertools
import multiprocessing as mp
import os
import random
import string
from .utils import strict_map, imap, izip
def _apply(func_args):
    func, args = func_args
    return func(*args)


### Dummy implementation of (some of) multiprocessing.Pool that doesn't even
### thread (unlike multiprocessing.dummy).

class ImmediateResult(object):
    "Duck-type like multiprocessing.pool.MapResult."

    def __init__(self, value):
        self.value = value

    def get(self, timeout=None):
        return self.value

    def wait(self, timeout=None):
        pass

    def ready(self):
        return True

    def successful(self):
        return True


class DummyPool(object):
    "Duck-type like multiprocessing.Pool, mostly."

    def close(self):
        pass

    def join(self):
        pass

    def apply_async(self, func, args, kwds=None, callback=None):
        val = func(*args, **(kwds or {}))
        callback(val)
        return ImmediateResult(val)

    def map(self, func, args, chunksize=None):
        return strict_map(func, args)

    def imap(self, func, args, chunksize=None):
        return imap(func, args)

    def imap_unordered(self, func, args, chunksize=None):
        return imap(func, args)


def patch_starmap(pool):
    '''
    A function that adds the equivalent of multiprocessing.Pool.starmap
    to a given pool if it doesn't have the function.
    '''
    if hasattr(pool, 'starmap'):
        return

    def starmap(func, iterables):
        return pool.map(_apply, izip(itertools.repeat(func), iterables))
    pool.starmap = starmap


def make_pool(n_proc=None):
    "Makes a multiprocessing.Pool or a DummyPool depending on n_proc."
    pool = DummyPool() if n_proc == 1 else mp.Pool(n_proc)
    patch_starmap(pool)
    return pool


@contextmanager
def get_pool(n_proc=None):
    "A context manager that opens a pool and joins it on exit."
    pool = make_pool(n_proc)
    yield pool
    pool.close()
    pool.join()
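
# --- Added usage sketch (an assumption, not part of the original module):
# --- patch_starmap and get_pool together. With n_proc=1 this stays in-process
# --- via DummyPool; with None it forks one worker per CPU, in which case the
# --- mapped function must be picklable (defined at module top level).
# def _add(a, b):
#     return a + b
# with get_pool(n_proc=1) as pool:
#     print(pool.starmap(_add, [(1, 2), (3, 4)]))   # -> [3, 7]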
### A helper for letting the forked processes use data without pickling.

_data_name_cands = (
    '_data_' + ''.join(random.sample(string.ascii_lowercase, 10))
    for _ in itertools.count())


class ForkedData(object):
    '''
    Class used to pass data to child processes in multiprocessing without
    really pickling/unpickling it. Only works on POSIX.

    Intended use:
        - The master process makes the data somehow, and does e.g.
            data = ForkedData(the_value)
        - The master makes sure to keep a reference to the ForkedData object
          until the children are all done with it, since the global reference
          is deleted to avoid memory leaks when the ForkedData object dies.
        - Master process constructs a multiprocessing.Pool *after*
          the ForkedData construction, so that the forked processes
          inherit the new global.
        - Master calls e.g. pool.map with data as an argument.
        - Child gets the real value through data.value, and uses it read-only.
          Modifying it won't crash, but changes won't be propagated back to the
          master or to other processes, since it's copy-on-write.
    '''
    # TODO: more flexible garbage collection options

    def __init__(self, val):
        g = globals()
        self.name = next(n for n in _data_name_cands if n not in g)
        g[self.name] = val
        self.master_pid = os.getpid()

    def __getstate__(self):
        if os.name != 'posix':
            raise RuntimeError("ForkedData only works on OSes with fork()")
        return self.__dict__

    @property
    def value(self):
        return globals()[self.name]

    def __del__(self):
        if os.getpid() == self.master_pid:
            del globals()[self.name]
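
# --- Added usage sketch (an assumption, not part of the original module),
# --- following the "Intended use" steps in the docstring above:
# def _total(data):              # runs in the child; reads the inherited global
#     return sum(data.value)
# big = list(range(10 ** 6))
# data = ForkedData(big)         # 1. wrap the value before the pool exists
# with get_pool() as pool:       # 2. fork workers, which inherit the global
#     print(pool.map(_total, [data] * 4))   # 3. children read data.value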
### Progress-bar handling with multiprocessing pools

def progress(counter=True, **kwargs):
    import progressbar as pb
    try:
        widgets = kwargs.pop('widgets')
    except KeyError:
        # TODO: make work when maxval is unknown
        if counter:
            class CommaProgress(pb.Widget):
                def update(self, pbar):
                    return '{:,} of {:,}'.format(pbar.currval, pbar.maxval)

            widgets = [' ', CommaProgress(), ' (', pb.Percentage(), ') ']
        else:
            widgets = [' ', pb.Percentage(), ' ']
        widgets.extend([pb.Bar(), ' ', pb.ETA()])
    return pb.ProgressBar(widgets=widgets, **kwargs)


def progressbar_and_updater(*args, **kwargs):
    pbar = progress(*args, **kwargs).start()
    counter = itertools.count(1)

    def update_pbar():
        pbar.update(next(counter))
        # race conditions mean the pbar might be updated backwards on
        # occasion, but the highest count ever seen will be right.
    return pbar, update_pbar


def map_unordered_with_progressbar(pool, func, jobs):
    pbar, tick_pbar = progressbar_and_updater(maxval=len(jobs))
    callback = lambda result: tick_pbar()
    results = [pool.apply_async(func, job, callback=callback) for job in jobs]
    values = [r.get() for r in results]
    pbar.finish()
    return values
| bsd-3-clause | 6,525,598,132,934,458,000 | 29.138728 | 79 | 0.641158 | false |
DjangoQuilla/temii | votos/urls.py | 2 | 1862 | """votos URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
    1. Add an import: from my_app import views
    2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
    1. Add an import: from other_app.views import Home
    2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
    1. Add an import: from blog import urls as blog_urls
    2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth.views import logout
from apps.votos.views import (
    ListarEstadoView, ListarAgendadoView, ListarFinalizadoView,
    RegistrarCharlaView, DetalleCharlaView, ListarFaqView
)
from apps.votos.views import VotoView, login

urlpatterns = [
    url(r'^$', ListarEstadoView.as_view(), name='index'),
    url(r'^agendado$', ListarAgendadoView.as_view(), name='agendado'),
    url(r'^finalizado$', ListarFinalizadoView.as_view(), name='finalizado'),
    url(r'^faq$', ListarFaqView.as_view(), name='faq'),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^registrar_charla$',
        RegistrarCharlaView.as_view(),
        name='registrar_charla'),
    url(r'^votar/(?P<charla>\d+)$', VotoView.as_view(), name='votar'),
    url(r'^posible-charla/(?P<pk>\d+)$',
        DetalleCharlaView.as_view(),
        name='detalle_charla'),

    # Python Social Auth URLs
    # url('', include('social.apps.django_app.urls', namespace='social')),
    url(r'^oauth/', include('social_django.urls', namespace='social')),
    url(r'^login', login, name="login"),
    url(r'^users/logout/$', logout,
        {'next_page': '/'},
        name="user-logout"),
]
| apache-2.0 | 9,124,189,688,081,066,000 | 37 | 77 | 0.668099 | false |
shingonoide/odoo_ezdoo | addons/website_maintenance/controllers/main.py | 1 | 2661 | import logging
from openerp.http import request
import werkzeug
from openerp.addons.web import http
from openerp.addons.website.controllers.main import Website
logger = logging.getLogger(__name__)
class WebsiteMaintenance(Website):
    def is_maintenance_mode(self):
        is_on = ['on', '1', 'true', 'yes']
        maintenance_mode = request.registry['ir.config_parameter'].get_param(
            request.cr, request.uid, 'website.maintenance_mode')
        logger.debug("maintenance_mode value: %s" % (maintenance_mode))
        if maintenance_mode in is_on:
            logger.info("Maintenance mode on")
            if not request.uid:
                logger.info("No uid, requesting public auth")
                self._auth_method_public()
            ir_model = request.env['ir.model.data'].sudo()
            allowed_group = ir_model.get_object('base',
                                                'group_website_designer')
            if allowed_group in request.env.user.groups_id:
                logger.info("Maintenance mode off for user_id: %s" %
                            (request.env.user.id))
                return
            code = 503
            status_message = request.registry['ir.config_parameter'].get_param(
                request.cr, request.uid, 'website.maintenance_message',
                "We're under maintenance right now")
            values = {
                'status_message': status_message,
                'status_code': code,
                'company_email': request.env.user.company_id.email
            }
            logger.debug(values)
            try:
                html = request.website._render('website_maintenance.%s' %
                                               code, values)
            except Exception:
                html = request.website._render('website.http_error', values)
            return werkzeug.wrappers.Response(html, status=code,
                                              content_type='text/html;charset=utf-8')

    @http.route('/', type='http', auth="public", website=True)
    def index(self, **kw):
        is_maintenance_mode = self.is_maintenance_mode()
        if not is_maintenance_mode:
            return super(WebsiteMaintenance, self).index()
        else:
            return is_maintenance_mode

    @http.route('/page/<page:page>', type='http', auth="public", website=True)
    def page(self, page, **opts):
        is_maintenance_mode = self.is_maintenance_mode()
        if not is_maintenance_mode:
            return super(WebsiteMaintenance, self).page(page)
        else:
            return is_maintenance_mode | agpl-3.0 | -3,009,250,158,980,615,000 | 38.716418 | 79 | 0.550169 | false |
Belxjander/Kirito | SnowStorm/indra/fix-incredibuild.py | 1 | 2220 | #!/usr/bin/env python
##
## $LicenseInfo:firstyear=2011&license=viewerlgpl$
## Second Life Viewer Source Code
## Copyright (C) 2011, Linden Research, Inc.
##
## This library is free software; you can redistribute it and/or
## modify it under the terms of the GNU Lesser General Public
## License as published by the Free Software Foundation;
## version 2.1 of the License only.
##
## This library is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public
## License along with this library; if not, write to the Free Software
## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
##
## Linden Research, Inc., 945 Battery Street, San Francisco, CA 94111 USA
## $/LicenseInfo$
import sys
import os
import glob
def delete_file_types(path, filetypes):
    if os.path.exists(path):
        print 'Cleaning: ' + path
        orig_dir = os.getcwd()
        os.chdir(path)
        filelist = []
        for type in filetypes:
            filelist.extend(glob.glob(type))
        for file in filelist:
            os.remove(file)
        os.chdir(orig_dir)


def main():
    build_types = ['*.exp', '*.exe', '*.pdb', '*.idb',
                   '*.ilk', '*.lib', '*.obj', '*.ib_pdb_index']
    pch_types = ['*.pch']
    delete_file_types("build-vc80/newview/Release", build_types)
    delete_file_types("build-vc80/newview/secondlife-bin.dir/Release/",
                      pch_types)
    delete_file_types("build-vc80/newview/RelWithDebInfo", build_types)
    delete_file_types("build-vc80/newview/secondlife-bin.dir/RelWithDebInfo/",
                      pch_types)
    delete_file_types("build-vc80/newview/Debug", build_types)
    delete_file_types("build-vc80/newview/secondlife-bin.dir/Debug/",
                      pch_types)
    delete_file_types("build-vc80/test/RelWithDebInfo", build_types)
    delete_file_types("build-vc80/test/test.dir/RelWithDebInfo/",
                      pch_types)


if __name__ == "__main__":
    main()
| gpl-3.0 | 4,947,040,908,959,312,000 | 35.393443 | 81 | 0.651351 | false |
pratikgujjar/DeepIntent | code/autoencoder_model/scripts/ds_autoencoder.py | 1 | 15225 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import hickle as hkl
import numpy as np
np.random.seed(2 ** 10)
from keras import backend as K
K.set_image_dim_ordering('tf')
from keras.layers import Dropout
from keras.models import Sequential
from keras.layers.core import Activation
from keras.utils.vis_utils import plot_model
from keras.layers.wrappers import TimeDistributed
from keras.layers.convolutional import Conv2D
from keras.layers.convolutional import Conv2DTranspose
from keras.layers.convolutional import Conv3D
from keras.layers.convolutional import Conv3DTranspose
from keras.layers.convolutional_recurrent import ConvLSTM2D
from keras.layers.normalization import BatchNormalization
from keras.layers.core import Reshape
from keras.callbacks import LearningRateScheduler
from keras.layers.advanced_activations import LeakyReLU
from config_ds import *
import tb_callback
import lrs_callback
import argparse
import math
import os
import cv2
from sys import stdout
def encoder_model():
    model = Sequential()

    # 10x64x64
    model.add(Conv3D(filters=256,
                     strides=(1, 2, 2),
                     kernel_size=(3, 11, 11),
                     padding='same',
                     input_shape=(int(VIDEO_LENGTH / 2), 64, 64, 1)))
    model.add(TimeDistributed(BatchNormalization()))
    model.add(TimeDistributed(LeakyReLU(alpha=0.2)))
    model.add(TimeDistributed(Dropout(0.5)))

    # 10x32x32
    # model.add(Conv3D(filters=128,
    #                  strides=(1, 2, 2),
    #                  kernel_size=(3, 5, 5),
    #                  padding='same'))
    # model.add(TimeDistributed(BatchNormalization()))
    # model.add(TimeDistributed(LeakyReLU(alpha=0.2)))
    # model.add(TimeDistributed(Dropout(0.5)))

    # 10x32x32
    model.add(Conv3D(filters=128,
                     strides=(1, 2, 2),
                     kernel_size=(3, 5, 5),
                     padding='same'))
    model.add(TimeDistributed(BatchNormalization()))
    model.add(TimeDistributed(LeakyReLU(alpha=0.2)))
    model.add(TimeDistributed(Dropout(0.5)))

    return model
def decoder_model():
    model = Sequential()

    # 10x32x32
    model.add(Conv3DTranspose(filters=64,
                              kernel_size=(3, 5, 5),
                              padding='same',
                              strides=(1, 1, 1),
                              input_shape=(10, 16, 16, 128)))
    model.add(TimeDistributed(BatchNormalization()))
    # model.add(TimeDistributed(Activation('relu')))
    model.add(TimeDistributed(LeakyReLU(alpha=0.2)))
    model.add(TimeDistributed(Dropout(0.5)))

    # 10x64x64
    model.add(Conv3DTranspose(filters=128,
                              kernel_size=(3, 5, 5),
                              padding='same',
                              strides=(1, 2, 2)))
    model.add(TimeDistributed(BatchNormalization()))
    # model.add(TimeDistributed(Activation('relu')))
    model.add(TimeDistributed(LeakyReLU(alpha=0.2)))
    model.add(TimeDistributed(Dropout(0.5)))

    # 10x64x64
    model.add(Conv3DTranspose(filters=64,
                              kernel_size=(3, 5, 5),
                              padding='same',
                              strides=(1, 2, 2)))
    model.add(TimeDistributed(BatchNormalization()))
    # model.add(TimeDistributed(Activation('relu')))
    model.add(TimeDistributed(LeakyReLU(alpha=0.2)))
    model.add(TimeDistributed(Dropout(0.5)))

    # 10x128x128
    model.add(Conv3DTranspose(filters=1,
                              kernel_size=(3, 5, 5),
                              strides=(1, 1, 1),
                              padding='same'))
    model.add(TimeDistributed(BatchNormalization()))
    model.add(TimeDistributed(Activation('tanh')))
    model.add(TimeDistributed(Dropout(0.5)))

    return model
def set_trainability(model, trainable):
    model.trainable = trainable
    for layer in model.layers:
        layer.trainable = trainable


def autoencoder_model(encoder, decoder):
    model = Sequential()
    model.add(encoder)
    model.add(decoder)
    return model
def combine_images(X, y, generated_images):
    # Unroll all generated video frames
    n_frames = generated_images.shape[0] * generated_images.shape[1]
    frames = np.zeros((n_frames,) + generated_images.shape[2:], dtype=generated_images.dtype)

    frame_index = 0
    for i in range(generated_images.shape[0]):
        for j in range(generated_images.shape[1]):
            frames[frame_index] = generated_images[i, j]
            frame_index += 1

    num = frames.shape[0]
    width = int(math.sqrt(num))
    height = int(math.ceil(float(num) / width))
    shape = frames.shape[1:]
    image = np.zeros((height * shape[0], width * shape[1], shape[2]), dtype=generated_images.dtype)
    for index, img in enumerate(frames):
        i = int(index / width)
        j = index % width
        image[i * shape[0]:(i + 1) * shape[0], j * shape[1]:(j + 1) * shape[1], :] = img

    n_frames = X.shape[0] * X.shape[1]
    orig_frames = np.zeros((n_frames,) + X.shape[2:], dtype=X.dtype)

    # Original frames
    frame_index = 0
    for i in range(X.shape[0]):
        for j in range(X.shape[1]):
            orig_frames[frame_index] = X[i, j]
            frame_index += 1

    num = orig_frames.shape[0]
    width = int(math.sqrt(num))
    height = int(math.ceil(float(num) / width))
    shape = orig_frames.shape[1:]
    orig_image = np.zeros((height * shape[0], width * shape[1], shape[2]), dtype=X.dtype)
    for index, img in enumerate(orig_frames):
        i = int(index / width)
        j = index % width
        orig_image[i * shape[0]:(i + 1) * shape[0], j * shape[1]:(j + 1) * shape[1], :] = img

    # Ground truth
    truth_frames = np.zeros((n_frames,) + y.shape[2:], dtype=y.dtype)
    frame_index = 0
    for i in range(y.shape[0]):
        for j in range(y.shape[1]):
            truth_frames[frame_index] = y[i, j]
            frame_index += 1

    num = truth_frames.shape[0]
    width = int(math.sqrt(num))
    height = int(math.ceil(float(num) / width))
    shape = truth_frames.shape[1:]
    truth_image = np.zeros((height * shape[0], width * shape[1], shape[2]), dtype=y.dtype)
    for index, img in enumerate(truth_frames):
        i = int(index / width)
        j = index % width
        truth_image[i * shape[0]:(i + 1) * shape[0], j * shape[1]:(j + 1) * shape[1], :] = img

    return orig_image, truth_image, image
def load_weights(weights_file, model):
    model.load_weights(weights_file)


def run_utilities(encoder, decoder, autoencoder, ENC_WEIGHTS, DEC_WEIGHTS):
    if PRINT_MODEL_SUMMARY:
        print (encoder.summary())
        print (decoder.summary())
        print (autoencoder.summary())
        # exit(0)

    # Save model to file
    if SAVE_MODEL:
        print ("Saving models to file...")
        model_json = encoder.to_json()
        with open(os.path.join(MODEL_DIR, "encoder.json"), "w") as json_file:
            json_file.write(model_json)

        model_json = decoder.to_json()
        with open(os.path.join(MODEL_DIR, "decoder.json"), "w") as json_file:
            json_file.write(model_json)

        model_json = autoencoder.to_json()
        with open(os.path.join(MODEL_DIR, "autoencoder.json"), "w") as json_file:
            json_file.write(model_json)

    if PLOT_MODEL:
        plot_model(encoder, to_file=os.path.join(MODEL_DIR, 'encoder.png'), show_shapes=True)
        plot_model(decoder, to_file=os.path.join(MODEL_DIR, 'decoder.png'), show_shapes=True)
        plot_model(autoencoder, to_file=os.path.join(MODEL_DIR, 'autoencoder.png'), show_shapes=True)

    if ENC_WEIGHTS != "None":
        print ("Pre-loading encoder with weights...")
        load_weights(ENC_WEIGHTS, encoder)
    if DEC_WEIGHTS != "None":
        print ("Pre-loading decoder with weights...")
        load_weights(DEC_WEIGHTS, decoder)


def load_X(videos_list, index, data_dir):
    X = np.zeros((BATCH_SIZE, VIDEO_LENGTH,) + IMG_SIZE)
    for i in range(BATCH_SIZE):
        for j in range(VIDEO_LENGTH):
            filename = "frame_" + str(videos_list[(index * BATCH_SIZE + i), j]) + ".png"
            im_file = os.path.join(data_dir, filename)
            try:
                frame = cv2.imread(im_file, cv2.IMREAD_COLOR)
                X[i, j] = (frame.astype(np.float32) - 127.5) / 127.5
            except AttributeError as e:
                print (im_file)
                print (e)
    return X
def train(BATCH_SIZE, ENC_WEIGHTS, DEC_WEIGHTS):
    print ("Loading data...")
    mnist = np.load(os.path.join(DATA_DIR, 'mnist_test_seq.npy'))
    mnist = np.expand_dims(mnist, axis=4)

    # Build the Spatio-temporal Autoencoder
    print ("Creating models...")
    encoder = encoder_model()
    decoder = decoder_model()
    autoencoder = autoencoder_model(encoder, decoder)

    run_utilities(encoder, decoder, autoencoder, ENC_WEIGHTS, DEC_WEIGHTS)
    autoencoder.compile(loss='mean_squared_error', optimizer=OPTIM)

    NB_ITERATIONS = int(mnist.shape[1] / BATCH_SIZE)

    # Setup TensorBoard Callback
    TC = tb_callback.TensorBoard(log_dir=TF_LOG_DIR, histogram_freq=0, write_graph=False, write_images=False)
    # LRS = lrs_callback.LearningRateScheduler(schedule=schedule)
    # LRS.set_model(autoencoder)

    print ("Beginning Training...")
    # Begin Training
    for epoch in range(NB_EPOCHS):
        print("\n\nEpoch ", epoch)
        loss = []

        # Set learning rate every epoch
        # LRS.on_epoch_begin(epoch=epoch)
        lr = K.get_value(autoencoder.optimizer.lr)
        print ("Learning rate: " + str(lr))

        for index in range(NB_ITERATIONS):
            # Train Autoencoder: first half of each video is the input,
            # second half is the reconstruction target
            X_train = np.zeros(shape=(10, 10, 64, 64, 1))
            y_train = np.zeros(shape=(10, 10, 64, 64, 1))
            for i in range(BATCH_SIZE):
                X_train[i] = mnist[0: int(VIDEO_LENGTH / 2), index + i]
                y_train[i] = mnist[int(VIDEO_LENGTH / 2): VIDEO_LENGTH, index + i]
            X_train = (X_train.astype(np.float32) - 127.5) / 127.5
            y_train = (y_train.astype(np.float32) - 127.5) / 127.5
            loss.append(autoencoder.train_on_batch(X_train, y_train))

            arrow = int(index / (NB_ITERATIONS / 40))
            stdout.write("\rIteration: " + str(index) + "/" + str(NB_ITERATIONS - 1) + " " +
                         "loss: " + str(loss[len(loss) - 1]) +
                         "\t [" + "{0}>".format("=" * (arrow)))
            stdout.flush()

            if SAVE_GENERATED_IMAGES:
                # Save generated images to file
                predicted_images = autoencoder.predict(X_train, verbose=0)
                orig_image, truth_image, pred_image = combine_images(X_train, y_train, predicted_images)
                pred_image = pred_image * 127.5 + 127.5
                orig_image = orig_image * 127.5 + 127.5
                truth_image = truth_image * 127.5 + 127.5
                if epoch == 0:
                    cv2.imwrite(os.path.join(GEN_IMAGES_DIR, str(epoch) + "_" + str(index) + "_orig.png"), orig_image)
                    cv2.imwrite(os.path.join(GEN_IMAGES_DIR, str(epoch) + "_" + str(index) + "_truth.png"), truth_image)
                cv2.imwrite(os.path.join(GEN_IMAGES_DIR, str(epoch) + "_" + str(index) + ".png"), pred_image)

        # After each epoch, log the average loss
        avg_loss = sum(loss) / len(loss)
        logs = {'loss': avg_loss}
        TC.on_epoch_end(epoch, logs)

        # Log the losses
        with open(os.path.join(LOG_DIR, 'losses.json'), 'a') as log_file:
            log_file.write("{\"epoch\":%d, \"d_loss\":%f};\n" % (epoch, avg_loss))
        print("\nAvg loss: " + str(avg_loss))

        # Save model weights per epoch to file
        encoder.save_weights(os.path.join(CHECKPOINT_DIR, 'encoder_epoch_' + str(epoch) + '.h5'), True)
        decoder.save_weights(os.path.join(CHECKPOINT_DIR, 'decoder_epoch_' + str(epoch) + '.h5'), True)

    # End TensorBoard Callback
    TC.on_train_end('_')
def test(ENC_WEIGHTS, DEC_WEIGHTS):
    # Create models
    print ("Creating models...")
    encoder = encoder_model()
    decoder = decoder_model()
    autoencoder = autoencoder_model(encoder, decoder)
    run_utilities(encoder, decoder, autoencoder, ENC_WEIGHTS, DEC_WEIGHTS)
    autoencoder.compile(loss='mean_squared_error', optimizer=OPTIM)

    # Build video progressions
    frames_source = hkl.load(os.path.join(TEST_DATA_DIR, 'sources_test_128.hkl'))
    videos_list = []
    start_frame_index = 1
    end_frame_index = VIDEO_LENGTH + 1
    while (end_frame_index <= len(frames_source)):
        frame_list = frames_source[start_frame_index:end_frame_index]
        if (len(set(frame_list)) == 1):
            videos_list.append(range(start_frame_index, end_frame_index))
            start_frame_index = start_frame_index + VIDEO_LENGTH
            end_frame_index = end_frame_index + VIDEO_LENGTH
        else:
            start_frame_index = end_frame_index - 1
            end_frame_index = start_frame_index + VIDEO_LENGTH

    videos_list = np.asarray(videos_list, dtype=np.int32)
    n_videos = videos_list.shape[0]

    # Test model by making predictions
    loss = []
    NB_ITERATIONS = int(n_videos / BATCH_SIZE)
    for index in range(NB_ITERATIONS):
        # Test Autoencoder
        X = load_X(videos_list, index, TEST_DATA_DIR)
        X_test = X[:, 0: int(VIDEO_LENGTH / 2)]
        y_test = X[:, int(VIDEO_LENGTH / 2):]
        loss.append(autoencoder.test_on_batch(X_test, y_test))
        y_pred = autoencoder.predict_on_batch(X_test)

        arrow = int(index / (NB_ITERATIONS / 40))
        stdout.write("\rIteration: " + str(index) + "/" + str(NB_ITERATIONS - 1) + " " +
                     "loss: " + str(loss[len(loss) - 1]) +
                     "\t [" + "{0}>".format("=" * (arrow)))
        stdout.flush()

        orig_image, truth_image, pred_image = combine_images(X_test, y_test, y_pred)
        pred_image = pred_image * 127.5 + 127.5
        orig_image = orig_image * 127.5 + 127.5
        truth_image = truth_image * 127.5 + 127.5
        cv2.imwrite(os.path.join(TEST_RESULTS_DIR, str(index) + "_orig.png"), orig_image)
        cv2.imwrite(os.path.join(TEST_RESULTS_DIR, str(index) + "_truth.png"), truth_image)
        cv2.imwrite(os.path.join(TEST_RESULTS_DIR, str(index) + "_pred.png"), pred_image)

    avg_loss = sum(loss) / len(loss)
    print("\nAvg loss: " + str(avg_loss))
def get_args():
    parser = argparse.ArgumentParser()
    parser.add_argument("--mode", type=str)
    parser.add_argument("--enc_weights", type=str, default="None")
    parser.add_argument("--dec_weights", type=str, default="None")
    parser.add_argument("--batch_size", type=int, default=BATCH_SIZE)
    parser.add_argument("--nice", dest="nice", action="store_true")
    parser.set_defaults(nice=False)
    args = parser.parse_args()
    return args


if __name__ == "__main__":
    args = get_args()

    if args.mode == "train":
        train(BATCH_SIZE=args.batch_size,
              ENC_WEIGHTS=args.enc_weights,
              DEC_WEIGHTS=args.dec_weights)

    if args.mode == "test":
        test(ENC_WEIGHTS=args.enc_weights,
             DEC_WEIGHTS=args.dec_weights) | mit | 2,274,764,787,847,387,000 | 36.227384 | 116 | 0.59711 | false |
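
# --- Added usage note (an assumption, not part of the script above): the
# --- argparse interface defined in get_args() is driven from the shell, e.g.
# ---   python ds_autoencoder.py --mode train --batch_size 10
# ---   python ds_autoencoder.py --mode test --enc_weights enc.h5 --dec_weights dec.h5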
OCA/l10n-brazil | l10n_br_nfe/models/res_company.py | 1 | 3081 | # Copyright 2019 Akretion (Raphaël Valyi <[email protected]>)
# Copyright 2019 KMEE INFORMATICA LTDA
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from odoo import api, fields
from odoo.addons.spec_driven_model.models import spec_models
from ..constants.nfe import (
    NFE_ENVIRONMENT_DEFAULT,
    NFE_ENVIRONMENTS,
    NFE_VERSION_DEFAULT,
    NFE_VERSIONS,
)
PROCESSADOR_ERPBRASIL_EDOC = "oca"
PROCESSADOR = [(PROCESSADOR_ERPBRASIL_EDOC, "erpbrasil.edoc")]
class ResCompany(spec_models.SpecModel):
    _name = "res.company"
    _inherit = ["res.company", "nfe.40.emit"]

    _nfe_search_keys = ["nfe40_CNPJ", "nfe40_xNome", "nfe40_xFant"]

    def _compute_nfe_data(self):
        # compute because a simple related field makes the match_record fail
        for rec in self:
            if rec.partner_id.is_company:
                rec.nfe40_choice6 = "nfe40_CNPJ"
                rec.nfe40_CNPJ = rec.partner_id.cnpj_cpf
            else:
                rec.nfe40_choice6 = "nfe40_CPF"
                rec.nfe40_CPF = rec.partner_id.cnpj_cpf

    nfe40_CNPJ = fields.Char(compute="_compute_nfe_data")
    nfe40_xNome = fields.Char(related="partner_id.legal_name")
    nfe40_xFant = fields.Char(related="partner_id.name")
    nfe40_IE = fields.Char(related="partner_id.inscr_est")
    nfe40_CRT = fields.Selection(related="tax_framework")
    nfe40_enderEmit = fields.Many2one("res.partner", related="partner_id")
    nfe40_choice6 = fields.Selection(string="CNPJ ou CPF?", compute="_compute_nfe_data")

    processador_edoc = fields.Selection(
        selection_add=PROCESSADOR,
    )

    nfe_version = fields.Selection(
        selection=NFE_VERSIONS,
        string="NFe Version",
        default=NFE_VERSION_DEFAULT,
    )

    nfe_environment = fields.Selection(
        selection=NFE_ENVIRONMENTS,
        string="NFe Environment",
        default=NFE_ENVIRONMENT_DEFAULT,
    )

    nfe_default_serie_id = fields.Many2one(
        comodel_name="l10n_br_fiscal.document.serie",
        string="NF-e Default Serie",
    )

    def _build_attr(self, node, fields, vals, path, attr):
        if attr.get_name() == "enderEmit" and self.env.context.get("edoc_type") == "in":
            # we don't want to try build a related partner_id for enderEmit
            # when importing an NFe
            # instead later the emit tag will be imported as the
            # document partner_id (dest) and the enderEmit data will be
            # injected in the same res.partner record.
            return
        return super()._build_attr(node, fields, vals, path, attr)

    @api.model
    def _prepare_import_dict(self, values, model=None):
        # we disable enderEmit related creation with dry_run=True
        context = self._context.copy()
        context["dry_run"] = True
        values = super(ResCompany, self.with_context(context))._prepare_import_dict(
            values, model
        )
        if not values.get("name"):
            values["name"] = values.get("nfe40_xNome") or values.get("nfe40_xFant")
        return values
| agpl-3.0 | -1,786,924,351,013,670,700 | 35.235294 | 88 | 0.63961 | false |
google-research/language | language/boolq/utils/best_checkpoint_exporter.py | 1 | 4132 | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Exporter to save the best checkpoint."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tensorflow.compat.v1 as tf
class BestCheckpointExporter(tf.estimator.Exporter):
"""Exporter that saves the model's best checkpoint.
We use this over `tf.estimator.BestExporter` since we don't want to
rely on tensorflow's `SavedModel` exporter method.
"""
def __init__(self, compare_fn, name='best-checkpoint',
event_file_pattern='eval/*.tfevents.*'):
"""Construct the exporter.
Args:
compare_fn: Function that, given the dictionary of output
metrics of the previously best and current checkpoints,
returns whether to override the previously best checkpoint
with the current one.
name: Name of the exporter
event_file_pattern: where to look for events logs
Raises:
ValueError: if given incorrect arguments
"""
self._name = name
self._compare_fn = compare_fn
if self._compare_fn is None:
raise ValueError('`compare_fn` must not be None.')
self._event_file_pattern = event_file_pattern
self._model_dir = None
self._best_eval_result = None
@property
def name(self):
return self._name
def export(self, estimator, export_path, checkpoint_path,
eval_result, is_the_final_export):
del is_the_final_export
if self._model_dir != estimator.model_dir and self._event_file_pattern:
tf.logging.info('Loading best metric from event files.')
self._model_dir = estimator.model_dir
full_event_file_pattern = os.path.join(self._model_dir,
self._event_file_pattern)
self._best_eval_result = self._get_best_eval_result(
full_event_file_pattern)
if self._best_eval_result is None or self._compare_fn(
best_eval_result=self._best_eval_result,
current_eval_result=eval_result):
tf.logging.info('Performing best checkpoint export.')
self._best_eval_result = eval_result
if not tf.gfile.Exists(export_path):
tf.gfile.MakeDirs(export_path)
new_files = set()
for file_path in tf.gfile.Glob(checkpoint_path + '.*'):
basename = os.path.basename(file_path)
new_files.add(basename)
out_file = os.path.join(export_path, basename)
tf.gfile.Copy(file_path, out_file)
# Clean out any old files
for filename in tf.gfile.ListDirectory(export_path):
if filename not in new_files:
tf.gfile.Remove(os.path.join(export_path, filename))
def _get_best_eval_result(self, event_files):
"""Get the best eval result from event files.
Args:
event_files: Absolute pattern of event files.
Returns:
The best eval result.
"""
if not event_files:
return None
best_eval_result = None
for event_file in tf.gfile.Glob(os.path.join(event_files)):
for event in tf.train.summary_iterator(event_file):
if event.HasField('summary'):
event_eval_result = {}
for value in event.summary.value:
if value.HasField('simple_value'):
event_eval_result[value.tag] = value.simple_value
if event_eval_result:
if best_eval_result is None or self._compare_fn(
best_eval_result, event_eval_result):
best_eval_result = event_eval_result
return best_eval_result
| apache-2.0 | -1,810,089,931,885,031,700 | 34.316239 | 76 | 0.659003 | false |
cadencewatches/frappe | frappe/utils/email_lib/bulk.py | 1 | 4862 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
import HTMLParser
import urllib
from frappe import msgprint, throw, _
from frappe.utils.email_lib.smtp import SMTPServer
from frappe.utils.email_lib.email_body import get_email, get_formatted_html
from frappe.utils.email_lib.html2text import html2text
from frappe.utils import cint, get_url, nowdate
class BulkLimitCrossedError(frappe.ValidationError): pass
def send(recipients=None, sender=None, doctype='User', email_field='email',
subject='[No Subject]', message='[No Content]', ref_doctype=None, ref_docname=None,
add_unsubscribe_link=True):
def is_unsubscribed(rdata):
if not rdata:
return 1
return cint(rdata.unsubscribed)
def check_bulk_limit(new_mails):
this_month = frappe.db.sql("""select count(*) from `tabBulk Email` where
month(creation)=month(%s)""" % nowdate())[0][0]
monthly_bulk_mail_limit = frappe.conf.get('monthly_bulk_mail_limit') or 500
if this_month + len(recipients) > monthly_bulk_mail_limit:
throw(_("Bulk email limit {0} crossed").format(monthly_bulk_mail_limit),
BulkLimitCrossedError)
def update_message(formatted, doc, add_unsubscribe_link):
updated = formatted
if add_unsubscribe_link:
unsubscribe_link = """<div style="padding: 7px; border-top: 1px solid #aaa;
margin-top: 17px;">
<small><a href="%s/?%s">
Unsubscribe</a> from this list.</small></div>""" % (get_url(),
urllib.urlencode({
"cmd": "frappe.utils.email_lib.bulk.unsubscribe",
"email": doc.get(email_field),
"type": doctype,
"email_field": email_field
}))
updated = updated.replace("<!--unsubscribe link here-->", unsubscribe_link)
return updated
if not recipients: recipients = []
if not sender or sender == "Administrator":
sender = frappe.db.get_value('Outgoing Email Settings', None, 'auto_email_id')
check_bulk_limit(len(recipients))
formatted = get_formatted_html(subject, message)
for r in filter(None, list(set(recipients))):
rdata = frappe.db.sql("""select * from `tab%s` where %s=%s""" % (doctype,
email_field, '%s'), (r,), as_dict=1)
doc = rdata and rdata[0] or {}
if (not add_unsubscribe_link) or (not is_unsubscribed(doc)):
# add to queue
updated = update_message(formatted, doc, add_unsubscribe_link)
try:
text_content = html2text(updated)
except HTMLParser.HTMLParseError:
text_content = "[See html attachment]"
add(r, sender, subject, updated, text_content, ref_doctype, ref_docname)
def add(email, sender, subject, formatted, text_content=None,
ref_doctype=None, ref_docname=None):
"""add to bulk mail queue"""
e = frappe.new_doc('Bulk Email')
e.sender = sender
e.recipient = email
try:
e.message = get_email(email, sender=e.sender, formatted=formatted, subject=subject,
text_content=text_content).as_string()
except frappe.ValidationError:
# bad email id - don't add to queue
return
e.status = 'Not Sent'
e.ref_doctype = ref_doctype
e.ref_docname = ref_docname
e.save(ignore_permissions=True)
@frappe.whitelist(allow_guest=True)
def unsubscribe():
doctype = frappe.form_dict.get('type')
field = frappe.form_dict.get('email_field')
email = frappe.form_dict.get('email')
frappe.db.sql("""update `tab%s` set unsubscribed=1
where `%s`=%s""" % (doctype, field, '%s'), (email,))
if not frappe.form_dict.get("from_test"):
frappe.db.commit()
frappe.local.message_title = "Unsubscribe"
frappe.local.message = "<h3>Unsubscribed</h3><p>%s has been successfully unsubscribed.</p>" % email
frappe.response['type'] = 'page'
frappe.response['page_name'] = 'message.html'
def flush(from_test=False):
"""flush email queue, every time: called from scheduler"""
smtpserver = SMTPServer()
auto_commit = not from_test
if frappe.flags.mute_emails or frappe.conf.get("mute_emails") or False:
msgprint(_("Emails are muted"))
from_test = True
for i in xrange(500):
email = frappe.db.sql("""select * from `tabBulk Email` where
status='Not Sent' limit 1 for update""", as_dict=1)
if email:
email = email[0]
else:
break
frappe.db.sql("""update `tabBulk Email` set status='Sending' where name=%s""",
(email["name"],), auto_commit=auto_commit)
try:
if not from_test:
smtpserver.sess.sendmail(email["sender"], email["recipient"], email["message"])
frappe.db.sql("""update `tabBulk Email` set status='Sent' where name=%s""",
(email["name"],), auto_commit=auto_commit)
except Exception, e:
frappe.db.sql("""update `tabBulk Email` set status='Error', error=%s
where name=%s""", (unicode(e), email["name"]), auto_commit=auto_commit)
def clear_outbox():
"""remove mails older than 30 days in Outbox"""
frappe.db.sql("""delete from `tabBulk Email` where
datediff(now(), creation) > 30""")
| mit | -8,974,642,157,343,609,000 | 32.531034 | 100 | 0.693542 | false |
hzmangel/wp2hugo | wp_parser.py | 1 | 3688 | from lxml import etree
class WordpressXMLParser:
def __init__(self, xml_file):
self.tree = etree.parse(xml_file)
self.ns = self.tree.getroot().nsmap
def get_meta(self):
return {
"title": str(self.tree.xpath("/rss/channel/title/text()")[0]),
"baseurl": str(self.tree.xpath("/rss/channel/link/text()")[0]),
"description": str(self.tree.xpath("/rss/channel/description/text()")[0]),
"language": str(self.tree.xpath("/rss/channel/language/text()")[0]),
"author": {
"name": str(self.tree.xpath("/rss/channel/wp:author/wp:author_display_name/text()", namespaces=self.ns)[0]),
"email": str(self.tree.xpath("/rss/channel/wp:author/wp:author_email/text()", namespaces=self.ns)[0]),
}
}
def get_categories(self):
categories = self.tree.xpath('/rss/channel/wp:category', namespaces=self.ns)
rslt = []
for r in categories:
rslt.append({
"term_id": str(r.xpath("wp:term_id/text()", namespaces=self.ns)[0]),
"nicename": str(r.xpath("wp:category_nicename/text()", namespaces=self.ns)[0]),
"name": str(r.xpath("wp:cat_name/text()", namespaces=self.ns)[0]),
})
return rslt
def get_tags(self):
tags = self.tree.xpath('/rss/channel/wp:tag', namespaces=self.ns)
rslt = []
for r in tags:
rslt.append({
"term_id": str(r.xpath("wp:term_id/text()", namespaces=self.ns)[0]),
"nicename": str(r.xpath("wp:tag_slug/text()", namespaces=self.ns)[0]),
"name": str(r.xpath("wp:tag_name/text()", namespaces=self.ns)[0]),
})
return rslt
def get_public_posts(self):
posts = self.tree.xpath("/rss/channel/item[wp:post_type='post' and wp:status!='draft']", namespaces=self.ns)
rslt = []
for r in posts:
rslt.append({
"title": str(r.xpath("title/text()")[0]),
"link": str(r.xpath("link/text()")[0]),
"creator": str(r.xpath("dc:creator/text()", namespaces=self.ns)[0]),
"content": str(r.xpath("content:encoded/text()", namespaces=self.ns)[0]),
"post_date": str(r.xpath("wp:post_date/text()", namespaces=self.ns)[0]),
"post_name": str(r.xpath("wp:post_name/text()", namespaces=self.ns)[0]),
"post_status": str(r.xpath("wp:status/text()", namespaces=self.ns)[0]),
"categories": [str(foo) for foo in r.xpath("category[@domain='category']/text()")],
"tags": [str(foo) for foo in r.xpath("category[@domain='post_tag']/text()")],
})
return rslt
def get_drafts(self):
drafts = self.tree.xpath("/rss/channel/item[wp:post_type='post' and wp:status='draft']", namespaces=self.ns)
rslt = []
for r in drafts:
rslt.append({
"title": str(r.xpath("title/text()")[0]),
"link": str(r.xpath("link/text()")[0]),
"creator": str(r.xpath("dc:creator/text()", namespaces=self.ns)[0]),
"content": str(r.xpath("content:encoded/text()", namespaces=self.ns)[0]),
"post_date": str(r.xpath("wp:post_date/text()", namespaces=self.ns)[0]),
"post_status": str(r.xpath("wp:status/text()", namespaces=self.ns)[0]),
"categories": [str(foo) for foo in r.xpath("category[@domain='category']/text()")],
"tags": [str(foo) for foo in r.xpath("category[@domain='post_tag']/text()")],
})
return rslt
| mit | 743,162,522,983,472,900 | 42.904762 | 124 | 0.532538 | false |
talon-one/talon_one.py | test/test_new_invite_email.py | 1 | 2004 | # coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import talon_one
from talon_one.models.new_invite_email import NewInviteEmail # noqa: E501
from talon_one.rest import ApiException
class TestNewInviteEmail(unittest.TestCase):
"""NewInviteEmail unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test NewInviteEmail
include_option is a boolean, when False only required
params are included, when True both required and
optional params are included """
# model = talon_one.models.new_invite_email.NewInviteEmail() # noqa: E501
if include_optional :
return NewInviteEmail(
email = '0',
token = '0'
)
else :
return NewInviteEmail(
email = '0',
token = '0',
)
def testNewInviteEmail(self):
"""Test NewInviteEmail"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
| mit | -321,614,309,028,066,400 | 35.436364 | 647 | 0.662176 | false |
jfunez/poliwall | poliwall/apps/polisessions/migrations/0002_auto__add_actioncategory__add_field_action_category.py | 1 | 5819 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'ActionCategory'
db.create_table(u'polisessions_actioncategory', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=100)),
))
db.send_create_signal(u'polisessions', ['ActionCategory'])
# Adding field 'Action.category'
db.add_column(u'polisessions_action', 'category',
self.gf('django.db.models.fields.related.ForeignKey')(to=orm['polisessions.ActionCategory'], null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting model 'ActionCategory'
db.delete_table(u'polisessions_actioncategory')
# Deleting field 'Action.category'
db.delete_column(u'polisessions_action', 'category_id')
models = {
u'polidata.house': {
'Meta': {'object_name': 'House'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'rol_name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'polidata.legislative': {
'Meta': {'object_name': 'Legislative'},
'code': ('django.db.models.fields.IntegerField', [], {}),
'end_date': ('django.db.models.fields.DateField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'roman_code': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'start_date': ('django.db.models.fields.DateField', [], {})
},
u'polidata.politician': {
'Meta': {'object_name': 'Politician'},
'biography': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'photo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'politician_id': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'}),
'profile_id': ('django.db.models.fields.CharField', [], {'max_length': '10', 'blank': 'True'}),
'profile_url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'sex': ('django.db.models.fields.CharField', [], {'default': "'M'", 'max_length': '1', 'db_index': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'twitter_user': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'polisessions.action': {
'Meta': {'ordering': "['session']", 'object_name': 'Action'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['polisessions.ActionCategory']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'legislative': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'legislative_actions'", 'to': u"orm['polidata.Legislative']"}),
'politician': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'actions'", 'to': u"orm['polidata.Politician']"}),
'session': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['polisessions.Session']"}),
'source_url': ('django.db.models.fields.TextField', [], {}),
'text': ('django.db.models.fields.TextField', [], {})
},
u'polisessions.actioncategory': {
'Meta': {'object_name': 'ActionCategory'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'polisessions.session': {
'Meta': {'ordering': "['date', 'ordinal']", 'object_name': 'Session'},
'assists_text': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date': ('django.db.models.fields.DateField', [], {}),
'house': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'session_houses'", 'to': u"orm['polidata.House']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'legislative': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'session_legislatives'", 'to': u"orm['polidata.Legislative']"}),
'number': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'ordinal': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'president': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'presidents'", 'null': 'True', 'to': u"orm['polidata.Politician']"}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'source_url': ('django.db.models.fields.TextField', [], {})
}
}
complete_apps = ['polisessions'] | agpl-3.0 | 238,009,160,051,770,620 | 62.26087 | 180 | 0.558687 | false |
rlworkgroup/metaworld | metaworld/policies/sawyer_sweep_into_v2_policy.py | 1 | 1506 | import numpy as np
from metaworld.policies.action import Action
from metaworld.policies.policy import Policy, assert_fully_parsed, move
class SawyerSweepIntoV2Policy(Policy):
@staticmethod
@assert_fully_parsed
def _parse_obs(obs):
return {
'hand_pos': obs[:3],
'unused_1': obs[3],
'cube_pos': obs[4:7],
'unused_2': obs[7:-3],
'goal_pos': obs[-3:],
}
def get_action(self, obs):
o_d = self._parse_obs(obs)
action = Action({
'delta_pos': np.arange(3),
'grab_effort': 3
})
action['delta_pos'] = move(o_d['hand_pos'], to_xyz=self._desired_pos(o_d), p=25.)
action['grab_effort'] = self._grab_effort(o_d)
return action.array
@staticmethod
def _desired_pos(o_d):
pos_curr = o_d['hand_pos']
pos_cube = o_d['cube_pos'] + np.array([-0.005, .0, .01])
pos_goal = o_d['goal_pos']
if np.linalg.norm(pos_curr[:2] - pos_cube[:2]) > 0.04:
return pos_cube + np.array([0., 0., 0.3])
elif abs(pos_curr[2] - pos_cube[2]) > 0.04:
return pos_cube
else:
return pos_goal
@staticmethod
def _grab_effort(o_d):
pos_curr = o_d['hand_pos']
pos_cube = o_d['cube_pos']
if np.linalg.norm(pos_curr[:2] - pos_cube[:2]) > 0.04 \
or abs(pos_curr[2] - pos_cube[2]) > 0.15:
return -1.
else:
return .7
| mit | 1,833,870,968,265,588,700 | 26.381818 | 89 | 0.508632 | false |
grembo/ice | python/test/Ice/ami/TestI.py | 1 | 3180 | # **********************************************************************
#
# Copyright (c) 2003-2017 ZeroC, Inc. All rights reserved.
#
# This copy of Ice is licensed to you under the terms described in the
# ICE_LICENSE file included in this distribution.
#
# **********************************************************************
import Ice, Test, threading, time
class TestIntfI(Test.TestIntf):
def __init__(self):
self._cond = threading.Condition()
self._batchCount = 0
self._pending = None
self._shutdown = False
def op(self, current=None):
pass
def opWithResult(self, current=None):
return 15
def opWithUE(self, current=None):
raise Test.TestIntfException()
def opWithPayload(self, bytes, current=None):
pass
def opBatch(self, current=None):
with self._cond:
self._batchCount += 1
self._cond.notify()
def opBatchCount(self, current=None):
with self._cond:
return self._batchCount
def waitForBatch(self, count, current=None):
with self._cond:
while self._batchCount < count:
self._cond.wait(5)
result = count == self._batchCount
self._batchCount = 0
return result
def close(self, mode, current=None):
current.con.close(Ice.ConnectionClose.valueOf(mode.value))
def sleep(self, ms, current=None):
time.sleep(ms / 1000.0)
def startDispatch(self, current=None):
with self._cond:
if self._shutdown:
# Ignore, this can occur with the forcefull connection close test, shutdown can be dispatch
# before start dispatch.
v = Ice.Future()
v.set_result(None)
return v
elif self._pending:
self._pending.set_result(None)
self._pending = Ice.Future()
return self._pending
def finishDispatch(self, current=None):
with self._cond:
if self._shutdown:
return
elif self._pending: # Pending might not be set yet if startDispatch is dispatch out-of-order
self._pending.set_result(None)
self._pending = None
def shutdown(self, current=None):
with self._cond:
self._shutdown = True
if self._pending:
self._pending.set_result(None)
self._pending = None
current.adapter.getCommunicator().shutdown()
def supportsAMD(self, current=None):
return True
def supportsFunctionalTests(self, current=None):
return False
def pingBiDir(self, id, current = None):
Test.PingReplyPrx.uncheckedCast(current.con.createProxy(id)).reply()
class TestIntfII(Test.Outer.Inner.TestIntf):
def op(self, i, current):
return (i, i)
class TestIntfControllerI(Test.TestIntfController):
def __init__(self, adapter):
self._adapter = adapter
def holdAdapter(self, current=None):
self._adapter.hold()
def resumeAdapter(self, current=None):
self._adapter.activate()
| gpl-2.0 | -1,354,653,715,512,082,000 | 29.576923 | 107 | 0.566038 | false |
DLR-SC/DataFinder | src/datafinder/gui/admin/datastore_configuration_wizard/s3/authentication_option_controller.py | 1 | 3646 | # $Filename$
# $Authors$
# Last Changed: $Date$ $Committer$ $Revision-Id$
#
# Copyright (c) 2003-2011, German Aerospace Center (DLR)
# All rights reserved.
#
#Redistribution and use in source and binary forms, with or without
#
#modification, are permitted provided that the following conditions are
#met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the
# distribution.
#
# * Neither the name of the German Aerospace Center nor the names of
# its contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
#THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
#LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
#A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
#OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
#SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
#LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
#DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
#THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
#(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
#OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Implements the functionality of the s3 authentication option page.
"""
from qt import SIGNAL
from datafinder.gui.admin.datastore_configuration_wizard.abstract_option_controller import AbstractOptionController
__version__ = "$Revision-Id:$"
class AuthenticationOptionController(AbstractOptionController):
""" Handles the authentication options of the ExternalWebDAV DataStore. """
def __init__(self, wizardView, wizardController, pageType):
"""
@see L{AbstractOptionController <datafinder.gui.
DFDataStoreConfigurationWizard.AbstractOptionController.__init__>}
"""
AbstractOptionController.__init__(self, wizardView, wizardController, pageType)
self.wizardView.connect(self.wizardView.userLineEdit, SIGNAL("textChanged(const QString&)"), self._userTextChanged)
self.wizardView.connect(self.wizardView.passwordLineEdit, SIGNAL("textChanged(const QString&)"), self._passwordTextChanged)
def showModelPart(self):
"""
@see L{AbstractOptionController <datafinder.gui.
DFDataStoreConfigurationWizard.AbstractOptionController.showModelPart>}
"""
self.wizardView.authenticationOptionWidgetStack.raiseWidget(1)
self.wizardView.userLineEdit.setText(self.wizardController.datastore.username or " ...public key ...")
self.wizardView.passwordLineEdit.setText(self.wizardController.datastore.password or " ...private key ...")
def _userTextChanged(self, username):
""" Set and validate the username. """
self.setDatastoreProperty("username", unicode(username), self.wizardView.userLineEdit)
def _passwordTextChanged(self, password):
""" Set and validate the password. """
self.setDatastoreProperty("password", unicode(password), self.wizardView.passwordLineEdit)
| bsd-3-clause | 8,393,450,673,255,310,000 | 41.951807 | 133 | 0.718047 | false |
whitelizard/pytiip | pytiip/tiip.py | 1 | 13043 | """
Python implementation of the TIIP (Thin Industrial Internet Protocol) protocol.
"""
import json
from datetime import datetime as dt
from datetime import timedelta as td
import dateutil.parser as parser
# Python3 compability fixes
import sys
PY3 = sys.version_info > (3,)
if PY3:
long = int
unicode = str
else:
# noinspection PyShadowingBuiltins
bytes = str
__version__ = 'tiip.3.0' # TIIP protocol version
class TIIPMessage(object):
# noinspection PyShadowingBuiltins
def __init__(
self, tiipStr=None, tiipDict=None, ts=None, lat=None, mid=None, sid=None, type=None,
src=None, targ=None, sig=None, ch=None, arg=None, pl=None, ok=None,
ten=None, verifyVersion=True):
"""
@param tiipStr: A string representation of a TIIPMessage to load on init
@param tiipDict: A dictionary representation of a TIIPMessage to load on init
@raise: TypeError, ValueError
All other arguments are keys to set in the TIIPMessage, see TIIP specification for more details:
https://github.com/whitelizard/tiip
"""
# Protocol keys
self.__pv = __version__
self.__ts = self.getTimeStamp()
self.__lat = None
self.__mid = None
self.__sid = None
self.__type = None
self.__src = None
self.__targ = None
self.__sig = None
self.__ch = None
self.__arg = None
self.__pl = None
self.__ok = None
self.__ten = None
# Parse constructor arguments
if tiipStr is not None:
self.loadFromStr(tiipStr, verifyVersion)
if tiipDict is not None:
self.loadFromDict(tiipDict, verifyVersion)
if ts is not None:
self.ts = ts
if lat is not None:
self.lat = lat
if mid is not None:
self.mid = mid
if sid is not None:
self.sid = sid
if type is not None:
self.type = type
if src is not None:
self.src = src
if targ is not None:
self.targ = targ
if sig is not None:
self.sig = sig
if ch is not None:
self.ch = ch
if arg is not None:
self.arg = arg
if pl is not None:
self.pl = pl
if ok is not None:
self.ok = ok
if ten is not None:
self.ten = ten
def __str__(self):
return json.dumps(dict(self))
def __iter__(self):
yield 'pv', self.__pv
yield 'ts', self.__ts
if self.__lat is not None:
yield 'lat', self.__lat
if self.__mid is not None:
yield 'mid', self.__mid
if self.__sid is not None:
yield 'sid', self.__sid
if self.__type is not None:
yield 'type', self.__type
if self.__src is not None:
yield 'src', self.__src
if self.__targ is not None:
yield 'targ', self.__targ
if self.__sig is not None:
yield 'sig', self.__sig
if self.__ch is not None:
yield 'ch', self.__ch
if self.__arg is not None:
yield 'arg', self.__arg
if self.__pl is not None:
yield 'pl', self.__pl
if self.__ok is not None:
yield 'ok', self.__ok
if self.__ten is not None:
yield 'ten', self.__ten
@staticmethod
def getTimeStamp():
"""
Creates a timestamp string representation according to the TIIP-specification for timestamps.
@return:
"""
return dt.utcnow().isoformat(timespec='microseconds') + 'Z'
@property
def pv(self):
return self.__pv
@property
def ts(self):
return self.__ts
@ts.setter
def ts(self, value):
if isinstance(value, str) or isinstance(value, unicode) or isinstance(value, bytes):
try:
dateObj = parser.parse(value)
except ValueError:
raise ValueError('timestamp string must be parseable to datetime')
if dateObj.utcoffset() not in [None, td(0)]:
raise ValueError('timestamp string must be in utc timezone')
if value[-1] != 'Z' or value[19] != '.':
raise ValueError('seconds must be decimals and end with Z')
self.__ts = value
elif isinstance(value, dt):
if value.utcoffset() not in [None, td(0)]:
raise ValueError('timestamp string must be in utc timezone')
iso = value.isoformat(timespec='microseconds')
if iso.endswith("+00:00"):
iso = iso[:-6]
self.__ts = iso + 'Z'
else:
raise TypeError('timestamp can only be of types datetime or a valid unicode or string representation of a iso 6801')
@property
def lat(self):
return self.__lat
@lat.setter
def lat(self, value):
if value is None:
self.__lat = None
elif isinstance(value, str) or isinstance(value, unicode) or isinstance(value, bytes):
try:
float(value) # Check if string is float representation
except ValueError:
raise ValueError('Latency string must be parseable to float')
else:
self.__lat = value
elif isinstance(value, (int, float, long)):
self.__lat = repr(round(value, 6))
else:
raise TypeError('Latency can only be of types None, float, int, long or a valid unicode or string representation of a float')
@property
def mid(self):
return self.__mid
@mid.setter
def mid(self, value):
if value is None:
self.__mid = None
elif isinstance(value, str) or isinstance(value, unicode) or isinstance(value, bytes):
self.__mid = value
else:
raise TypeError('mid can only be of types unicode, str or None')
@property
def sid(self):
return self.__sid
@sid.setter
def sid(self, value):
if value is None:
self.__sid = None
elif isinstance(value, str) or isinstance(value, unicode) or isinstance(value, bytes):
self.__sid = value
else:
raise TypeError('sid can only be of types unicode, str or None')
@property
def type(self):
return self.__type
@type.setter
def type(self, value):
if value is None:
self.__type = None
elif isinstance(value, str) or isinstance(value, unicode) or isinstance(value, bytes):
self.__type = value
else:
raise TypeError('type can only be of types unicode, str or None')
@property
def src(self):
return self.__src
@src.setter
def src(self, value):
if value is None:
self.__src = None
elif isinstance(value, list):
self.__src = value
else:
raise TypeError('source can only be of types list or None')
@property
def targ(self):
return self.__targ
@targ.setter
def targ(self, value):
if value is None:
self.__targ = None
elif isinstance(value, list):
self.__targ = value
else:
raise TypeError('target can only be of types list or None')
@property
def sig(self):
return self.__sig
@sig.setter
def sig(self, value):
if value is None:
self.__sig = None
elif isinstance(value, str) or isinstance(value, unicode) or isinstance(value, bytes):
self.__sig = value
else:
raise TypeError('signal can only be of types unicode, str or None')
@property
def ch(self):
return self.__ch
@ch.setter
def ch(self, value):
if value is None:
self.__ch = None
elif isinstance(value, str) or isinstance(value, unicode) or isinstance(value, bytes):
self.__ch = value
else:
raise TypeError('channel can only be of types unicode, str or None')
@property
def arg(self):
return self.__arg
@arg.setter
def arg(self, value):
if value is None:
self.__arg = None
elif isinstance(value, dict):
self.__arg = value
else:
raise TypeError('arguments can only be of types dict or None')
@property
def pl(self):
return self.__pl
@pl.setter
def pl(self, value):
if value is None:
self.__pl = None
elif isinstance(value, list):
self.__pl = value
else:
raise TypeError('payload can only be of types list or None')
@property
def ok(self):
return self.__ok
@ok.setter
def ok(self, value):
if value is None:
self.__ok = None
elif isinstance(value, bool):
self.__ok = value
else:
raise TypeError('ok can only be of types bool or None')
@property
def ten(self):
return self.__ten
@ten.setter
def ten(self, value):
if value is None:
self.__ten = None
elif isinstance(value, str) or isinstance(value, unicode) or isinstance(value, bytes):
self.__ten = value
else:
raise TypeError('tenant can only be of types unicode, str or None')
def loadFromStr(self, tiipStr, verifyVersion=True):
"""
Loads this object with values from a string or unicode representation of a TIIPMessage.
@param tiipStr: The string to load properties from.
@param verifyVersion: True to verify that tiipDict has the right protocol
@raise: TypeError, ValueError
@return: None
"""
tiipDict = json.loads(tiipStr)
self.loadFromDict(tiipDict, verifyVersion)
def loadFromDict(self, tiipDict, verifyVersion=True):
"""
Loads this object with values from a dictionary representation of a TIIPMessage.
@param tiipDict: The dictionary to load properties from.
@param verifyVersion: True to verify that tiipDict has the right protocol
@raise: TypeError, ValueError
@return: None
"""
if verifyVersion:
if 'pv' not in tiipDict or tiipDict['pv'] != self.__pv:
raise ValueError('Incorrect tiip version "' + str(tiipDict['pv']) + '" expected "' + self.__pv + '"')
if 'pv' not in tiipDict or tiipDict['pv'] != self.__pv:
if tiipDict['pv'] == "tiip.2.0":
if 'ct' in tiipDict:
ct = float(tiipDict['ct'])
ts = float(tiipDict['ts'])
tiipDict['ts'] = str(ct)
tiipDict['lat'] = str(ts - ct)
tiipDict['ts'] = dt.utcfromtimestamp(float(tiipDict['ts'])).isoformat(timespec='microseconds') + 'Z'
if 'ts' in tiipDict:
self.ts = tiipDict['ts']
if 'lat' in tiipDict:
self.lat = tiipDict['lat']
if 'mid' in tiipDict:
self.mid = tiipDict['mid']
if 'sid' in tiipDict:
self.sid = tiipDict['sid']
if 'type' in tiipDict:
self.type = tiipDict['type']
if 'src' in tiipDict:
self.src = tiipDict['src']
if 'targ' in tiipDict:
self.targ = tiipDict['targ']
if 'sig' in tiipDict:
self.sig = tiipDict['sig']
if 'ch' in tiipDict:
self.ch = tiipDict['ch']
if 'arg' in tiipDict:
self.arg = tiipDict['arg']
if 'pl' in tiipDict:
self.pl = tiipDict['pl']
if 'ok' in tiipDict:
self.ok = tiipDict['ok']
if 'ten' in tiipDict:
self.ten = tiipDict['ten']
def asVersion(self, version):
if version == self.__pv:
return str(self)
elif version == "tiip.2.0":
tiipDict = {}
for key, val in self:
tiipDict[key] = val
if "lat" in tiipDict:
ct = parser.parse(tiipDict["ts"]).timestamp()
tiipDict["ct"] = str(ct)
tiipDict["ts"] = str(ct + float(tiipDict["lat"]))
tiipDict.pop("lat")
else:
tiipDict["ts"] = str(parser.parse(tiipDict["ts"]).timestamp())
tiipDict["pv"] = version
return json.dumps(tiipDict)
else:
raise ValueError('Incorrect tiip version. Can only handle versions: tiip.2.0 and tiip.3.0')
| mit | 1,881,399,946,409,692,700 | 30.936869 | 137 | 0.527026 | false |
Cloud-Rush/LOL | cacheManager.py | 1 | 6523 | -import json
-import time
-import praw
-from riotwatcher import Riotwatcher
-from riotwatcher import EUROPE_WEST
-from riotwatcher import EUROPE_NORDIC_EAST
-from riotwatcher import KOREA
-from riotwatcher import OCEANIA
-from riotwatcher import BRAZIL
-from riotwatcher import LATIN_AMERICA_SOUTH
-from riotwatcher import LATIN_AMERICA_NORTH
-from riotwatcher import NORTH_AMERICA
-from riotwatcher import RUSSIA
-from riotwatcher import TURKEY
-from twitch import *
-
-
-riot = RiotWatcher('24d89b10-e6ee-469a-91bd-f5e2d15c9e31')
-twitch = TwitchTV()
-reddit = praw.Reddit(user_agent = 'TheFountain by /u/tstarrs')
-submissions = reddit.get_subreddit('leagueoflegends').get_top(limit = 10)
-submissions2 = reddit.get_subreddit('summonerschool').get_top(limit = 10)
-submissions3 = reddit.get_subreddit('loleventvods').get_top(limit = 10)
-allSubmissions = (submissions + submissions2 + submissions3)
-
-cacheFile = open("cacheDatabase.json")
-cacheData = json.load(cacheFile)
-cacheFile.close()
-
-CHAMPION_TOLERANCE = 180000 #3min
-NEWS_TOLERANCE = 15 #2.5 min
-STREAMER_TOLERANCE = 1800000 #30 min
-SUMMONER_TOLERANCE = 3600000 #1 hour
-
-
-#used for setupDatabase
-def saveCache(saveData):
- saveFile = open("cacheDatabase.json","w")
- json.dump(saveData,saveFile)
- saveFile.close()
-
-def saveCache():
- saveFile = open("cacheDatabase.json","w")
- json.dump(cacheData,saveFile)
- saveFile.close()
-
-#used for starting a database from scratch
-#this will reset the database
-def setupDatabase():
- initData = {}
- initData["champions"] = {riot.get_all_champions()}
- initData["news"] = {allSubmissions}
- initData["summoners"] = {}
- initData["streamers"] = {twitch.getGameStreams("League of Legends")}
- saveCache(initData)
-
-#update methods take what is requested to update, and the new information for it
-#adds timestamp information
-def updateChampion(name,info):
- if name in cacheData["champiions"]:
- cacheData["champions"][name]["time"] = time.time()
- cacheData["champions"][name]["info"] = info
- cacheData["champions"][name]["stale"] = False
- else:
- cacheData["champions"][name] = {}
- cacheData["champions"][name]["time"] = time.time()
- cacheData["champions"][name]["info"] = info
- cacheData["champions"][name]["stale"] = False
- saveCache()
-
-def updateNews(name,info):
- if name in cacheData["news"]:
- cacheData["news"][name]["time"] = time.time()
- cacheData["news"][name]["info"] = info
- cacheData["news"][name]["stale"] = False
- else:
- cacheData["news"][name] = {}
- cacheData["news"][name]["time"] = time.time()
- cacheData["news"][name]["info"] = info
- cacheData["news"][name]["stale"] = False
- saveCache()
-
-def updateStreamer(name,info):
- if name in cacheData["streamers"]:
- cacheData["streamers"][name]["time"] = time.time()
- cacheData["streamers"][name]["info"] = info
- cacheData["streamers"][name]["stale"] = False
- else:
- cacheData["streamers"][name] = {}
- cacheData["streamers"][name]["time"] = time.time()
- cacheData["streamers"][name]["info"] = info
- cacheData["streamers"][name]["stale"] = False
- saveCache()
-
-def updateSummoner(name,info):
- if name in cacheData["summoners"]:
- cacheData["summoners"][name]["time"] = time.time()
- cacheData["summoners"][name]["info"] = info
- cacheData["summoners"][name]["stale"] = False
- else:
- cacheData["summoners"][name] = {}
- cacheData["summoners"][name]["time"] = time.time()
- cacheData["summoners"][name]["info"] = info
- cacheData["summoners"][name]["stale"] = False
- saveCache()
-
-#get basic data
-#returns {} if no ifo exists, or if the data is marked as stale
-def getChampionInfo(name):
- if name in cacheData["champions"] and cacheData["champions"][name]["stale"] == False:
- return cacheData["champions"][name]["info"]
- else:
- return {}
-
-def getSummonerInfo(name):
- if name in cacheData["summoners"] and cacheData["summoners"][name]["stale"] == False:
- return cacheData["summoners"][name]["info"]
- else:
- return {}
-
-def getNewsInfo(name):
- if name in cacheData["news"] and cacheData["news"][name]["stale"] == False:
- return cacheData["news"][name]["info"]
- else:
- return {}
-
-def getStreamerInfo(name):
- if name in cacheData["streamers"] and cacheData["streamers"][name]["stale"] == False:
- return cacheData["streamers"][name]["info"]
- else:
- return {}
-
-
-#trim the database, mark items as stale
-def trimCache():
- prunableSummonerKeys = []
- prunableStreamerKeys = []
- prunableNewsKeys = []
- prunableChampionKeys = []
- #for each listing, check how old the data is
- #if the data is old, mark as stale and reset timestamp
- #if data is already stale, mark for deletion
- for name in cacheData["summoners"]:
- if time.time() - SUMMONER_TOLERANCE > cacheData["summoners"][name]["time"]:
- if cacheData["summoners"][name]["stale"] == False:
- cacheData["summoners"][name]["stale"] = True
- cacheData["summoners"][name]["time"] = time.time()
- else:
- prunableSummonerKeys.append(name)
- for name in cacheData["streamers"]:
- if time.time() - STREAMER_TOLERANCE > cacheData["streamers"][name]["time"]:
- if cacheData["streamers"][name]["stale"] == False:
- cacheData["streamers"][name]["stale"] = True
- cacheData["streamers"][name]["time"] = time.time()
- else:
- prunableStreamerKeys.append(name)
- for name in cacheData["news"]:
- if time.time() - NEWS_TOLERANCE > cacheData["news"][name]["time"]:
- if cacheData["news"][name]["stale"] == False:
- cacheData["news"][name]["stale"] = True
- cacheData["news"][name]["time"] = time.time()
- else:
- prunableNewsKeys.append(name)
- for name in cacheData["champions"]:
- if time.time() - CHAMPION_TOLERANCE > cacheData["champions"][name]["time"]:
- if cacheData["champions"][name]["stale"] == False:
- cacheData["champions"][name]["stale"] = True
- cacheData["champions"][name]["time"] = time.time()
- else:
- prunableChampionKeys.append(name)
- #delete the elements marked for deletion
- for pruner in prunableSummonerKeys:
- del cacheData["summoners"][pruner]
- for pruner in prunableStreamerKeys:
- del cacheData["streamers"][pruner]
- for pruner in prunableNewsKeys:
- del cacheData["news"][pruner]
- for pruner in prunableChampionKeys:
- del cacheData["champions"][pruner]
- saveCache()
-
-def test():
- updateStreamer("Emorelleum",{"url":"www.spleen.com","title":"Viktor epic fail"})
- updateNews("Blah", {"Art 1":"la"})
- saveCache()
-
-trimCache()
-#don't uncomment this unless you want to reset the database
-#setupDatabase()
| mit | 5,110,957,185,815,924,000 | 33.513228 | 87 | 0.687414 | false |
sevenian3/ChromaStarPy | Kappas.py | 1 | 33442 | # -*- coding: utf-8 -*-
"""
Created on Mon Apr 24 17:12:02 2017
@author: ishort
"""
import math
import Planck
import Useful
def kappas2(numDeps, pe, zScale, temp, rho, numLams, lambdas, logAHe, \
logNH1, logNH2, logNHe1, logNHe2, Ne, teff, logKapFudge):
"""/* Compute opacities properly from scratch with real physical cross-sections
*/ // *** CAUTION:
//
// This return's "kappa" as defined by Gray 3rd Ed. - cm^2 per *relelvant particle* where the "releveant particle"
// depends on *which* kappa """
#//
#// *** CAUTION:
#//
#// This return's "kappa" as defined by Gray 3rd Ed. - cm^2 per *relelvant particle* where the "releveant particle"
#// depends on *which* kappa
log10E = math.log10(math.e) #//needed for g_ff
logLog10E = math.log(log10E)
logE10 = math.log(10.0)
logNH = [0.0 for i in range(numDeps)] #//Total H particle number density cm^-3
#double logPH1, logPH2, logPHe1, logPHe2;
for i in range(numDeps):
logNH[i] = math.exp(logNH1[i]) + math.exp(logNH2[i])
logNH[i] = math.log(logNH[i])
#//System.out.println("i " + i + " logNH1 " + log10E*logNH1[i] + " logNH2 " + log10E*logNH2[i]
#//+ " logNHe1 " + log10E*logNHe1[i] + " logNHe2 " + log10E*logNHe2[i] + " logPe " + log10E*pe[1][i]);
#// logPH1 = logNH1[i] + temp[1][i] + Useful.logK();
#// logPH2 = logNH2[i] + temp[1][i] + Useful.logK();
#// logPHe1 = logNHe1[i] + temp[1][i] + Useful.logK();
#// logPHe2 = logNHe2[i] + temp[1][i] + Useful.logK();
#//System.out.println("i " + i + " logPH1 " + log10E*logPH1 + " logPH2 " + log10E*logPH2
#//+ " logPHe1 " + log10E*logPHe1 + " logPHe2 " + log10E*logPHe2 + " logPe " + log10E*pe[1][i]);
#double[][] logKappa = new double[numLams][numDeps];
logKappa = [ [0.0 for i in range(numDeps)] for j in range(numLams) ]
#double kappa; //helper
#double stimEm; //temperature- and wavelength-dependent stimulated emission correction
#double stimHelp, logStimEm;
#double ii; //useful for converting integer loop counter, i, to float
#//
#//
#//Input data and variable declarations:
#//
#//
#// H I b-f & f-f
chiIH = 13.598433 #//eV
Rydberg = 1.0968e-2 #// "R" in nm^-1
#//Generate threshold wavelengths and b-f Gaunt (g_bf) helper factors up to n=10:
    #double n; //principal quantum number of Bohr atom E-level
numHlevs = 10
#double logChiHlev;
invThresh = [0.0 for i in range(numHlevs)] #//also serves as g_bf helper factor
threshLambs = [0.0 for i in range(numHlevs)]
chiHlev = [0.0 for i in range(numHlevs)]
for i in range(numHlevs):
n = 1.0 + float(i)
invThresh[i] = Rydberg / n / n #//nm^-1; also serves as g_bf helper factor
threshLambs[i] = 1.0 / invThresh[i] #//nm
logChiHlev = Useful.logH() + Useful.logC() + math.log(invThresh[i]) + 7.0*logE10 #// ergs
chiHlev[i] = math.exp(logChiHlev - Useful.logEv()) #//eV
chiHlev[i] = chiIH - chiHlev[i]
#// System.out.println("i " + i + " n " + n + " invThresh " + invThresh[i] + " threshLambs[i] " + threshLambs[i] + " chiHlev " + chiHlev[i]);
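    #// Sanity check: i=1 (n=2, Balmer edge) gives threshLambs ~ 4/R ~ 364.7 nm and
    #// chiHlev = 13.598*(1 - 1/4) ~ 10.20 eV, the excitation energy of the n=2 level.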
logGauntPrefac = math.log(0.3456) - 0.333333*math.log(Rydberg)
    #// **** Caution: this will require lambda in A!:
a0 = 1.0449e-26 #//if lambda in A
logA0 = math.log(a0)
#// Boltzmann const "k" in eV/K - needed for "theta"
logKeV = Useful.logK() - Useful.logEv()
#//g_bf Gaunt factor - depends on lower E-level, n:
loggbf = [0.0 for i in range(numHlevs)]
#//initialize quantities that depend on lowest E-level contributing to opacity at current wavelength:
for iThresh in range(numHlevs):
loggbf[iThresh] = 0.0
#double logGauntHelp, gauntHelp;
#double gbf, gbfHelp, loggbfHelp;
#double gff, gffHelp, loggffHelp, logffHelp, loggff;
#double help, logHelp3;
#double chiLambda, logChiLambda;
#double bfTerm, logbfTerm, bfSum, logKapH1bf, logKapH1ff;
#//initial defaults:
gbf = 1.0
gff = 1.0
loggff = 0.0
    logChiFac = math.log(1.2398e3) #// hc = 1239.8 eV*nm: photon energy chi = 1239.8/lambda[nm] in eV
#// Needed for kappa_ff:
#double ffBracket;
logffHelp = logLog10E - math.log(chiIH) - math.log(2.0)
#//logHelp = logffHelp - math.log(2.0)
#//
#//Hminus:
#//
#// H^- b-f
#//This is for the sixth order polynomial fit to the cross-section's wavelength dependence
numHmTerms = 7
logAHm = [0.0 for i in range(numHmTerms)]
signAHm = [0.0 for i in range(numHmTerms)]
aHmbf = 4.158e-10
#//double logAHmbf = Math.log(aHmbf);
#//Is the factor of 10^-18cm^2 from the polynomial fit to alpha_Hmbf missing in Eq. 8.12 on p. 156 of Gray 3rd Ed??
logAHmbf = math.log(aHmbf) - 18.0*logE10
#double alphaHmbf, logAlphaHmbf, logTermHmbf, logKapHmbf;
#//Computing each polynomial term logarithmically
logAHm[0] = math.log(1.99654)
signAHm[0] = 1.0
logAHm[1] = math.log(1.18267e-5)
signAHm[1] = -1.0
logAHm[2] = math.log(2.64243e-6)
signAHm[2] = 1.0
logAHm[3] = math.log(4.40524e-10)
signAHm[3] = -1.0
logAHm[4] = math.log(3.23992e-14)
signAHm[4] = 1.0
logAHm[5] = math.log(1.39568e-18)
signAHm[5] = -1.0
logAHm[6] = math.log(2.78701e-23)
signAHm[6] = 1.0
alphaHmbf = math.exp(logAHm[0]) #//initialize accumulator
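    #// The a_i above are the coefficients of the sixth-order wavelength polynomial fit
    #// to the H^- b-f cross-section (cf. Gray 3rd Ed., Eq. 8.11):
    #// alpha_bf = (a_0 + a_1*lambda + ... + a_6*lambda^6) x 1e-18 cm^2 with lambda in A,
    #// valid roughly for 2250 A < lambda < 15000 A - hence the 225-1500 nm guard
    #// applied later in the lambda loop.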
#// H^- f-f:
logAHmff = -26.0*logE10
numHmffTerms = 5
#double fPoly, logKapHmff, logLambdaAFac;
fHmTerms = [ [ 0.0 for i in range(numHmffTerms) ] for j in range(3) ]
fHm = [0.0 for i in range(3)]
fHmTerms[0][0] = -2.2763
fHmTerms[0][1] = -1.6850
fHmTerms[0][2] = 0.76661
fHmTerms[0][3] = -0.053346
fHmTerms[0][4] = 0.0
fHmTerms[1][0] = 15.2827
fHmTerms[1][1] = -9.2846
fHmTerms[1][2] = 1.99381
fHmTerms[1][3] = -0.142631
fHmTerms[1][4] = 0.0
fHmTerms[2][0] = -197.789
fHmTerms[2][1] = 190.266
fHmTerms[2][2] = -67.9775
fHmTerms[2][3] = 10.6913
fHmTerms[2][4] = -0.625151
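    #// fHmTerms[j] hold the polynomial fits (in log10 lambda) for the factors f_0, f_1, f_2
    #// of the H^- f-f absorption: log10(alpha_ff) = f_0 + f_1*log10(theta) + f_2*log10(theta)^2
    #// (cf. Gray 3rd Ed., Eq. 8.13); the 1e-26 prefactor is carried separately in logAHmff above.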
#//
#//H_2^+ molecular opacity - cool stars
    #// scales with proton density (H^+)
#//This is for the third order polynomial fit to the "sigma_l(lambda)" and "U_l(lambda)"
#//terms in the cross-section
numH2pTerms = 4
sigmaH2pTerm = [0.0 for i in range(numH2pTerms)]
UH2pTerm = [0.0 for i in range(numH2pTerms)]
#double logSigmaH2p, sigmaH2p, UH2p, logKapH2p;
aH2p = 2.51e-42
logAH2p = math.log(aH2p)
sigmaH2pTerm[0] = -1040.54
sigmaH2pTerm[1] = 1345.71
sigmaH2pTerm[2] = -547.628
sigmaH2pTerm[3] = 71.9684
#//UH2pTerm[0] = 54.0532
#//UH2pTerm[1] = -32.713
#//UH2pTerm[2] = 6.6699
#//UH2pTerm[3] = -0.4574
    #//Reverse signs on U_1 polynomial expansion coefficients - Dave Gray private communication
#//based on Bates (1952)
UH2pTerm[0] = -54.0532
UH2pTerm[1] = 32.713
UH2pTerm[2] = -6.6699
UH2pTerm[3] = 0.4574
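    #// sigmaH2pTerm and UH2pTerm are third-order polynomial fits (in log lambda) to the
    #// sigma_1(lambda) and U_1(lambda) factors of the H_2^+ cross-section (Bates 1952;
    #// cf. Gray 3rd Ed.); they are combined with theta into logKapH2p later in the
    #// lambda loop.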
#// He I b-f & ff:
#double totalH1Kap, logTotalH1Kap, helpHe, logKapHe;
#//
#//He^- f-f
AHe = math.exp(logAHe)
#double logKapHemff, nHe, logNHe, thisTerm, thisLogTerm, alphaHemff, log10AlphaHemff;
#// Gray does not have this pre-factor, but PHOENIX seems to and without it
#// the He opacity is about 10^26 too high!:
logAHemff = -26.0*logE10
numHemffTerms = 5
logC0HemffTerm = [0.0 for i in range(numHemffTerms)]
logC1HemffTerm = [0.0 for i in range(numHemffTerms)]
logC2HemffTerm = [0.0 for i in range(numHemffTerms)]
logC3HemffTerm = [0.0 for i in range(numHemffTerms)]
signC0HemffTerm = [0.0 for i in range(numHemffTerms)]
signC1HemffTerm = [0.0 for i in range(numHemffTerms)]
signC2HemffTerm = [0.0 for i in range(numHemffTerms)]
signC3HemffTerm = [0.0 for i in range(numHemffTerms)]
    #//we'll be evaluating the polynomial in theta logarithmically by adding logarithmic terms -
logC0HemffTerm[0] = math.log(9.66736)
signC0HemffTerm[0] = 1.0
logC0HemffTerm[1] = math.log(71.76242)
signC0HemffTerm[1] = -1.0
logC0HemffTerm[2] = math.log(105.29576)
signC0HemffTerm[2] = 1.0
logC0HemffTerm[3] = math.log(56.49259)
signC0HemffTerm[3] = -1.0
logC0HemffTerm[4] = math.log(10.69206)
signC0HemffTerm[4] = 1.0
logC1HemffTerm[0] = math.log(10.50614)
signC1HemffTerm[0] = -1.0
logC1HemffTerm[1] = math.log(48.28802)
signC1HemffTerm[1] = 1.0
logC1HemffTerm[2] = math.log(70.43363)
signC1HemffTerm[2] = -1.0
logC1HemffTerm[3] = math.log(37.80099)
signC1HemffTerm[3] = 1.0
logC1HemffTerm[4] = math.log(7.15445)
signC1HemffTerm[4] = -1.0
logC2HemffTerm[0] = math.log(2.74020)
signC2HemffTerm[0] = 1.0
logC2HemffTerm[1] = math.log(10.62144)
signC2HemffTerm[1] = -1.0
logC2HemffTerm[2] = math.log(15.50518)
signC2HemffTerm[2] = 1.0
logC2HemffTerm[3] = math.log(8.33845)
signC2HemffTerm[3] = -1.0
logC2HemffTerm[4] = math.log(1.57960)
signC2HemffTerm[4] = 1.0
logC3HemffTerm[0] = math.log(0.19923)
signC3HemffTerm[0] = -1.0
logC3HemffTerm[1] = math.log(0.77485)
signC3HemffTerm[1] = 1.0
logC3HemffTerm[2] = math.log(1.13200)
signC3HemffTerm[2] = -1.0
logC3HemffTerm[3] = math.log(0.60994)
signC3HemffTerm[3] = 1.0
logC3HemffTerm[4] = math.log(0.11564)
signC3HemffTerm[4] = -1.0
    #//initialize accumulators:
    cHemff = [0.0 for i in range(4)]
    cHemff[0] = signC0HemffTerm[0] * math.exp(logC0HemffTerm[0])
    cHemff[1] = signC1HemffTerm[0] * math.exp(logC1HemffTerm[0])
    cHemff[2] = signC2HemffTerm[0] * math.exp(logC2HemffTerm[0])
    cHemff[3] = signC3HemffTerm[0] * math.exp(logC3HemffTerm[0])
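    #// The quartic fits above supply the coefficients c_0..c_3 of a cubic polynomial in
    #// theta for the He^- f-f absorption; terms are accumulated in log space, with signs
    #// carried in the separate sign* arrays because logs of negative coefficients are
    #// undefined.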
#//
    #//Should the polynomial expansion for the Cs be in log10Theta?? No! Doesn't help:
#//double[] C0HemffTerm = new double[numHemffTerms];
#//double[] C1HemffTerm = new double[numHemffTerms];
#//double[] C2HemffTerm = new double[numHemffTerms];
#//double[] C3HemffTerm = new double[numHemffTerms];
#//
#//C0HemffTerm[0] = 9.66736;
#//C0HemffTerm[1] = -71.76242;
#//C0HemffTerm[2] = 105.29576;
#//C0HemffTerm[3] = -56.49259;
#//C0HemffTerm[4] = 10.69206;
#//C1HemffTerm[0] = -10.50614;
#//C1HemffTerm[1] = 48.28802;
#//C1HemffTerm[2] = -70.43363;
#//C1HemffTerm[3] = 37.80099;
#//C1HemffTerm[4] = -7.15445;
#//C2HemffTerm[0] = 2.74020;
#//C2HemffTerm[1] = -10.62144;
#//C2HemffTerm[2] = 15.50518;
#//C2HemffTerm[3] = -8.33845;
#//C2HemffTerm[4] = 1.57960;
#//C3HemffTerm[0] = -0.19923;
#//C3HemffTerm[1] = 0.77485;
#//C3HemffTerm[2] = -1.13200;
#//C3HemffTerm[3] = 0.60994;
#//C3HemffTerm[4] = -0.11564;
#//initialize accumulators:
#// double[] cHemff = new double[4];
#// cHemff[0] = C0HemffTerm[0];
#// cHemff[1] = C1HemffTerm[0];
#// cHemff[2] = C2HemffTerm[0];
#// cHemff[3] = C3HemffTerm[0];
#//
    #// electron (e^-) scattering (Thomson scattering)
#double kapE, logKapE;
    alphaE = 0.6648e-24 #//cm^2 per free electron
logAlphaE = math.log(0.6648e-24)
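    #// Thomson cross-section per free electron, sigma_T ~ 6.65e-25 cm^2, grey
    #// (wavelength-independent). E.g. at n_e = 1e14 cm^-3 the linear extinction is
    #// ~6.6e-11 cm^-1 - usually small next to the H^- opacity in cool atmospheres.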
#//Universal:
#//
# double theta, logTheta, log10Theta, log10ThetaFac;
# double logLambda, lambdaA, logLambdaA, log10LambdaA, lambdanm, logLambdanm;
#//Okay - here we go:
    #//Make the wavelength loop the outer loop - lots of depth-independent lambda-dependent quantities:
#//
#//
# //System.out.println("Kappas called...");
#//
#// **** START WAVELENGTH LOOP iLam
#//
#//
#//
for iLam in range(numLams):
#//
#//Re-initialize all accumulators to be on safe side:
kappa = 0.0
logKapH1bf = -99.0
logKapH1ff = -99.0
logKapHmbf = -99.0
logKapHmff = -99.0
logKapH2p = -99.0
logKapHe = -99.0
logKapHemff = -99.0
logKapE = -99.0
#//
        #//*** CAUTION: lambda MUST be in nm here for consistency with Rydberg
logLambda = math.log(lambdas[iLam]) #//log cm
lambdanm = 1.0e7 * lambdas[iLam]
logLambdanm = math.log(lambdanm)
lambdaA = 1.0e8 * lambdas[iLam] #//Angstroms
logLambdaA = math.log(lambdaA)
log10LambdaA = log10E * logLambdaA
logChiLambda = logChiFac - logLambdanm
chiLambda = math.exp(logChiLambda) #//eV
#// Needed for both g_bf AND g_ff:
logGauntHelp = logGauntPrefac - 0.333333*logLambdanm #//lambda in nm here
gauntHelp = math.exp(logGauntHelp)
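        #// gauntHelp = 0.3456/(lambda*R)^(1/3): the common factor in the Menzel & Pekeris
        #// approximations for the Gaunt factors (cf. Gray 3rd Ed., Eqs. 8.5-8.6):
        #//   g_bf = 1 - gauntHelp*(lambda*R/n^2 - 1/2)
        #//   g_ff = 1 + gauntHelp*(log10(e)/(theta*chi) + 1/2)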
#// if (iLam == 142){
#// System.out.println("lambdaA " + lambdaA);
#// }
#//HI b-f depth independent factors:
#//Start at largest threshold wavelength and break out of loop when next threshold lambda is less than current lambda:
#for (iThresh = numHlevs-1; iThresh >= 0; iThresh--){
        for iThresh in range(numHlevs-1, -1, -1):
if (threshLambs[iThresh] < lambdanm):
break
if (lambdanm <= threshLambs[iThresh]):
#//this E-level contributes
loggbfHelp = logLambdanm + math.log(invThresh[iThresh]) # //lambda in nm here; invThresh here as R/n^2
gbfHelp = math.exp(loggbfHelp)
gbf = 1.0 - (gauntHelp * (gbfHelp - 0.5))
#// if (iLam == 1){}
#// System.out.println("iThresh " + iThresh + " threshLambs " + threshLambs[iThresh] + " gbf " + gbf);
#// }
loggbf[iThresh] = math.log(gbf)
#//end iThresh loop
#//HI f-f depth independent factors:
# //logChi = logLog10E + logLambdanm - logChiFac; //lambda in nm here
# //chi = Math.exp(logChi);
loggffHelp = logLog10E - logChiLambda
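        #// loggffHelp = log[ log10(e)/chi_lambda ]: the wavelength-dependent piece of the
        #// f-f Gaunt bracket; the theta-dependence is folded in per depth point as
        #// ffBracket inside the depth loop.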
#//
#//
#//
#// ****** Start depth loop iTau ******
#//
#//
#//
#//
for iTau in range(numDeps):
#//
# //Re-initialize all accumulators to be on safe side:
kappa = 0.0
logKapH1bf = -99.0
logKapH1ff = -99.0
logKapHmbf = -99.0
logKapHmff = -99.0
logKapH2p = -99.0
logKapHe = -99.0
logKapHemff = -99.0
logKapE = -99.0
#//
#//
#//if (iTau == 36 && iLam == 142){
#// System.out.println("lambdanm[142] " + lambdanm + " temp[0][iTau=36] " + temp[0][iTau=36]);
#// }
#//This is "theta" ~ 5040/T:
logTheta = logLog10E - logKeV - temp[1][iTau]
log10Theta = log10E * logTheta
theta = math.exp(logTheta)
#//System.out.println("theta " + theta + " logTheta " + logTheta);
#// temperature- and wavelength-dependent stimulated emission coefficient:
stimHelp = -1.0 * theta * chiLambda * logE10
stimEm = 1.0 - math.exp(stimHelp)
logStimEm = math.log(stimEm)
# // if (iTau == 36 && iLam == 142){
# // System.out.println("stimEm " + stimEm);
# //}
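            #// Sanity check: at T = 5040 K (theta = 1) and lambda = 500 nm,
            #// chiLambda ~ 2.48 eV, so stimEm = 1 - 10^(-2.48) ~ 0.9967 -
            #// stimulated emission is a sub-percent correction in the visible.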
ffBracket = math.exp(loggffHelp - logTheta) + 0.5
gff = 1.0 + (gauntHelp*ffBracket)
#//if (iTau == 36 && iLam == 1){
#// System.out.println("gff " + gff);
#// }
loggff = math.log(gff)
#//H I b-f:
#//Start at largest threshold wavelength and break out of loop when next threshold lambda is less than current lambda:
bfSum = 0.0 #//initialize accumulator
logHelp3 = logA0 + 3.0*logLambdaA #//lambda in A here
#for (int iThresh = numHlevs-1; iThresh >= 0; iThresh--){
            for iThresh in range(numHlevs-1, -1, -1):
if (threshLambs[iThresh] < lambdanm):
break
n = 1.0 + float(iThresh)
if (lambdanm <= threshLambs[iThresh]):
#//this E-level contributes
logbfTerm = loggbf[iThresh] - 3.0*math.log(n)
logbfTerm = logbfTerm - (theta*chiHlev[iThresh])*logE10
bfSum = bfSum + math.exp(logbfTerm)
#//if (iTau == 36 && iLam == 142){
# //System.out.println("lambdanm " + lambdanm + " iThresh " + iThresh + " threshLambs[iThresh] " + threshLambs[iThresh]);
# //System.out.println("loggbf " + loggbf[iThresh] + " theta " + theta + " chiHlev " + chiHlev[iThresh]);
# //System.out.println("bfSum " + bfSum + " logbfTerm " + logbfTerm);
#// }
#//end iThresh loop
#// cm^2 per *neutral* H atom
logKapH1bf = logHelp3 + math.log(bfSum)
#//Stimulated emission correction
logKapH1bf = logKapH1bf + logStimEm
#//System.out.println("lambda " + lambdas[iLam] + "iTau " + iTau + " sigma " + Math.exp(logKapH1bf));
#//Add it in to total - opacity per neutral HI atom, so multiply by logNH1
#// This is now linear opacity in cm^-1
logKapH1bf = logKapH1bf + logNH1[iTau]
#//System.out.println(" aH1 " + Math.exp(logKapH1bf));
#////Nasty fix to make Balmer lines show up in A0 stars!
#// if (teff > 8000){
#// logKapH1bf = logKapH1bf - logE10*1.5;
#//
kappa = math.exp(logKapH1bf)
#//System.out.println("HIbf " + log10E*logKapH1bf);
#//if (iTau == 36 && iLam == 142){
#// System.out.println("lambdaA " + lambdaA + " logKapH1bf " + log10E*(logKapH1bf)); //-rho[1][iTau]));
#//}
#//H I f-f:
#// cm^2 per *neutral* H atom
logKapH1ff = logHelp3 + loggff + logffHelp - logTheta - (theta*chiIH)*logE10
#//Stimulated emission correction
logKapH1ff = logKapH1ff + logStimEm
#//Add it in to total - opacity per neutral HI atom, so multiply by logNH1
#// This is now linear opacity in cm^-1
logKapH1ff = logKapH1ff + logNH1[iTau]
#////Nasty fix to make Balmer lines show up in A0 stars!
#// if (teff > 8000){
#// logKapH1ff = logKapH1ff - logE10*1.5;
#//
            kappa = kappa + math.exp(logKapH1ff)
#//System.out.println("HIff " + log10E*logKapH1ff);
#//if (iTau == 36 && iLam == 142){
#// System.out.println("logKapH1ff " + log10E*(logKapH1ff)); //-rho[1][iTau]));
#//}
#//
#//Hminus:
#//
#// H^- b-f:
#//if (iTau == 36 && iLam == 142){
# // System.out.println("temp " + temp[0][iTau] + " lambdanm " + lambdanm);
# // }
logKapHmbf = -99.0 #//initialize default
#//if ( (temp[0][iTau] > 2500.0) && (temp[0][iTau] < 10000.0) ){
#//if ( (temp[0][iTau] > 2500.0) && (temp[0][iTau] < 8000.0) ){
            #//Try lowering lower Teff limit to avoid opacity collapse in outer layers of late-type stars
if ( (temp[0][iTau] > 1000.0) and (temp[0][iTau] < 10000.0) ):
if ((lambdanm > 225.0) and (lambdanm < 1500.0) ): # //nm
#//if (iTau == 36 && iLam == 142){
# // System.out.println("In KapHmbf condition...");
#//}
ii = 0.0
alphaHmbf = signAHm[0]*math.exp(logAHm[0]) #//initialize accumulator
#for (int i = 1; i < numHmTerms; i++){
for i in range(1, numHmTerms):
ii = float(i)
#//if (iTau == 36 && iLam == 142){
#// System.out.println("ii " + ii);
#//}
logTermHmbf = logAHm[i] + ii*logLambdaA
alphaHmbf = alphaHmbf + signAHm[i]*math.exp(logTermHmbf)
#//if (iTau == 36 && iLam == 142){
#// System.out.println("logTermHmbf " + log10E*logTermHmbf + " i " + i + " logAHm " + log10E*logAHm[i]);
#//}
logAlphaHmbf = math.log(alphaHmbf)
#// cm^2 per neutral H atom
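                    # The next line is the standard H-minus bound-free expression
                    # (cf. Gray, 3rd ed., Ch. 8):
                    #   kappa(Hm_bf) = A * alpha_bf * Pe * theta^(5/2) * 10^(0.754*theta)
                    # with the stimulated-emission factor applied separately below.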
logKapHmbf = logAHmbf + logAlphaHmbf + pe[1][iTau] + 2.5*logTheta + (0.754*theta)*logE10
#//Stimulated emission correction
logKapHmbf = logKapHmbf + logStimEm
#//if (iTau == 36 && iLam == 142){
#// System.out.println("alphaHmbf " + alphaHmbf);
#// System.out.println("logKapHmbf " + log10E*logKapHmbf + " logAHmbf " + log10E*logAHmbf + " logAlphaHmbf " + log10E*logAlphaHmbf);
#// }
#//Add it in to total - opacity per neutral HI atom, so multiply by logNH1
#// This is now linear opacity in cm^-1
logKapHmbf = logKapHmbf + logNH1[iTau]
kappa = kappa + math.exp(logKapHmbf)
#//System.out.println("Hmbf " + log10E*logKapHmbf);
#//if (iTau == 36 && iLam == 142){
#// System.out.println("logKapHmbf " + log10E*(logKapHmbf)); //-rho[1][iTau]));
#//}
#//wavelength condition
#// temperature condition
#// H^- f-f:
logKapHmff = -99.0 #//initialize default
#//if ( (temp[0][iTau] > 2500.0) && (temp[0][iTau] < 10000.0) ){
            #//Try lowering lower Teff limit to avoid opacity collapse in outer layers of late-type stars
#//if ( (temp[0][iTau] > 2500.0) && (temp[0][iTau] < 8000.0) ){
if ( (temp[0][iTau] > 1000.0) and (temp[0][iTau] < 10000.0) ):
if ((lambdanm > 260.0) and (lambdanm < 11390.0) ): #//nm
#//construct "f_n" polynomials in log(lambda)
for j in range(3):
fHm[j] = fHmTerms[j][0] #//initialize accumulators
ii = 0.0
for i in range(1, numHmffTerms):
ii = float(i)
logLambdaAFac = math.pow(log10LambdaA, ii)
for j in range(3):
fHm[j] = fHm[j] + (fHmTerms[j][i]*logLambdaAFac)
#} #// i
#} #// j
#//
fPoly = fHm[0] + fHm[1]*log10Theta + fHm[2]*log10Theta*log10Theta
#// In cm^2 per neutral H atom:
                    #// Stimulated emission already accounted for
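                    # Standard H-minus free-free expression (cf. Gray, 3rd ed., Ch. 8):
                    #   kappa(Hm_ff) = A * Pe * 10^(f0 + f1*log10(theta) + f2*log10(theta)^2)
                    # where each f_j is the polynomial in log10(lambda_A) built above.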
logKapHmff = logAHmff + pe[1][iTau] + fPoly*logE10
#//Add it in to total - opacity per neutral HI atom, so multiply by logNH1
#// This is now linear opacity in cm^-1
logKapHmff = logKapHmff + logNH1[iTau]
kappa = kappa + math.exp(logKapHmff)
#//System.out.println("Hmff " + log10E*logKapHmff);
#//if (iTau == 36 && iLam == 142){
#// System.out.println("logKapHmff " + log10E*(logKapHmff)); //-rho[1][iTau]));
#//}
#//wavelength condition
#// temperature condition
#// H^+_2:
#//
logKapH2p = -99.0 #//initialize default
if ( temp[0][iTau] < 4000.0 ):
if ((lambdanm > 380.0) and (lambdanm < 2500.0) ): # //nm
sigmaH2p = sigmaH2pTerm[0] #//initialize accumulator
UH2p = UH2pTerm[0] #//initialize accumulator
                    ii = 0.0
for i in range(1, numH2pTerms):
ii = float(i)
logLambdaAFac = math.pow(log10LambdaA, ii)
#// kapH2p way too large with lambda in A - try cm: No! - leads to negative logs
#//logLambdaAFac = Math.pow(logLambda, ii);
sigmaH2p = sigmaH2p + sigmaH2pTerm[i] * logLambdaAFac
UH2p = UH2p + UH2pTerm[i] * logLambdaAFac
logSigmaH2p = math.log(sigmaH2p)
logKapH2p = logAH2p + logSigmaH2p - (UH2p*theta)*logE10 + logNH2[iTau]
#//Stimulated emission correction
logKapH2p = logKapH2p + logStimEm
#//Add it in to total - opacity per neutral HI atom, so multiply by logNH1
#// This is now linear opacity in cm^-1
logKapH2p = logKapH2p + logNH1[iTau]
kappa = kappa + math.exp(logKapH2p)
#//System.out.println("H2p " + log10E*logKapH2p);
#//if (iTau == 16 && iLam == 142){
# //System.out.println("logKapH2p " + log10E*(logKapH2p-rho[1][iTau]) + " logAH2p " + log10E*logAH2p
#// + " logSigmaH2p " + log10E*logSigmaH2p + " (UH2p*theta)*logE10 " + log10E*((UH2p*theta)*logE10) + " logNH2[iTau] " + log10E*logNH2[iTau]);
#//}
#//wavelength condition
#// temperature condition
#//He I
#//
#// HeI b-f + f-f
#//Scale sum of He b-f and f-f with sum of HI b-f and f-f
#//wavelength condition comes from requirement that lower E level be greater than n=2 (edge at 22.78 nm)
logKapHe = -99.0 #//default intialization
if ( temp[0][iTau] > 10000.0 ):
if (lambdanm > 22.8): #//nm
totalH1Kap = math.exp(logKapH1bf) + math.exp(logKapH1ff)
logTotalH1Kap = math.log(totalH1Kap)
helpHe = Useful.k() * temp[0][iTau]
#// cm^2 per neutral H atom (after all, it's scaled wrt kappHI
#// Stimulated emission already accounted for
#//
#// *** CAUTION: Is this *really* the right thing to do???
#// - we're re-scaling the final H I kappa in cm^2/g corrected for stim em, NOT the raw cross section
logKapHe = math.log(4.0) - (10.92 / helpHe) + logTotalH1Kap
#//Add it in to total - opacity per neutral HI atom, so multiply by logNH1
#// This is now linear opacity in cm^-1
logKapHe = logKapHe + logNH1[iTau]
kappa = kappa + math.exp(logKapHe)
#//System.out.println("He " + log10E*logKapHe);
#//if (iTau == 36 && iLam == 142){
#// System.out.println("logKapHe " + log10E*(logKapHe)); //-rho[1][iTau]));
#//}
#//wavelength condition
#// temperature condition
#//
#//He^- f-f:
logKapHemff = -99.0 #//default initialization
if ( (theta > 0.5) and (theta < 2.0) ):
if ((lambdanm > 500.0) and (lambdanm < 15000.0) ):
#// initialize accumulators:
                    cHemff[0] = signC0HemffTerm[0]*math.exp(logC0HemffTerm[0])
                    #//System.out.println("C0HemffTerm " + signC0HemffTerm[0]*Math.exp(logC0HemffTerm[0]));
                    cHemff[1] = signC1HemffTerm[0]*math.exp(logC1HemffTerm[0])
                    #//System.out.println("C1HemffTerm " + signC1HemffTerm[0]*Math.exp(logC1HemffTerm[0]));
                    cHemff[2] = signC2HemffTerm[0]*math.exp(logC2HemffTerm[0])
                    #//System.out.println("C2HemffTerm " + signC2HemffTerm[0]*Math.exp(logC2HemffTerm[0]));
                    cHemff[3] = signC3HemffTerm[0]*math.exp(logC3HemffTerm[0])
#//System.out.println("C3HemffTerm " + signC3HemffTerm[0]*Math.exp(logC3HemffTerm[0]));
#//build the theta polynomial coefficients
ii = 0.0
for i in range(1, numHemffTerms):
ii = float(i)
thisLogTerm = ii*logTheta + logC0HemffTerm[i]
cHemff[0] = cHemff[0] + signC0HemffTerm[i]*math.exp(thisLogTerm)
#//System.out.println("i " + i + " ii " + ii + " C0HemffTerm " + signC0HemffTerm[i]*Math.exp(logC0HemffTerm[i]));
thisLogTerm = ii*logTheta + logC1HemffTerm[i]
cHemff[1] = cHemff[1] + signC1HemffTerm[i]*math.exp(thisLogTerm)
#//System.out.println("i " + i + " ii " + ii + " C1HemffTerm " + signC1HemffTerm[i]*Math.exp(logC1HemffTerm[i]));
thisLogTerm = ii*logTheta + logC2HemffTerm[i]
cHemff[2] = cHemff[2] + signC2HemffTerm[i]*math.exp(thisLogTerm)
#//System.out.println("i " + i + " ii " + ii + " C2HemffTerm " + signC2HemffTerm[i]*Math.exp(logC2HemffTerm[i]));
thisLogTerm = ii*logTheta + logC3HemffTerm[i]
cHemff[3] = cHemff[3] + signC3HemffTerm[i]*math.exp(thisLogTerm)
#//System.out.println("i " + i + " ii " + ii + " C3HemffTerm " + signC3HemffTerm[i]*Math.exp(logC3HemffTerm[i]));
#//// Should polynomial expansion for Cs be in log10Theta??: - No! Doesn't help
#// initialize accumulators:
#// cHemff[0] = C0HemffTerm[0];
#// cHemff[1] = C1HemffTerm[0];
#// cHemff[2] = C2HemffTerm[0];
#// cHemff[3] = C3HemffTerm[0];
#// ii = 0.0;
#// for (int i = 1; i < numHemffTerms; i++){
#// ii = (double) i;
#// log10ThetaFac = Math.pow(log10Theta, ii);
#// thisTerm = log10ThetaFac * C0HemffTerm[i];
#// cHemff[0] = cHemff[0] + thisTerm;
#// thisTerm = log10ThetaFac * C1HemffTerm[i];
#// cHemff[1] = cHemff[1] + thisTerm;
#// thisTerm = log10ThetaFac * C2HemffTerm[i];
#// cHemff[2] = cHemff[2] + thisTerm;
#// thisTerm = log10ThetaFac * C3HemffTerm[i];
#// cHemff[3] = cHemff[3] + thisTerm;
#// }
#//Build polynomial in logLambda for alpha(He^1_ff):
log10AlphaHemff = cHemff[0] #//initialize accumulation
#//System.out.println("cHemff[0] " + cHemff[0]);
ii = 0.0
for i in range(1, 3+1):
#//System.out.println("i " + i + " cHemff[i] " + cHemff[i]);
ii = float(i)
thisTerm = cHemff[i] * math.pow(log10LambdaA, ii)
log10AlphaHemff = log10AlphaHemff + thisTerm
#//System.out.println("log10AlphaHemff " + log10AlphaHemff);
alphaHemff = math.pow(10.0, log10AlphaHemff) #//gives infinite alphas!
#// alphaHemff = log10AlphaHemff; // ?????!!!!!
#//System.out.println("alphaHemff " + alphaHemff);
#// Note: this is the extinction coefficient per *Hydrogen* particle (NOT He- particle!)
# //nHe = Math.exp(logNHe1[iTau]) + Math.exp(logNHe2[iTau]);
# //logNHe = Math.log(nHe);
# //logKapHemff = Math.log(alphaHemff) + Math.log(AHe) + pe[1][iTau] + logNHe1[iTau] - logNHe;
logKapHemff = logAHemff + math.log(alphaHemff) + pe[1][iTau] + logNHe1[iTau] - logNH[iTau]
#//Stimulated emission already accounted for
#//Add it in to total - opacity per H particle, so multiply by logNH
#// This is now linear opacity in cm^-1
logKapHemff = logKapHemff + logNH[iTau]
kappa = kappa + math.exp(logKapHemff)
#//System.out.println("Hemff " + log10E*logKapHemff);
#//if (iTau == 36 && iLam == 155){
#//if (iLam == 155){
#// System.out.println("logKapHemff " + log10E*(logKapHemff)); //-rho[1][iTau]));
#//}
#//wavelength condition
#// temperature condition
#//
#// electron (e^-1) scattering (Thomson scattering)
#//coefficient per *"hydrogen atom"* (NOT per e^-!!) (neutral or total H??):
logKapE = logAlphaE + Ne[1][iTau] - logNH[iTau]
            #//Stimulated emission not relevant
#//Add it in to total - opacity per H particle, so multiply by logNH
#// This is now linear opacity in cm^-1
            #//I know, we're adding logNH right back in after subtracting it off, but this is for clarity and consistency for now... :
logKapE = logKapE + logNH[iTau]
kappa = kappa + math.exp(logKapE)
#//System.out.println("E " + log10E*logKapE);
#//if (iTau == 36 && iLam == 142){
#// System.out.println("logKapE " + log10E*(logKapE)); //-rho[1][iTau]));
#//}
#//Metal b-f
#//Fig. 8.6 Gray 3rd Ed.
#//
#//
#// This is now linear opacity in cm^-1
#// Divide by mass density
#// This is now mass extinction in cm^2/g
#//
logKappa[iLam][iTau] = math.log(kappa) - rho[1][iTau]
            #// Fudge is in cm^2/g: Convert to natural log:
logEKapFudge = logE10 * logKapFudge
logKappa[iLam][iTau] = logKappa[iLam][iTau] + logEKapFudge
#//if (iTau == 36 && iLam == 142){
#//System.out.println(" " + log10E*(logKappa[iLam][iTau]+rho[1][iTau]));
#//}
#// close iTau depth loop
#//
#//close iLam wavelength loop
return logKappa
#} //end method kappas2
def kapRos(numDeps, numLams, lambdas, logKappa, temp):
kappaRos = [ [0.0 for i in range(numDeps)] for j in range(2) ]
#double numerator, denominator, deltaLam, logdBdTau, logNumerator, logDenominator;
#double logTerm, logDeltaLam, logInvKap, logInvKapRos;
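    # The Rosseland mean is the harmonic mean of the monochromatic extinction,
    # weighted by the temperature derivative of the Planck function:
    #   1/kappa_Ros = Int[(1/kappa_lambda)*(dB/dT) dlambda] / Int[(dB/dT) dlambda]
    # Both integrals are accumulated below with a simple rectangle rule in lambda.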
for iTau in range(numDeps):
numerator = 0.0 #//initialize accumulator
denominator = 0.0
for iLam in range(1, numLams):
deltaLam = lambdas[iLam] - lambdas[iLam-1] #//lambda in cm
logDeltaLam = math.log(deltaLam)
logInvKap = -1.0 * logKappa[iLam][iTau]
logdBdTau = Planck.dBdT(temp[0][iTau], lambdas[iLam])
logTerm = logdBdTau + logDeltaLam
denominator = denominator + math.exp(logTerm)
            logTerm = logTerm + logInvKap
numerator = numerator + math.exp(logTerm)
logNumerator = math.log(numerator)
logDenominator = math.log(denominator)
logInvKapRos = logNumerator - logDenominator
kappaRos[1][iTau] = -1.0 * logInvKapRos #//logarithmic
kappaRos[0][iTau] = math.exp(kappaRos[1][iTau])
return kappaRos
#} //end method kapRos
| mit | -5,651,264,113,527,596,000 | 39.857322 | 142 | 0.555948 | false |
Aeva/silver | obj_parser.py | 1 | 1318 |
# This file is part of Silver.
#
# Silver is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Silver is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Silver. If not, see <http://www.gnu.org/licenses/>.
#
# Have a nice day!
from model_tools import *
def obj_parser(fileob):
"""
Parser for wavefront obj files. File format reference:
http://en.wikipedia.org/wiki/Wavefront_.obj_file
"""
verts = []
vertbuffer = []
    for line in fileob:
        line = line.strip()
        if not line:
            # skip blank lines, which would otherwise crash the [0] checks below
            continue
        if line[0] == "v":
            verts.append(str2vector(line))
        if line[0] == "f":
            params = line.split(" ")[1:]
if line.count("/"):
params = [p.split("/")[0] for p in params]
params = map(lambda x:int(x)-1, params)
for i in params:
vertbuffer += verts[i]
return vertbuffer
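# A minimal usage sketch (assumes a hypothetical "model.obj" containing only
# "v" and "f" records, the two handled above):
#     with open("model.obj") as fileob:
#         flat = obj_parser(fileob)
#     # "flat" is a flat [x, y, z, x, y, z, ...] list holding one vertex
#     # triple per face index, ready for a vertex-buffer upload.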
| gpl-3.0 | 7,802,623,028,144,939,000 | 27.652174 | 69 | 0.636571 | false |
pexip/os-python-suds-jurko | tests/test_date_time.py | 1 | 16814 |
# This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( [email protected] )
"""
Date & time related suds Python library unit tests.
Implemented using the 'pytest' testing framework.
"""
if __name__ == "__main__":
try:
import pytest
pytest.main(["--pyargs", __file__])
except ImportError:
print("'py.test' unit testing framework not available. Can not run "
"'{}' directly as a script.".format(__file__))
import sys
sys.exit(-2)
from suds.sax.date import (Date, DateTime, Time, UtcTimezone,
FixedOffsetTimezone)
from suds.xsd.sxbuiltin import XDate, XDateTime, XTime
import tests
import pytest
import datetime
tests.setup_logging()
class _Dummy:
"""Class for testing unknown object class handling."""
pass
"""Invalid date strings reused for both date & datetime testing."""
_invalid_date_strings = (
"",
"abla",
"12",
"12-01",
"-12-01",
"1900-01",
"+1900-10-01", # Plus sign not allowed.
"1900-13-01", # Invalid month.
"1900-02-30", # Invalid day.
"2001-02-29", # Not a leap year.
"2100-02-29", # Not a leap year.
" 1900-01-01",
"1900- 01-01",
"1900-01 -01",
"1900-01-01 ",
"1900-13-011",
"1900-01-01X",
"1900-01-01T", # 'T' is a date/time separator for DateTime.
# Invalid time zone indicators.
"1900-01-01 +17:00",
"1900-01-01+ 17:00",
"1900-01-01*17:00",
"1900-01-01 17:00",
"1900-01-01+17:",
"1900-01-01+170",
"1900-01-01+1730",
"1900-01-01+170:00",
"1900-01-01+17:00:00",
"1900-01-01-:4",
"1900-01-01-2a:00",
"1900-01-01-222:00",
"1900-01-01-12:000"
"1900-01-01+00:60",
"1900-01-01-00:99")
"""Invalid date strings reused for both time & datetime testing."""
_invalid_time_strings = (
"",
"bunga",
"12",
"::",
"12:",
"12:01",
"12:01:",
"12:01: 00",
"12:01: 00",
"23: 01:00",
" 23:01:00",
"23 :01:00",
"23::00",
"23:000:00",
"023:00:00",
"23:00:000",
"25:01:00",
"-1:01:00",
"24:01:00",
"23:-1:00",
"23:61:00",
"23:60:00",
"23:59:-1",
"23:59:61",
"23:59:60",
"7.59.13",
"7-59-13",
"-0:01:00",
"23:-0:00",
"23:59:-0",
"23:59:6.a",
"23:59:6.",
"23:59:6:0",
"23:59:6.12x",
"23:59:6.12x45",
"23:59:6.999999 ",
"23:59:6.999999x",
"T23:59:6",
# Invalid time zone indicators.
"13:27:04 -10:00",
"13:27:04- 10:00",
"13:27:04*17:00",
"13:27:04 17:00",
"13:27:04-003",
"13:27:04-003:00",
"13:27:04+00:002",
"13:27:04-13:60",
"13:27:04-121",
"13:27:04-1210",
"13:27:04-121:00",
"13:27:04+12:",
"13:27:04+12:00:00",
"13:27:04-:13"
"13:27:04-24:00"
"13:27:04+99:00")
class TestDate:
"""Tests for the suds.sax.date.Date class."""
def testConstructFromDate(self):
date = datetime.date(2001, 12, 10)
assert Date(date).value is date
def testConstructFromDateTime_naive(self):
date = datetime.datetime(2001, 12, 10, 10, 50, 21, 32132)
assert Date(date).value == datetime.date(2001, 12, 10)
@pytest.mark.parametrize("hours", (5, 20))
def testConstructFromDateTime_tzAware(self, hours):
tz = FixedOffsetTimezone(10)
date = datetime.datetime(2001, 12, 10, hours, 50, 21, 32132, tzinfo=tz)
assert Date(date).value == datetime.date(2001, 12, 10)
@pytest.mark.parametrize(("string", "y", "m", "d"), (
("1900-01-01", 1900, 1, 1),
("1900-1-1", 1900, 1, 1),
("1900-01-01z", 1900, 1, 1),
("1900-01-01Z", 1900, 1, 1),
("1900-01-01-02", 1900, 1, 1),
("1900-01-01+2", 1900, 1, 1),
("1900-01-01+02:00", 1900, 1, 1),
("1900-01-01+99:59", 1900, 1, 1),
("1900-01-01-21:13", 1900, 1, 1),
("2000-02-29", 2000, 2, 29))) # Leap year.
def testConstructFromString(self, string, y, m, d):
assert Date(string).value == datetime.date(y, m, d)
@pytest.mark.parametrize("string", _invalid_date_strings)
def testConstructFromString_failure(self, string):
pytest.raises(ValueError, Date, string)
@pytest.mark.parametrize("source", (
None,
object(),
_Dummy(),
datetime.time(10, 10)))
def testConstructFromUnknown(self, source):
pytest.raises(ValueError, Date, source)
@pytest.mark.parametrize(("input", "output"), (
("1900-01-01", "1900-01-01"),
("2000-02-29", "2000-02-29"),
("1900-1-1", "1900-01-01"),
("1900-01-01z", "1900-01-01"),
("1900-01-01Z", "1900-01-01"),
("1900-01-01-02", "1900-01-01"),
("1900-01-01+2", "1900-01-01"),
("1900-01-01+02:00", "1900-01-01"),
("1900-01-01+99:59", "1900-01-01"),
("1900-01-01-21:13", "1900-01-01")))
def testConvertToString(self, input, output):
assert str(Date(input)) == output
class TestDateTime:
"""Tests for the suds.sax.date.DateTime class."""
def testConstructFromDateTime(self):
dt = datetime.datetime(2001, 12, 10, 1, 1)
assert DateTime(dt).value is dt
dt.replace(tzinfo=UtcTimezone())
assert DateTime(dt).value is dt
@pytest.mark.parametrize(
("string", "y", "M", "d", "h", "m", "s", "micros"), (
("2013-11-19T14:05:23.428068", 2013, 11, 19, 14, 5, 23, 428068),
("2013-11-19 14:05:23.4280", 2013, 11, 19, 14, 5, 23, 428000)))
def testConstructFromString(self, string, y, M, d, h, m, s, micros):
assert DateTime(string).value == datetime.datetime(y, M, d, h, m, s,
micros)
@pytest.mark.parametrize("string",
[x + "T00:00:00" for x in _invalid_date_strings] +
["2000-12-31T" + x for x in _invalid_time_strings] + [
# Invalid date/time separator characters.
"2013-11-1914:05:23.428068",
"2013-11-19X14:05:23.428068"])
def testConstructFromString_failure(self, string):
pytest.raises(ValueError, DateTime, string)
@pytest.mark.parametrize(
("string", "y", "M", "d", "h", "m", "s", "micros"), (
("2000-2-28T23:59:59.9999995", 2000, 2, 29, 0, 0, 0, 0),
("2000-2-29T23:59:59.9999995", 2000, 3, 1, 0, 0, 0, 0),
("2013-12-31T23:59:59.9999994", 2013, 12, 31, 23, 59, 59, 999999),
("2013-12-31T23:59:59.99999949", 2013, 12, 31, 23, 59, 59, 999999),
("2013-12-31T23:59:59.9999995", 2014, 1, 1, 0, 0, 0, 0)))
def testConstructFromString_subsecondRounding(self, string, y, M, d, h, m,
s, micros):
ref = datetime.datetime(y, M, d, h, m, s, micros)
assert DateTime(string).value == ref
@pytest.mark.parametrize(
("string", "y", "M", "d", "h", "m", "s", "micros", "tz_h", "tz_m"), (
("2013-11-19T14:05:23.428068-3",
2013, 11, 19, 14, 5, 23, 428068, -3, 0),
("2013-11-19T14:05:23.068+03",
2013, 11, 19, 14, 5, 23, 68000, 3, 0),
("2013-11-19T14:05:23.428068-02:00",
2013, 11, 19, 14, 5, 23, 428068, -2, 0),
("2013-11-19T14:05:23.428068+02:00",
2013, 11, 19, 14, 5, 23, 428068, 2, 0),
("2013-11-19T14:05:23.428068-23:59",
2013, 11, 19, 14, 5, 23, 428068, -23, -59)))
def testConstructFromString_timezone(self, string, y, M, d, h, m, s,
micros, tz_h, tz_m):
tzdelta = datetime.timedelta(hours=tz_h, minutes=tz_m)
tzinfo = FixedOffsetTimezone(tzdelta)
ref = datetime.datetime(y, M, d, h, m, s, micros, tzinfo=tzinfo)
assert DateTime(string).value == ref
@pytest.mark.parametrize("source", (
None,
object(),
_Dummy(),
datetime.date(2010, 10, 27),
datetime.time(10, 10)))
def testConstructFromUnknown(self, source):
pytest.raises(ValueError, DateTime, source)
@pytest.mark.parametrize(("input", "output"), (
("2013-11-19T14:05:23.428068", "2013-11-19T14:05:23.428068"),
("2013-11-19 14:05:23.4280", "2013-11-19T14:05:23.428000"),
("2013-12-31T23:59:59.9999995", "2014-01-01T00:00:00"),
("2013-11-19T14:05:23.428068-3", "2013-11-19T14:05:23.428068-03:00"),
("2013-11-19T14:05:23.068+03", "2013-11-19T14:05:23.068000+03:00"),
("2013-11-19T14:05:23.4-02:00", "2013-11-19T14:05:23.400000-02:00"),
("2013-11-19T14:05:23.410+02:00", "2013-11-19T14:05:23.410000+02:00"),
("2013-11-19T14:05:23.428-23:59", "2013-11-19T14:05:23.428000-23:59")))
def testConvertToString(self, input, output):
assert str(DateTime(input)) == output
class TestTime:
"""Tests for the suds.sax.date.Time class."""
def testConstructFromTime(self):
time = datetime.time(1, 1)
assert Time(time).value is time
time.replace(tzinfo=UtcTimezone())
assert Time(time).value is time
@pytest.mark.parametrize(("string", "h", "m", "s", "micros"), (
("10:59:47", 10, 59, 47, 0),
("9:9:13", 9, 9, 13, 0),
("18:0:09.2139", 18, 0, 9, 213900),
("18:0:09.02139", 18, 0, 9, 21390),
("18:0:09.002139", 18, 0, 9, 2139),
("0:00:00.00013", 0, 0, 0, 130),
("0:00:00.000001", 0, 0, 0, 1),
("0:00:00.000000", 0, 0, 0, 0),
("23:59:6.999999", 23, 59, 6, 999999),
("1:13:50.0", 1, 13, 50, 0)))
def testConstructFromString(self, string, h, m, s, micros):
assert Time(string).value == datetime.time(h, m, s, micros)
@pytest.mark.parametrize("string", _invalid_time_strings)
def testConstructFromString_failure(self, string):
pytest.raises(ValueError, Time, string)
@pytest.mark.parametrize(("string", "h", "m", "s", "micros"), (
("0:0:0.0000000", 0, 0, 0, 0),
("0:0:0.0000001", 0, 0, 0, 0),
("0:0:0.0000004", 0, 0, 0, 0),
("0:0:0.0000005", 0, 0, 0, 1),
("0:0:0.0000006", 0, 0, 0, 1),
("0:0:0.0000009", 0, 0, 0, 1),
("0:0:0.5", 0, 0, 0, 500000),
("0:0:0.5000004", 0, 0, 0, 500000),
("0:0:0.5000005", 0, 0, 0, 500001),
("0:0:0.50000050", 0, 0, 0, 500001),
("0:0:0.50000051", 0, 0, 0, 500001),
("0:0:0.50000055", 0, 0, 0, 500001),
("0:0:0.50000059", 0, 0, 0, 500001),
("0:0:0.5000006", 0, 0, 0, 500001),
("0:0:0.9999990", 0, 0, 0, 999999),
("0:0:0.9999991", 0, 0, 0, 999999),
("0:0:0.9999994", 0, 0, 0, 999999),
("0:0:0.99999949", 0, 0, 0, 999999),
("0:0:0.9999995", 0, 0, 1, 0),
("0:0:0.9999996", 0, 0, 1, 0),
("0:0:0.9999999", 0, 0, 1, 0)))
def testConstructFromString_subsecondRounding(self, string, h, m, s,
micros):
assert Time(string).value == datetime.time(h, m, s, micros)
@pytest.mark.parametrize(
("string", "h", "m", "s", "micros", "tz_h", "tz_m"), (
("18:0:09.2139z", 18, 0, 9, 213900, 0, 0),
("18:0:09.2139Z", 18, 0, 9, 213900, 0, 0),
("18:0:09.2139+3", 18, 0, 9, 213900, 3, 0),
("18:0:09.2139-3", 18, 0, 9, 213900, -3, 0),
("18:0:09.2139-03", 18, 0, 9, 213900, -3, 0),
("18:0:09.2139+9:3", 18, 0, 9, 213900, 9, 3),
("18:0:09.2139+10:31", 18, 0, 9, 213900, 10, 31),
("18:0:09.2139-10:31", 18, 0, 9, 213900, -10, -31)))
def testConstructFromString_timezone(self, string, h, m, s, micros, tz_h,
tz_m):
tzdelta = datetime.timedelta(hours=tz_h, minutes=tz_m)
tzinfo = FixedOffsetTimezone(tzdelta)
ref = datetime.time(h, m, s, micros, tzinfo=tzinfo)
assert Time(string).value == ref
@pytest.mark.parametrize("source", (
None,
object(),
_Dummy(),
datetime.date(2010, 10, 27),
datetime.datetime(2010, 10, 27, 10, 10)))
def testConstructFromUnknown(self, source):
pytest.raises(ValueError, Time, source)
@pytest.mark.parametrize(("input", "output"), (
("14:05:23.428068", "14:05:23.428068"),
("14:05:23.4280", "14:05:23.428000"),
("23:59:59.9999995", "00:00:00"),
("14:05:23.428068-3", "14:05:23.428068-03:00"),
("14:05:23.068+03", "14:05:23.068000+03:00"),
("14:05:23.4-02:00", "14:05:23.400000-02:00"),
("14:05:23.410+02:00", "14:05:23.410000+02:00"),
("14:05:23.428-23:59", "14:05:23.428000-23:59")))
def testConvertToString(self, input, output):
assert str(Time(input)) == output
class TestXDate:
"""
Tests for the suds.xsd.sxbuiltin.XDate class.
Python object <--> string conversion details already tested in TestDate.
"""
def testTranslateEmptyStringToPythonObject(self):
assert XDate.translate("") == None
def testTranslateStringToPythonObject(self):
assert XDate.translate("1941-12-7") == datetime.date(1941, 12, 7)
def testTranslatePythonObjectToString(self):
date = datetime.date(2013, 7, 24)
translated = XDate.translate(date, topython=False)
assert isinstance(translated, str)
assert translated == "2013-07-24"
def testTranslatePythonObjectToString_datetime(self):
dt = datetime.datetime(2013, 7, 24, 11, 59, 4)
translated = XDate.translate(dt, topython=False)
assert isinstance(translated, str)
assert translated == "2013-07-24"
@pytest.mark.parametrize("source", (
None,
object(),
_Dummy(),
datetime.time()))
def testTranslatePythonObjectToString_failed(self, source):
assert XDate.translate(source, topython=False) is source
class TestXDateTime:
"""
Tests for the suds.xsd.sxbuiltin.XDateTime class.
Python object <--> string conversion details already tested in
TestDateTime.
"""
def testTranslateEmptyStringToPythonObject(self):
assert XDateTime.translate("") == None
def testTranslateStringToPythonObject(self):
dt = datetime.datetime(1941, 12, 7, 10, 30, 22, 454000)
assert XDateTime.translate("1941-12-7T10:30:22.454") == dt
def testTranslatePythonObjectToString(self):
dt = datetime.datetime(2021, 12, 31, 11, 25, tzinfo=UtcTimezone())
translated = XDateTime.translate(dt, topython=False)
assert isinstance(translated, str)
assert translated == "2021-12-31T11:25:00+00:00"
@pytest.mark.parametrize("source", (
None,
object(),
_Dummy(),
datetime.time(22, 47, 9, 981),
datetime.date(2101, 1, 1)))
def testTranslatePythonObjectToString_failed(self, source):
assert XDateTime.translate(source, topython=False) is source
class TestXTime:
"""
Tests for the suds.xsd.sxbuiltin.XTime class.
Python object <--> string conversion details already tested in
TestDateTime.
"""
def testTranslateEmptyStringToPythonObject(self):
assert XTime.translate("") == None
def testTranslateStringToPythonObject(self):
assert XTime.translate("10:30:22") == datetime.time(10, 30, 22)
def testTranslatePythonObjectToString(self):
time = datetime.time(16, 53, 12, tzinfo=FixedOffsetTimezone(4))
translated = XTime.translate(time, topython=False)
assert isinstance(translated, str)
assert translated == "16:53:12+04:00"
@pytest.mark.parametrize("source", (
None,
object(),
_Dummy(),
datetime.date(2101, 1, 1),
datetime.datetime(2101, 1, 1, 22, 47, 9, 981)))
def testTranslatePythonObjectToString_failed(self, source):
assert XTime.translate(source, topython=False) is source
| lgpl-3.0 | 1,870,702,405,981,344,800 | 34.622881 | 79 | 0.555073 | false |
STIXProject/python-stix | stix/ttp/infrastructure.py | 1 | 3544 |
# Copyright (c) 2017, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
# mixbox
from mixbox import fields
# cybox
from cybox.core import Observables
# internal
import stix
from stix.common import StructuredTextList, VocabString
from stix.common.vocabs import AttackerInfrastructureType
import stix.bindings.ttp as ttp_binding
class Infrastructure(stix.Entity):
_binding = ttp_binding
_binding_class = _binding.InfrastructureType
_namespace = "http://stix.mitre.org/TTP-1"
id_ = fields.IdField("id")
idref = fields.IdrefField("idref")
title = fields.TypedField("Title")
descriptions = fields.TypedField("Description", StructuredTextList)
short_descriptions = fields.TypedField("Short_Description", StructuredTextList)
types = fields.TypedField("Type", VocabString, multiple=True, key_name="types")
observable_characterization = fields.TypedField("Observable_Characterization", Observables)
def __init__(self, id_=None, idref=None, title=None, description=None, short_description=None):
super(Infrastructure, self).__init__()
self.id_ = id_
self.idref = idref
self.title = title
self.description = StructuredTextList(description)
self.short_description = StructuredTextList(short_description)
@property
def description(self):
"""A single description about the contents or purpose of this object.
Default Value: ``None``
Note:
If this object has more than one description set, this will return
the description with the lowest ordinality value.
Returns:
An instance of :class:`.StructuredText`
"""
if self.descriptions is None:
self.descriptions = StructuredTextList()
return next(iter(self.descriptions), None)
@description.setter
def description(self, value):
self.descriptions = StructuredTextList(value)
def add_description(self, description):
"""Adds a description to the ``descriptions`` collection.
This is the same as calling "foo.descriptions.add(bar)".
"""
self.descriptions.add(description)
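    # For example (hypothetical values):
    #     infra = Infrastructure(title="C2 tier")
    #     infra.add_description("Primary command-and-control servers")
    #     infra.description  # lowest-ordinality StructuredText entry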
@property
def short_description(self):
"""A single short description about the contents or purpose of this
object.
Default Value: ``None``
Note:
If this object has more than one short description set, this will
return the description with the lowest ordinality value.
Returns:
An instance of :class:`.StructuredText`
"""
if self.short_descriptions is None:
self.short_descriptions = StructuredTextList()
return next(iter(self.short_descriptions), None)
@short_description.setter
def short_description(self, value):
self.short_descriptions = value
def add_short_description(self, description):
"""Adds a description to the ``short_descriptions`` collection.
This is the same as calling "foo.short_descriptions.add(bar)".
"""
self.short_descriptions.add(description)
def add_type(self, type_):
self.types.append(type_)
class InfraStructureTypes(stix.EntityList):
_namespace = "http://stix.mitre.org/TTP-1"
_contained_type = VocabString
@classmethod
def _dict_as_list(cls):
return True
def _fix_value(self, value):
return AttackerInfrastructureType(value)
| bsd-3-clause | -5,174,635,102,382,686,000 | 30.927928 | 99 | 0.670429 | false |
amjames/psi4 | psi4/share/psi4/databases/S66.py | 1 | 148397 |
#
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2018 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This file is part of Psi4.
#
# Psi4 is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, version 3.
#
# Psi4 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with Psi4; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#
"""
| Database (Hobza) of interaction energies for bimolecular complexes.
| Geometries and reference energies from Rezac et al. JCTC 7 2427 (2011).
- **cp** ``'off'`` || ``'on'``
- **rlxd** ``'off'``
- **subset**
- ``'small'``
- ``'large'``
- ``'HB'`` hydrogen-bonded systems
- ``'MX'`` mixed-influence systems
- ``'DD'`` dispersion-dominated systems
"""
import re
import qcdb
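# A typical Psithon invocation of this database (assuming the standard psi4
# database() driver wrapper) might look like:
#     database('mp2', 'S66', cp='on', subset='HB')
# which requests counterpoise-corrected interaction energies for the 23
# hydrogen-bonded members listed in HB below.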
# <<< S66 Database Module >>>
dbse = 'S66'
# <<< Database Members >>>
HRXN = range(1, 67)
HRXN_SM = [1, 12, 59]
HRXN_LG = [26, 34]
HB = range(1, 24)
MX = range(47, 67)
DD = range(24, 47)
# <<< Chemical Systems Involved >>>
RXNM = {} # reaction matrix of reagent contributions per reaction
ACTV = {} # order of active reagents per reaction
ACTV_CP = {} # order of active reagents per counterpoise-corrected reaction
ACTV_SA = {} # order of active reagents for non-supermolecular calculations
for rxn in HRXN:
RXNM[ '%s-%s' % (dbse, rxn)] = {'%s-%s-dimer' % (dbse, rxn) : +1,
'%s-%s-monoA-CP' % (dbse, rxn) : -1,
'%s-%s-monoB-CP' % (dbse, rxn) : -1,
'%s-%s-monoA-unCP' % (dbse, rxn) : -1,
'%s-%s-monoB-unCP' % (dbse, rxn) : -1 }
ACTV_SA['%s-%s' % (dbse, rxn)] = ['%s-%s-dimer' % (dbse, rxn) ]
ACTV_CP['%s-%s' % (dbse, rxn)] = ['%s-%s-dimer' % (dbse, rxn),
'%s-%s-monoA-CP' % (dbse, rxn),
'%s-%s-monoB-CP' % (dbse, rxn) ]
ACTV[ '%s-%s' % (dbse, rxn)] = ['%s-%s-dimer' % (dbse, rxn),
'%s-%s-monoA-unCP' % (dbse, rxn),
'%s-%s-monoB-unCP' % (dbse, rxn) ]
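# For example, reaction S66-1 (the water dimer) combines the RXNM weights as
#   IE = E[S66-1-dimer] - E[S66-1-monoA-unCP] - E[S66-1-monoB-unCP]
# while the counterpoise-corrected route (ACTV_CP) uses the ghosted
# -monoA-CP and -monoB-CP fragments instead.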
# <<< Reference Values [kcal/mol] >>>
BIND = {}
BIND['%s-%s' % (dbse, '1' )] = -4.918
BIND['%s-%s' % (dbse, '2' )] = -5.592
BIND['%s-%s' % (dbse, '3' )] = -6.908
BIND['%s-%s' % (dbse, '4' )] = -8.103
BIND['%s-%s' % (dbse, '5' )] = -5.757
BIND['%s-%s' % (dbse, '6' )] = -7.554
BIND['%s-%s' % (dbse, '7' )] = -8.230
BIND['%s-%s' % (dbse, '8' )] = -5.009
BIND['%s-%s' % (dbse, '9' )] = -3.059
BIND['%s-%s' % (dbse, '10' )] = -4.160
BIND['%s-%s' % (dbse, '11' )] = -5.419
BIND['%s-%s' % (dbse, '12' )] = -7.266
BIND['%s-%s' % (dbse, '13' )] = -6.187
BIND['%s-%s' % (dbse, '14' )] = -7.454
BIND['%s-%s' % (dbse, '15' )] = -8.630
BIND['%s-%s' % (dbse, '16' )] = -5.124
BIND['%s-%s' % (dbse, '17' )] = -17.182
BIND['%s-%s' % (dbse, '18' )] = -6.857
BIND['%s-%s' % (dbse, '19' )] = -7.410
BIND['%s-%s' % (dbse, '20' )] = -19.093
BIND['%s-%s' % (dbse, '21' )] = -16.265
BIND['%s-%s' % (dbse, '22' )] = -19.491
BIND['%s-%s' % (dbse, '23' )] = -19.189
BIND['%s-%s' % (dbse, '24' )] = -2.822
BIND['%s-%s' % (dbse, '25' )] = -3.895
BIND['%s-%s' % (dbse, '26' )] = -9.829
BIND['%s-%s' % (dbse, '27' )] = -3.439
BIND['%s-%s' % (dbse, '28' )] = -5.713
BIND['%s-%s' % (dbse, '29' )] = -6.819
BIND['%s-%s' % (dbse, '30' )] = -1.432
BIND['%s-%s' % (dbse, '31' )] = -3.380
BIND['%s-%s' % (dbse, '32' )] = -3.738
BIND['%s-%s' % (dbse, '33' )] = -1.872
BIND['%s-%s' % (dbse, '34' )] = -3.776
BIND['%s-%s' % (dbse, '35' )] = -2.613
BIND['%s-%s' % (dbse, '36' )] = -1.777
BIND['%s-%s' % (dbse, '37' )] = -2.404
BIND['%s-%s' % (dbse, '38' )] = -2.997
BIND['%s-%s' % (dbse, '39' )] = -3.575
BIND['%s-%s' % (dbse, '40' )] = -2.895
BIND['%s-%s' % (dbse, '41' )] = -4.848
BIND['%s-%s' % (dbse, '42' )] = -4.138
BIND['%s-%s' % (dbse, '43' )] = -3.712
BIND['%s-%s' % (dbse, '44' )] = -2.005
BIND['%s-%s' % (dbse, '45' )] = -1.748
BIND['%s-%s' % (dbse, '46' )] = -4.264
BIND['%s-%s' % (dbse, '47' )] = -2.876
BIND['%s-%s' % (dbse, '48' )] = -3.535
BIND['%s-%s' % (dbse, '49' )] = -3.331
BIND['%s-%s' % (dbse, '50' )] = -2.867
BIND['%s-%s' % (dbse, '51' )] = -1.524
BIND['%s-%s' % (dbse, '52' )] = -4.707
BIND['%s-%s' % (dbse, '53' )] = -4.361
BIND['%s-%s' % (dbse, '54' )] = -3.277
BIND['%s-%s' % (dbse, '55' )] = -4.188
BIND['%s-%s' % (dbse, '56' )] = -3.231
BIND['%s-%s' % (dbse, '57' )] = -5.282
BIND['%s-%s' % (dbse, '58' )] = -4.146
BIND['%s-%s' % (dbse, '59' )] = -2.850
BIND['%s-%s' % (dbse, '60' )] = -4.868
BIND['%s-%s' % (dbse, '61' )] = -2.912
BIND['%s-%s' % (dbse, '62' )] = -3.534
BIND['%s-%s' % (dbse, '63' )] = -3.801
BIND['%s-%s' % (dbse, '64' )] = -2.999
BIND['%s-%s' % (dbse, '65' )] = -3.991
BIND['%s-%s' % (dbse, '66' )] = -3.968
# <<< Comment Lines >>>
TAGL = {}
TAGL['%s-%s' % (dbse, '1' )] = """Water Dimer """
TAGL['%s-%s-dimer' % (dbse, '1' )] = """Dimer from Water Dimer """
TAGL['%s-%s-monoA-CP' % (dbse, '1' )] = """Monomer A from Water Dimer """
TAGL['%s-%s-monoB-CP' % (dbse, '1' )] = """Monomer B from Water Dimer """
TAGL['%s-%s-monoA-unCP' % (dbse, '1' )] = """Monomer A from Water Dimer """
TAGL['%s-%s-monoB-unCP' % (dbse, '1' )] = """Monomer B from Water Dimer """
TAGL['%s-%s' % (dbse, '2' )] = """Water-Methanol """
TAGL['%s-%s-dimer' % (dbse, '2' )] = """Dimer from Water-Methanol """
TAGL['%s-%s-monoA-CP' % (dbse, '2' )] = """Monomer A from Water-Methanol """
TAGL['%s-%s-monoB-CP' % (dbse, '2' )] = """Monomer B from Water-Methanol """
TAGL['%s-%s-monoA-unCP' % (dbse, '2' )] = """Monomer A from Water-Methanol """
TAGL['%s-%s-monoB-unCP' % (dbse, '2' )] = """Monomer B from Water-Methanol """
TAGL['%s-%s' % (dbse, '3' )] = """Water-Methylamine """
TAGL['%s-%s-dimer' % (dbse, '3' )] = """Dimer from Water-Methylamine """
TAGL['%s-%s-monoA-CP' % (dbse, '3' )] = """Monomer A from Water-Methylamine """
TAGL['%s-%s-monoB-CP' % (dbse, '3' )] = """Monomer B from Water-Methylamine """
TAGL['%s-%s-monoA-unCP' % (dbse, '3' )] = """Monomer A from Water-Methylamine """
TAGL['%s-%s-monoB-unCP' % (dbse, '3' )] = """Monomer B from Water-Methylamine """
TAGL['%s-%s' % (dbse, '4' )] = """Water-N-methylacetamide """
TAGL['%s-%s-dimer' % (dbse, '4' )] = """Dimer from Water-N-methylacetamide """
TAGL['%s-%s-monoA-CP' % (dbse, '4' )] = """Monomer A from Water-N-methylacetamide """
TAGL['%s-%s-monoB-CP' % (dbse, '4' )] = """Monomer B from Water-N-methylacetamide """
TAGL['%s-%s-monoA-unCP' % (dbse, '4' )] = """Monomer A from Water-N-methylacetamide """
TAGL['%s-%s-monoB-unCP' % (dbse, '4' )] = """Monomer B from Water-N-methylacetamide """
TAGL['%s-%s' % (dbse, '5' )] = """Methanol Dimer """
TAGL['%s-%s-dimer' % (dbse, '5' )] = """Dimer from Methanol Dimer """
TAGL['%s-%s-monoA-CP' % (dbse, '5' )] = """Monomer A from Methanol Dimer """
TAGL['%s-%s-monoB-CP' % (dbse, '5' )] = """Monomer B from Methanol Dimer """
TAGL['%s-%s-monoA-unCP' % (dbse, '5' )] = """Monomer A from Methanol Dimer """
TAGL['%s-%s-monoB-unCP' % (dbse, '5' )] = """Monomer B from Methanol Dimer """
TAGL['%s-%s' % (dbse, '6' )] = """Methanol-Methylamine """
TAGL['%s-%s-dimer' % (dbse, '6' )] = """Dimer from Methanol-Methylamine """
TAGL['%s-%s-monoA-CP' % (dbse, '6' )] = """Monomer A from Methanol-Methylamine """
TAGL['%s-%s-monoB-CP' % (dbse, '6' )] = """Monomer B from Methanol-Methylamine """
TAGL['%s-%s-monoA-unCP' % (dbse, '6' )] = """Monomer A from Methanol-Methylamine """
TAGL['%s-%s-monoB-unCP' % (dbse, '6' )] = """Monomer B from Methanol-Methylamine """
TAGL['%s-%s' % (dbse, '7' )] = """Methanol-N-methylacetamide """
TAGL['%s-%s-dimer' % (dbse, '7' )] = """Dimer from Methanol-N-methylacetamide """
TAGL['%s-%s-monoA-CP' % (dbse, '7' )] = """Monomer A from Methanol-N-methylacetamide """
TAGL['%s-%s-monoB-CP' % (dbse, '7' )] = """Monomer B from Methanol-N-methylacetamide """
TAGL['%s-%s-monoA-unCP' % (dbse, '7' )] = """Monomer A from Methanol-N-methylacetamide """
TAGL['%s-%s-monoB-unCP' % (dbse, '7' )] = """Monomer B from Methanol-N-methylacetamide """
TAGL['%s-%s' % (dbse, '8' )] = """Methanol-Water """
TAGL['%s-%s-dimer' % (dbse, '8' )] = """Dimer from Methanol-Water """
TAGL['%s-%s-monoA-CP' % (dbse, '8' )] = """Monomer A from Methanol-Water """
TAGL['%s-%s-monoB-CP' % (dbse, '8' )] = """Monomer B from Methanol-Water """
TAGL['%s-%s-monoA-unCP' % (dbse, '8' )] = """Monomer A from Methanol-Water """
TAGL['%s-%s-monoB-unCP' % (dbse, '8' )] = """Monomer B from Methanol-Water """
TAGL['%s-%s' % (dbse, '9' )] = """Methylamine-Methanol """
TAGL['%s-%s-dimer' % (dbse, '9' )] = """Dimer from Methylamine-Methanol """
TAGL['%s-%s-monoA-CP' % (dbse, '9' )] = """Monomer A from Methylamine-Methanol """
TAGL['%s-%s-monoB-CP' % (dbse, '9' )] = """Monomer B from Methylamine-Methanol """
TAGL['%s-%s-monoA-unCP' % (dbse, '9' )] = """Monomer A from Methylamine-Methanol """
TAGL['%s-%s-monoB-unCP' % (dbse, '9' )] = """Monomer B from Methylamine-Methanol """
TAGL['%s-%s' % (dbse, '10' )] = """Methylamine Dimer """
TAGL['%s-%s-dimer' % (dbse, '10' )] = """Dimer from Methylamine Dimer """
TAGL['%s-%s-monoA-CP' % (dbse, '10' )] = """Monomer A from Methylamine Dimer """
TAGL['%s-%s-monoB-CP' % (dbse, '10' )] = """Monomer B from Methylamine Dimer """
TAGL['%s-%s-monoA-unCP' % (dbse, '10' )] = """Monomer A from Methylamine Dimer """
TAGL['%s-%s-monoB-unCP' % (dbse, '10' )] = """Monomer B from Methylamine Dimer """
TAGL['%s-%s' % (dbse, '11' )] = """Methylamine-N-methylacetamide """
TAGL['%s-%s-dimer' % (dbse, '11' )] = """Dimer from Methylamine-N-methylacetamide """
TAGL['%s-%s-monoA-CP' % (dbse, '11' )] = """Monomer A from Methylamine-N-methylacetamide """
TAGL['%s-%s-monoB-CP' % (dbse, '11' )] = """Monomer B from Methylamine-N-methylacetamide """
TAGL['%s-%s-monoA-unCP' % (dbse, '11' )] = """Monomer A from Methylamine-N-methylacetamide """
TAGL['%s-%s-monoB-unCP' % (dbse, '11' )] = """Monomer B from Methylamine-N-methylacetamide """
TAGL['%s-%s' % (dbse, '12' )] = """Methylamine-Water """
TAGL['%s-%s-dimer' % (dbse, '12' )] = """Dimer from Methylamine-Water """
TAGL['%s-%s-monoA-CP' % (dbse, '12' )] = """Monomer A from Methylamine-Water """
TAGL['%s-%s-monoB-CP' % (dbse, '12' )] = """Monomer B from Methylamine-Water """
TAGL['%s-%s-monoA-unCP' % (dbse, '12' )] = """Monomer A from Methylamine-Water """
TAGL['%s-%s-monoB-unCP' % (dbse, '12' )] = """Monomer B from Methylamine-Water """
TAGL['%s-%s' % (dbse, '13' )] = """N-methylacetamide-Methanol """
TAGL['%s-%s-dimer' % (dbse, '13' )] = """Dimer from N-methylacetamide-Methanol """
TAGL['%s-%s-monoA-CP' % (dbse, '13' )] = """Monomer A from N-methylacetamide-Methanol """
TAGL['%s-%s-monoB-CP' % (dbse, '13' )] = """Monomer B from N-methylacetamide-Methanol """
TAGL['%s-%s-monoA-unCP' % (dbse, '13' )] = """Monomer A from N-methylacetamide-Methanol """
TAGL['%s-%s-monoB-unCP' % (dbse, '13' )] = """Monomer B from N-methylacetamide-Methanol """
TAGL['%s-%s' % (dbse, '14' )] = """N-methylacetamide-Methylamine """
TAGL['%s-%s-dimer' % (dbse, '14' )] = """Dimer from N-methylacetamide-Methylamine """
TAGL['%s-%s-monoA-CP' % (dbse, '14' )] = """Monomer A from N-methylacetamide-Methylamine """
TAGL['%s-%s-monoB-CP' % (dbse, '14' )] = """Monomer B from N-methylacetamide-Methylamine """
TAGL['%s-%s-monoA-unCP' % (dbse, '14' )] = """Monomer A from N-methylacetamide-Methylamine """
TAGL['%s-%s-monoB-unCP' % (dbse, '14' )] = """Monomer B from N-methylacetamide-Methylamine """
TAGL['%s-%s' % (dbse, '15' )] = """N-methylacetamide Dimer """
TAGL['%s-%s-dimer' % (dbse, '15' )] = """Dimer from N-methylacetamide Dimer """
TAGL['%s-%s-monoA-CP' % (dbse, '15' )] = """Monomer A from N-methylacetamide Dimer """
TAGL['%s-%s-monoB-CP' % (dbse, '15' )] = """Monomer B from N-methylacetamide Dimer """
TAGL['%s-%s-monoA-unCP' % (dbse, '15' )] = """Monomer A from N-methylacetamide Dimer """
TAGL['%s-%s-monoB-unCP' % (dbse, '15' )] = """Monomer B from N-methylacetamide Dimer """
TAGL['%s-%s' % (dbse, '16' )] = """N-methylacetamide-Water """
TAGL['%s-%s-dimer' % (dbse, '16' )] = """Dimer from N-methylacetamide-Water """
TAGL['%s-%s-monoA-CP' % (dbse, '16' )] = """Monomer A from N-methylacetamide-Water """
TAGL['%s-%s-monoB-CP' % (dbse, '16' )] = """Monomer B from N-methylacetamide-Water """
TAGL['%s-%s-monoA-unCP' % (dbse, '16' )] = """Monomer A from N-methylacetamide-Water """
TAGL['%s-%s-monoB-unCP' % (dbse, '16' )] = """Monomer B from N-methylacetamide-Water """
TAGL['%s-%s' % (dbse, '17' )] = """Uracil Dimer, HB """
TAGL['%s-%s-dimer' % (dbse, '17' )] = """Dimer from Uracil Dimer, HB """
TAGL['%s-%s-monoA-CP' % (dbse, '17' )] = """Monomer A from Uracil Dimer, HB """
TAGL['%s-%s-monoB-CP' % (dbse, '17' )] = """Monomer B from Uracil Dimer, HB """
TAGL['%s-%s-monoA-unCP' % (dbse, '17' )] = """Monomer A from Uracil Dimer, HB """
TAGL['%s-%s-monoB-unCP' % (dbse, '17' )] = """Monomer B from Uracil Dimer, HB """
TAGL['%s-%s' % (dbse, '18' )] = """Water-Pyridine """
TAGL['%s-%s-dimer' % (dbse, '18' )] = """Dimer from Water-Pyridine """
TAGL['%s-%s-monoA-CP' % (dbse, '18' )] = """Monomer A from Water-Pyridine """
TAGL['%s-%s-monoB-CP' % (dbse, '18' )] = """Monomer B from Water-Pyridine """
TAGL['%s-%s-monoA-unCP' % (dbse, '18' )] = """Monomer A from Water-Pyridine """
TAGL['%s-%s-monoB-unCP' % (dbse, '18' )] = """Monomer B from Water-Pyridine """
TAGL['%s-%s' % (dbse, '19' )] = """Methanol-Pyridine """
TAGL['%s-%s-dimer' % (dbse, '19' )] = """Dimer from Methanol-Pyridine """
TAGL['%s-%s-monoA-CP' % (dbse, '19' )] = """Monomer A from Methanol-Pyridine """
TAGL['%s-%s-monoB-CP' % (dbse, '19' )] = """Monomer B from Methanol-Pyridine """
TAGL['%s-%s-monoA-unCP' % (dbse, '19' )] = """Monomer A from Methanol-Pyridine """
TAGL['%s-%s-monoB-unCP' % (dbse, '19' )] = """Monomer B from Methanol-Pyridine """
TAGL['%s-%s' % (dbse, '20' )] = """Acetic Acid Dimer """
TAGL['%s-%s-dimer' % (dbse, '20' )] = """Dimer from Acetic Acid Dimer """
TAGL['%s-%s-monoA-CP' % (dbse, '20' )] = """Monomer A from Acetic Acid Dimer """
TAGL['%s-%s-monoB-CP' % (dbse, '20' )] = """Monomer B from Acetic Acid Dimer """
TAGL['%s-%s-monoA-unCP' % (dbse, '20' )] = """Monomer A from Acetic Acid Dimer """
TAGL['%s-%s-monoB-unCP' % (dbse, '20' )] = """Monomer B from Acetic Acid Dimer """
TAGL['%s-%s' % (dbse, '21' )] = """Acetamide Dimer """
TAGL['%s-%s-dimer' % (dbse, '21' )] = """Dimer from Acetamide Dimer """
TAGL['%s-%s-monoA-CP' % (dbse, '21' )] = """Monomer A from Acetamide Dimer """
TAGL['%s-%s-monoB-CP' % (dbse, '21' )] = """Monomer B from Acetamide Dimer """
TAGL['%s-%s-monoA-unCP' % (dbse, '21' )] = """Monomer A from Acetamide Dimer """
TAGL['%s-%s-monoB-unCP' % (dbse, '21' )] = """Monomer B from Acetamide Dimer """
TAGL['%s-%s' % (dbse, '22' )] = """Acetic Acid-Uracil """
TAGL['%s-%s-dimer' % (dbse, '22' )] = """Dimer from Acetic Acid-Uracil """
TAGL['%s-%s-monoA-CP' % (dbse, '22' )] = """Monomer A from Acetic Acid-Uracil """
TAGL['%s-%s-monoB-CP' % (dbse, '22' )] = """Monomer B from Acetic Acid-Uracil """
TAGL['%s-%s-monoA-unCP' % (dbse, '22' )] = """Monomer A from Acetic Acid-Uracil """
TAGL['%s-%s-monoB-unCP' % (dbse, '22' )] = """Monomer B from Acetic Acid-Uracil """
TAGL['%s-%s' % (dbse, '23' )] = """Acetamide-Uracil """
TAGL['%s-%s-dimer' % (dbse, '23' )] = """Dimer from Acetamide-Uracil """
TAGL['%s-%s-monoA-CP' % (dbse, '23' )] = """Monomer A from Acetamide-Uracil """
TAGL['%s-%s-monoB-CP' % (dbse, '23' )] = """Monomer B from Acetamide-Uracil """
TAGL['%s-%s-monoA-unCP' % (dbse, '23' )] = """Monomer A from Acetamide-Uracil """
TAGL['%s-%s-monoB-unCP' % (dbse, '23' )] = """Monomer B from Acetamide-Uracil """
TAGL['%s-%s' % (dbse, '24' )] = """Benzene Dimer, pi-pi """
TAGL['%s-%s-dimer' % (dbse, '24' )] = """Dimer from Benzene Dimer, pi-pi """
TAGL['%s-%s-monoA-CP' % (dbse, '24' )] = """Monomer A from Benzene Dimer, pi-pi """
TAGL['%s-%s-monoB-CP' % (dbse, '24' )] = """Monomer B from Benzene Dimer, pi-pi """
TAGL['%s-%s-monoA-unCP' % (dbse, '24' )] = """Monomer A from Benzene Dimer, pi-pi """
TAGL['%s-%s-monoB-unCP' % (dbse, '24' )] = """Monomer B from Benzene Dimer, pi-pi """
TAGL['%s-%s' % (dbse, '25' )] = """Pyridine Dimer, pi-pi """
TAGL['%s-%s-dimer' % (dbse, '25' )] = """Dimer from Pyridine Dimer, pi-pi """
TAGL['%s-%s-monoA-CP' % (dbse, '25' )] = """Monomer A from Pyridine Dimer, pi-pi """
TAGL['%s-%s-monoB-CP' % (dbse, '25' )] = """Monomer B from Pyridine Dimer, pi-pi """
TAGL['%s-%s-monoA-unCP' % (dbse, '25' )] = """Monomer A from Pyridine Dimer, pi-pi """
TAGL['%s-%s-monoB-unCP' % (dbse, '25' )] = """Monomer B from Pyridine Dimer, pi-pi """
TAGL['%s-%s' % (dbse, '26' )] = """Uracil Dimer, pi-pi """
TAGL['%s-%s-dimer' % (dbse, '26' )] = """Dimer from Uracil Dimer, pi-pi """
TAGL['%s-%s-monoA-CP' % (dbse, '26' )] = """Monomer A from Uracil Dimer, pi-pi """
TAGL['%s-%s-monoB-CP' % (dbse, '26' )] = """Monomer B from Uracil Dimer, pi-pi """
TAGL['%s-%s-monoA-unCP' % (dbse, '26' )] = """Monomer A from Uracil Dimer, pi-pi """
TAGL['%s-%s-monoB-unCP' % (dbse, '26' )] = """Monomer B from Uracil Dimer, pi-pi """
TAGL['%s-%s' % (dbse, '27' )] = """Benzene-Pyridine, pi-pi """
TAGL['%s-%s-dimer' % (dbse, '27' )] = """Dimer from Benzene-Pyridine, pi-pi """
TAGL['%s-%s-monoA-CP' % (dbse, '27' )] = """Monomer A from Benzene-Pyridine, pi-pi """
TAGL['%s-%s-monoB-CP' % (dbse, '27' )] = """Monomer B from Benzene-Pyridine, pi-pi """
TAGL['%s-%s-monoA-unCP' % (dbse, '27' )] = """Monomer A from Benzene-Pyridine, pi-pi """
TAGL['%s-%s-monoB-unCP' % (dbse, '27' )] = """Monomer B from Benzene-Pyridine, pi-pi """
TAGL['%s-%s' % (dbse, '28' )] = """Benzene-Uracil, pi-pi """
TAGL['%s-%s-dimer' % (dbse, '28' )] = """Dimer from Benzene-Uracil, pi-pi """
TAGL['%s-%s-monoA-CP' % (dbse, '28' )] = """Monomer A from Benzene-Uracil, pi-pi """
TAGL['%s-%s-monoB-CP' % (dbse, '28' )] = """Monomer B from Benzene-Uracil, pi-pi """
TAGL['%s-%s-monoA-unCP' % (dbse, '28' )] = """Monomer A from Benzene-Uracil, pi-pi """
TAGL['%s-%s-monoB-unCP' % (dbse, '28' )] = """Monomer B from Benzene-Uracil, pi-pi """
TAGL['%s-%s' % (dbse, '29' )] = """Pyridine-Uracil, pi-pi """
TAGL['%s-%s-dimer' % (dbse, '29' )] = """Dimer from Pyridine-Uracil, pi-pi """
TAGL['%s-%s-monoA-CP' % (dbse, '29' )] = """Monomer A from Pyridine-Uracil, pi-pi """
TAGL['%s-%s-monoB-CP' % (dbse, '29' )] = """Monomer B from Pyridine-Uracil, pi-pi """
TAGL['%s-%s-monoA-unCP' % (dbse, '29' )] = """Monomer A from Pyridine-Uracil, pi-pi """
TAGL['%s-%s-monoB-unCP' % (dbse, '29' )] = """Monomer B from Pyridine-Uracil, pi-pi """
TAGL['%s-%s' % (dbse, '30' )] = """Benzene-Ethene """
TAGL['%s-%s-dimer' % (dbse, '30' )] = """Dimer from Benzene-Ethene """
TAGL['%s-%s-monoA-CP' % (dbse, '30' )] = """Monomer A from Benzene-Ethene """
TAGL['%s-%s-monoB-CP' % (dbse, '30' )] = """Monomer B from Benzene-Ethene """
TAGL['%s-%s-monoA-unCP' % (dbse, '30' )] = """Monomer A from Benzene-Ethene """
TAGL['%s-%s-monoB-unCP' % (dbse, '30' )] = """Monomer B from Benzene-Ethene """
TAGL['%s-%s' % (dbse, '31' )] = """Uracil-Ethene """
TAGL['%s-%s-dimer' % (dbse, '31' )] = """Dimer from Uracil-Ethene """
TAGL['%s-%s-monoA-CP' % (dbse, '31' )] = """Monomer A from Uracil-Ethene """
TAGL['%s-%s-monoB-CP' % (dbse, '31' )] = """Monomer B from Uracil-Ethene """
TAGL['%s-%s-monoA-unCP' % (dbse, '31' )] = """Monomer A from Uracil-Ethene """
TAGL['%s-%s-monoB-unCP' % (dbse, '31' )] = """Monomer B from Uracil-Ethene """
TAGL['%s-%s' % (dbse, '32' )] = """Uracil-Ethyne """
TAGL['%s-%s-dimer' % (dbse, '32' )] = """Dimer from Uracil-Ethyne """
TAGL['%s-%s-monoA-CP' % (dbse, '32' )] = """Monomer A from Uracil-Ethyne """
TAGL['%s-%s-monoB-CP' % (dbse, '32' )] = """Monomer B from Uracil-Ethyne """
TAGL['%s-%s-monoA-unCP' % (dbse, '32' )] = """Monomer A from Uracil-Ethyne """
TAGL['%s-%s-monoB-unCP' % (dbse, '32' )] = """Monomer B from Uracil-Ethyne """
TAGL['%s-%s' % (dbse, '33' )] = """Pyridine-Ethene """
TAGL['%s-%s-dimer' % (dbse, '33' )] = """Dimer from Pyridine-Ethene """
TAGL['%s-%s-monoA-CP' % (dbse, '33' )] = """Monomer A from Pyridine-Ethene """
TAGL['%s-%s-monoB-CP' % (dbse, '33' )] = """Monomer B from Pyridine-Ethene """
TAGL['%s-%s-monoA-unCP' % (dbse, '33' )] = """Monomer A from Pyridine-Ethene """
TAGL['%s-%s-monoB-unCP' % (dbse, '33' )] = """Monomer B from Pyridine-Ethene """
TAGL['%s-%s' % (dbse, '34' )] = """Pentane Dimer """
TAGL['%s-%s-dimer' % (dbse, '34' )] = """Dimer from Pentane Dimer """
TAGL['%s-%s-monoA-CP' % (dbse, '34' )] = """Monomer A from Pentane Dimer """
TAGL['%s-%s-monoB-CP' % (dbse, '34' )] = """Monomer B from Pentane Dimer """
TAGL['%s-%s-monoA-unCP' % (dbse, '34' )] = """Monomer A from Pentane Dimer """
TAGL['%s-%s-monoB-unCP' % (dbse, '34' )] = """Monomer B from Pentane Dimer """
TAGL['%s-%s' % (dbse, '35' )] = """Neopentane-Pentane """
TAGL['%s-%s-dimer' % (dbse, '35' )] = """Dimer from Neopentane-Pentane """
TAGL['%s-%s-monoA-CP' % (dbse, '35' )] = """Monomer A from Neopentane-Pentane """
TAGL['%s-%s-monoB-CP' % (dbse, '35' )] = """Monomer B from Neopentane-Pentane """
TAGL['%s-%s-monoA-unCP' % (dbse, '35' )] = """Monomer A from Neopentane-Pentane """
TAGL['%s-%s-monoB-unCP' % (dbse, '35' )] = """Monomer B from Neopentane-Pentane """
TAGL['%s-%s' % (dbse, '36' )] = """Neopentane Dimer """
TAGL['%s-%s-dimer' % (dbse, '36' )] = """Dimer from Neopentane Dimer """
TAGL['%s-%s-monoA-CP' % (dbse, '36' )] = """Monomer A from Neopentane Dimer """
TAGL['%s-%s-monoB-CP' % (dbse, '36' )] = """Monomer B from Neopentane Dimer """
TAGL['%s-%s-monoA-unCP' % (dbse, '36' )] = """Monomer A from Neopentane Dimer """
TAGL['%s-%s-monoB-unCP' % (dbse, '36' )] = """Monomer B from Neopentane Dimer """
TAGL['%s-%s' % (dbse, '37' )] = """Cyclopentane-Neopentane """
TAGL['%s-%s-dimer' % (dbse, '37' )] = """Dimer from Cyclopentane-Neopentane """
TAGL['%s-%s-monoA-CP' % (dbse, '37' )] = """Monomer A from Cyclopentane-Neopentane """
TAGL['%s-%s-monoB-CP' % (dbse, '37' )] = """Monomer B from Cyclopentane-Neopentane """
TAGL['%s-%s-monoA-unCP' % (dbse, '37' )] = """Monomer A from Cyclopentane-Neopentane """
TAGL['%s-%s-monoB-unCP' % (dbse, '37' )] = """Monomer B from Cyclopentane-Neopentane """
TAGL['%s-%s' % (dbse, '38' )] = """Cyclopentane Dimer """
TAGL['%s-%s-dimer' % (dbse, '38' )] = """Dimer from Cyclopentane Dimer """
TAGL['%s-%s-monoA-CP' % (dbse, '38' )] = """Monomer A from Cyclopentane Dimer """
TAGL['%s-%s-monoB-CP' % (dbse, '38' )] = """Monomer B from Cyclopentane Dimer """
TAGL['%s-%s-monoA-unCP' % (dbse, '38' )] = """Monomer A from Cyclopentane Dimer """
TAGL['%s-%s-monoB-unCP' % (dbse, '38' )] = """Monomer B from Cyclopentane Dimer """
TAGL['%s-%s' % (dbse, '39' )] = """Benzene-Cyclopentane """
TAGL['%s-%s-dimer' % (dbse, '39' )] = """Dimer from Benzene-Cyclopentane """
TAGL['%s-%s-monoA-CP' % (dbse, '39' )] = """Monomer A from Benzene-Cyclopentane """
TAGL['%s-%s-monoB-CP' % (dbse, '39' )] = """Monomer B from Benzene-Cyclopentane """
TAGL['%s-%s-monoA-unCP' % (dbse, '39' )] = """Monomer A from Benzene-Cyclopentane """
TAGL['%s-%s-monoB-unCP' % (dbse, '39' )] = """Monomer B from Benzene-Cyclopentane """
TAGL['%s-%s' % (dbse, '40' )] = """Benzene-Neopentane """
TAGL['%s-%s-dimer' % (dbse, '40' )] = """Dimer from Benzene-Neopentane """
TAGL['%s-%s-monoA-CP' % (dbse, '40' )] = """Monomer A from Benzene-Neopentane """
TAGL['%s-%s-monoB-CP' % (dbse, '40' )] = """Monomer B from Benzene-Neopentane """
TAGL['%s-%s-monoA-unCP' % (dbse, '40' )] = """Monomer A from Benzene-Neopentane """
TAGL['%s-%s-monoB-unCP' % (dbse, '40' )] = """Monomer B from Benzene-Neopentane """
TAGL['%s-%s' % (dbse, '41' )] = """Uracil-Pentane """
TAGL['%s-%s-dimer' % (dbse, '41' )] = """Dimer from Uracil-Pentane """
TAGL['%s-%s-monoA-CP' % (dbse, '41' )] = """Monomer A from Uracil-Pentane """
TAGL['%s-%s-monoB-CP' % (dbse, '41' )] = """Monomer B from Uracil-Pentane """
TAGL['%s-%s-monoA-unCP' % (dbse, '41' )] = """Monomer A from Uracil-Pentane """
TAGL['%s-%s-monoB-unCP' % (dbse, '41' )] = """Monomer B from Uracil-Pentane """
TAGL['%s-%s' % (dbse, '42' )] = """Uracil-Cyclopentane """
TAGL['%s-%s-dimer' % (dbse, '42' )] = """Dimer from Uracil-Cyclopentane """
TAGL['%s-%s-monoA-CP' % (dbse, '42' )] = """Monomer A from Uracil-Cyclopentane """
TAGL['%s-%s-monoB-CP' % (dbse, '42' )] = """Monomer B from Uracil-Cyclopentane """
TAGL['%s-%s-monoA-unCP' % (dbse, '42' )] = """Monomer A from Uracil-Cyclopentane """
TAGL['%s-%s-monoB-unCP' % (dbse, '42' )] = """Monomer B from Uracil-Cyclopentane """
TAGL['%s-%s' % (dbse, '43' )] = """Uracil-Neopentane """
TAGL['%s-%s-dimer' % (dbse, '43' )] = """Dimer from Uracil-Neopentane """
TAGL['%s-%s-monoA-CP' % (dbse, '43' )] = """Monomer A from Uracil-Neopentane """
TAGL['%s-%s-monoB-CP' % (dbse, '43' )] = """Monomer B from Uracil-Neopentane """
TAGL['%s-%s-monoA-unCP' % (dbse, '43' )] = """Monomer A from Uracil-Neopentane """
TAGL['%s-%s-monoB-unCP' % (dbse, '43' )] = """Monomer B from Uracil-Neopentane """
TAGL['%s-%s' % (dbse, '44' )] = """Ethene-Pentane """
TAGL['%s-%s-dimer' % (dbse, '44' )] = """Dimer from Ethene-Pentane """
TAGL['%s-%s-monoA-CP' % (dbse, '44' )] = """Monomer A from Ethene-Pentane """
TAGL['%s-%s-monoB-CP' % (dbse, '44' )] = """Monomer B from Ethene-Pentane """
TAGL['%s-%s-monoA-unCP' % (dbse, '44' )] = """Monomer A from Ethene-Pentane """
TAGL['%s-%s-monoB-unCP' % (dbse, '44' )] = """Monomer B from Ethene-Pentane """
TAGL['%s-%s' % (dbse, '45' )] = """Ethyne-Pentane """
TAGL['%s-%s-dimer' % (dbse, '45' )] = """Dimer from Ethyne-Pentane """
TAGL['%s-%s-monoA-CP' % (dbse, '45' )] = """Monomer A from Ethyne-Pentane """
TAGL['%s-%s-monoB-CP' % (dbse, '45' )] = """Monomer B from Ethyne-Pentane """
TAGL['%s-%s-monoA-unCP' % (dbse, '45' )] = """Monomer A from Ethyne-Pentane """
TAGL['%s-%s-monoB-unCP' % (dbse, '45' )] = """Monomer B from Ethyne-Pentane """
TAGL['%s-%s' % (dbse, '46' )] = """N-methylacetamide-Pentane """
TAGL['%s-%s-dimer' % (dbse, '46' )] = """Dimer from N-methylacetamide-Pentane """
TAGL['%s-%s-monoA-CP' % (dbse, '46' )] = """Monomer A from N-methylacetamide-Pentane """
TAGL['%s-%s-monoB-CP' % (dbse, '46' )] = """Monomer B from N-methylacetamide-Pentane """
TAGL['%s-%s-monoA-unCP' % (dbse, '46' )] = """Monomer A from N-methylacetamide-Pentane """
TAGL['%s-%s-monoB-unCP' % (dbse, '46' )] = """Monomer B from N-methylacetamide-Pentane """
TAGL['%s-%s' % (dbse, '47' )] = """Benzene Dimer, CH-pi """
TAGL['%s-%s-dimer' % (dbse, '47' )] = """Dimer from Benzene Dimer, CH-pi """
TAGL['%s-%s-monoA-CP' % (dbse, '47' )] = """Monomer A from Benzene Dimer, CH-pi """
TAGL['%s-%s-monoB-CP' % (dbse, '47' )] = """Monomer B from Benzene Dimer, CH-pi """
TAGL['%s-%s-monoA-unCP' % (dbse, '47' )] = """Monomer A from Benzene Dimer, CH-pi """
TAGL['%s-%s-monoB-unCP' % (dbse, '47' )] = """Monomer B from Benzene Dimer, CH-pi """
TAGL['%s-%s' % (dbse, '48' )] = """Pyridine Dimer, CH-pi """
TAGL['%s-%s-dimer' % (dbse, '48' )] = """Dimer from Pyridine Dimer, CH-pi """
TAGL['%s-%s-monoA-CP' % (dbse, '48' )] = """Monomer A from Pyridine Dimer, CH-pi """
TAGL['%s-%s-monoB-CP' % (dbse, '48' )] = """Monomer B from Pyridine Dimer, CH-pi """
TAGL['%s-%s-monoA-unCP' % (dbse, '48' )] = """Monomer A from Pyridine Dimer, CH-pi """
TAGL['%s-%s-monoB-unCP' % (dbse, '48' )] = """Monomer B from Pyridine Dimer, CH-pi """
TAGL['%s-%s' % (dbse, '49' )] = """Benzene-Pyridine, CH-pi """
TAGL['%s-%s-dimer' % (dbse, '49' )] = """Dimer from Benzene-Pyridine, CH-pi """
TAGL['%s-%s-monoA-CP' % (dbse, '49' )] = """Monomer A from Benzene-Pyridine, CH-pi """
TAGL['%s-%s-monoB-CP' % (dbse, '49' )] = """Monomer B from Benzene-Pyridine, CH-pi """
TAGL['%s-%s-monoA-unCP' % (dbse, '49' )] = """Monomer A from Benzene-Pyridine, CH-pi """
TAGL['%s-%s-monoB-unCP' % (dbse, '49' )] = """Monomer B from Benzene-Pyridine, CH-pi """
TAGL['%s-%s' % (dbse, '50' )] = """Benzene-Ethyne, CH-pi """
TAGL['%s-%s-dimer' % (dbse, '50' )] = """Dimer from Benzene-Ethyne, CH-pi """
TAGL['%s-%s-monoA-CP' % (dbse, '50' )] = """Monomer A from Benzene-Ethyne, CH-pi """
TAGL['%s-%s-monoB-CP' % (dbse, '50' )] = """Monomer B from Benzene-Ethyne, CH-pi """
TAGL['%s-%s-monoA-unCP' % (dbse, '50' )] = """Monomer A from Benzene-Ethyne, CH-pi """
TAGL['%s-%s-monoB-unCP' % (dbse, '50' )] = """Monomer B from Benzene-Ethyne, CH-pi """
TAGL['%s-%s' % (dbse, '51' )] = """Ethyne Dimer, CH-pi """
TAGL['%s-%s-dimer' % (dbse, '51' )] = """Dimer from Ethyne Dimer, CH-pi """
TAGL['%s-%s-monoA-CP' % (dbse, '51' )] = """Monomer A from Ethyne Dimer, CH-pi """
TAGL['%s-%s-monoB-CP' % (dbse, '51' )] = """Monomer B from Ethyne Dimer, CH-pi """
TAGL['%s-%s-monoA-unCP' % (dbse, '51' )] = """Monomer A from Ethyne Dimer, CH-pi """
TAGL['%s-%s-monoB-unCP' % (dbse, '51' )] = """Monomer B from Ethyne Dimer, CH-pi """
TAGL['%s-%s' % (dbse, '52' )] = """Benzene-Acetic Acid, OH-pi """
TAGL['%s-%s-dimer' % (dbse, '52' )] = """Dimer from Benzene-Acetic Acid, OH-pi """
TAGL['%s-%s-monoA-CP' % (dbse, '52' )] = """Monomer A from Benzene-Acetic Acid, OH-pi """
TAGL['%s-%s-monoB-CP' % (dbse, '52' )] = """Monomer B from Benzene-Acetic Acid, OH-pi """
TAGL['%s-%s-monoA-unCP' % (dbse, '52' )] = """Monomer A from Benzene-Acetic Acid, OH-pi """
TAGL['%s-%s-monoB-unCP' % (dbse, '52' )] = """Monomer B from Benzene-Acetic Acid, OH-pi """
TAGL['%s-%s' % (dbse, '53' )] = """Benzene-Acetamide, NH-pi """
TAGL['%s-%s-dimer' % (dbse, '53' )] = """Dimer from Benzene-Acetamide, NH-pi """
TAGL['%s-%s-monoA-CP' % (dbse, '53' )] = """Monomer A from Benzene-Acetamide, NH-pi """
TAGL['%s-%s-monoB-CP' % (dbse, '53' )] = """Monomer B from Benzene-Acetamide, NH-pi """
TAGL['%s-%s-monoA-unCP' % (dbse, '53' )] = """Monomer A from Benzene-Acetamide, NH-pi """
TAGL['%s-%s-monoB-unCP' % (dbse, '53' )] = """Monomer B from Benzene-Acetamide, NH-pi """
TAGL['%s-%s' % (dbse, '54' )] = """Benzene-Water, OH-pi """
TAGL['%s-%s-dimer' % (dbse, '54' )] = """Dimer from Benzene-Water, OH-pi """
TAGL['%s-%s-monoA-CP' % (dbse, '54' )] = """Monomer A from Benzene-Water, OH-pi """
TAGL['%s-%s-monoB-CP' % (dbse, '54' )] = """Monomer B from Benzene-Water, OH-pi """
TAGL['%s-%s-monoA-unCP' % (dbse, '54' )] = """Monomer A from Benzene-Water, OH-pi """
TAGL['%s-%s-monoB-unCP' % (dbse, '54' )] = """Monomer B from Benzene-Water, OH-pi """
TAGL['%s-%s' % (dbse, '55' )] = """Benzene-Methanol, OH-pi """
TAGL['%s-%s-dimer' % (dbse, '55' )] = """Dimer from Benzene-Methanol, OH-pi """
TAGL['%s-%s-monoA-CP' % (dbse, '55' )] = """Monomer A from Benzene-Methanol, OH-pi """
TAGL['%s-%s-monoB-CP' % (dbse, '55' )] = """Monomer B from Benzene-Methanol, OH-pi """
TAGL['%s-%s-monoA-unCP' % (dbse, '55' )] = """Monomer A from Benzene-Methanol, OH-pi """
TAGL['%s-%s-monoB-unCP' % (dbse, '55' )] = """Monomer B from Benzene-Methanol, OH-pi """
TAGL['%s-%s' % (dbse, '56' )] = """Benzene-Methylamine, NH-pi """
TAGL['%s-%s-dimer' % (dbse, '56' )] = """Dimer from Benzene-Methylamine, NH-pi """
TAGL['%s-%s-monoA-CP' % (dbse, '56' )] = """Monomer A from Benzene-Methylamine, NH-pi """
TAGL['%s-%s-monoB-CP' % (dbse, '56' )] = """Monomer B from Benzene-Methylamine, NH-pi """
TAGL['%s-%s-monoA-unCP' % (dbse, '56' )] = """Monomer A from Benzene-Methylamine, NH-pi """
TAGL['%s-%s-monoB-unCP' % (dbse, '56' )] = """Monomer B from Benzene-Methylamine, NH-pi """
TAGL['%s-%s' % (dbse, '57' )] = """Benzene-N-methylacetamide, NH-pi """
TAGL['%s-%s-dimer' % (dbse, '57' )] = """Dimer from Benzene-N-methylacetamide, NH-pi """
TAGL['%s-%s-monoA-CP' % (dbse, '57' )] = """Monomer A from Benzene-N-methylacetamide, NH-pi """
TAGL['%s-%s-monoB-CP' % (dbse, '57' )] = """Monomer B from Benzene-N-methylacetamide, NH-pi """
TAGL['%s-%s-monoA-unCP' % (dbse, '57' )] = """Monomer A from Benzene-N-methylacetamide, NH-pi """
TAGL['%s-%s-monoB-unCP' % (dbse, '57' )] = """Monomer B from Benzene-N-methylacetamide, NH-pi """
TAGL['%s-%s' % (dbse, '58' )] = """Pyridine Dimer, CH-N """
TAGL['%s-%s-dimer' % (dbse, '58' )] = """Dimer from Pyridine Dimer, CH-N """
TAGL['%s-%s-monoA-CP' % (dbse, '58' )] = """Monomer A from Pyridine Dimer, CH-N """
TAGL['%s-%s-monoB-CP' % (dbse, '58' )] = """Monomer B from Pyridine Dimer, CH-N """
TAGL['%s-%s-monoA-unCP' % (dbse, '58' )] = """Monomer A from Pyridine Dimer, CH-N """
TAGL['%s-%s-monoB-unCP' % (dbse, '58' )] = """Monomer B from Pyridine Dimer, CH-N """
TAGL['%s-%s' % (dbse, '59' )] = """Ethyne-Water, CH-O """
TAGL['%s-%s-dimer' % (dbse, '59' )] = """Dimer from Ethyne-Water, CH-O """
TAGL['%s-%s-monoA-CP' % (dbse, '59' )] = """Monomer A from Ethyne-Water, CH-O """
TAGL['%s-%s-monoB-CP' % (dbse, '59' )] = """Monomer B from Ethyne-Water, CH-O """
TAGL['%s-%s-monoA-unCP' % (dbse, '59' )] = """Monomer A from Ethyne-Water, CH-O """
TAGL['%s-%s-monoB-unCP' % (dbse, '59' )] = """Monomer B from Ethyne-Water, CH-O """
TAGL['%s-%s' % (dbse, '60' )] = """Ethyne-Acetic Acid, OH-pi """
TAGL['%s-%s-dimer' % (dbse, '60' )] = """Dimer from Ethyne-Acetic Acid, OH-pi """
TAGL['%s-%s-monoA-CP' % (dbse, '60' )] = """Monomer A from Ethyne-Acetic Acid, OH-pi """
TAGL['%s-%s-monoB-CP' % (dbse, '60' )] = """Monomer B from Ethyne-Acetic Acid, OH-pi """
TAGL['%s-%s-monoA-unCP' % (dbse, '60' )] = """Monomer A from Ethyne-Acetic Acid, OH-pi """
TAGL['%s-%s-monoB-unCP' % (dbse, '60' )] = """Monomer B from Ethyne-Acetic Acid, OH-pi """
TAGL['%s-%s' % (dbse, '61' )] = """Pentane-Acetic Acid """
TAGL['%s-%s-dimer' % (dbse, '61' )] = """Dimer from Pentane-Acetic Acid """
TAGL['%s-%s-monoA-CP' % (dbse, '61' )] = """Monomer A from Pentane-Acetic Acid """
TAGL['%s-%s-monoB-CP' % (dbse, '61' )] = """Monomer B from Pentane-Acetic Acid """
TAGL['%s-%s-monoA-unCP' % (dbse, '61' )] = """Monomer A from Pentane-Acetic Acid """
TAGL['%s-%s-monoB-unCP' % (dbse, '61' )] = """Monomer B from Pentane-Acetic Acid """
TAGL['%s-%s' % (dbse, '62' )] = """Pentane-Acetamide """
TAGL['%s-%s-dimer' % (dbse, '62' )] = """Dimer from Pentane-Acetamide """
TAGL['%s-%s-monoA-CP' % (dbse, '62' )] = """Monomer A from Pentane-Acetamide """
TAGL['%s-%s-monoB-CP' % (dbse, '62' )] = """Monomer B from Pentane-Acetamide """
TAGL['%s-%s-monoA-unCP' % (dbse, '62' )] = """Monomer A from Pentane-Acetamide """
TAGL['%s-%s-monoB-unCP' % (dbse, '62' )] = """Monomer B from Pentane-Acetamide """
TAGL['%s-%s' % (dbse, '63' )] = """Benzene-Acetic Acid """
TAGL['%s-%s-dimer' % (dbse, '63' )] = """Dimer from Benzene-Acetic Acid """
TAGL['%s-%s-monoA-CP' % (dbse, '63' )] = """Monomer A from Benzene-Acetic Acid """
TAGL['%s-%s-monoB-CP' % (dbse, '63' )] = """Monomer B from Benzene-Acetic Acid """
TAGL['%s-%s-monoA-unCP' % (dbse, '63' )] = """Monomer A from Benzene-Acetic Acid """
TAGL['%s-%s-monoB-unCP' % (dbse, '63' )] = """Monomer B from Benzene-Acetic Acid """
TAGL['%s-%s' % (dbse, '64' )] = """N-methylacetamide-Ethene """
TAGL['%s-%s-dimer' % (dbse, '64' )] = """Dimer from N-methylacetamide-Ethene """
TAGL['%s-%s-monoA-CP' % (dbse, '64' )] = """Monomer A from N-methylacetamide-Ethene """
TAGL['%s-%s-monoB-CP' % (dbse, '64' )] = """Monomer B from N-methylacetamide-Ethene """
TAGL['%s-%s-monoA-unCP' % (dbse, '64' )] = """Monomer A from N-methylacetamide-Ethene """
TAGL['%s-%s-monoB-unCP' % (dbse, '64' )] = """Monomer B from N-methylacetamide-Ethene """
TAGL['%s-%s' % (dbse, '65' )] = """Pyridine-Ethyne """
TAGL['%s-%s-dimer' % (dbse, '65' )] = """Dimer from Pyridine-Ethyne """
TAGL['%s-%s-monoA-CP' % (dbse, '65' )] = """Monomer A from Pyridine-Ethyne """
TAGL['%s-%s-monoB-CP' % (dbse, '65' )] = """Monomer B from Pyridine-Ethyne """
TAGL['%s-%s-monoA-unCP' % (dbse, '65' )] = """Monomer A from Pyridine-Ethyne """
TAGL['%s-%s-monoB-unCP' % (dbse, '65' )] = """Monomer B from Pyridine-Ethyne """
TAGL['%s-%s' % (dbse, '66' )] = """Methylamine-Pyridine """
TAGL['%s-%s-dimer' % (dbse, '66' )] = """Dimer from Methylamine-Pyridine """
TAGL['%s-%s-monoA-CP' % (dbse, '66' )] = """Monomer A from Methylamine-Pyridine """
TAGL['%s-%s-monoB-CP' % (dbse, '66' )] = """Monomer B from Methylamine-Pyridine """
TAGL['%s-%s-monoA-unCP' % (dbse, '66' )] = """Monomer A from Methylamine-Pyridine """
TAGL['%s-%s-monoB-unCP' % (dbse, '66' )] = """Monomer B from Methylamine-Pyridine """
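# The tag lines above follow a fixed key scheme: '%s-%s' % (dbse, index)
# labels the full dimer reaction, while the '-dimer', '-monoA-CP',
# '-monoB-CP', '-monoA-unCP', and '-monoB-unCP' suffixes label the
# counterpoise-corrected (CP) and uncorrected (unCP) fragment computations
# that enter each interaction energy.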
# <<< Geometry Specification Strings >>>
GEOS = {}
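# Each molecule below is keyed as '%s-%s-dimer' % (dbse, index) and built
# from a Cartesian geometry string: a charge/multiplicity line per fragment,
# fragments separated by '--', and coordinates in angstroms. A minimal
# access sketch using only names defined in this module:
#
#     water_dimer = GEOS['%s-%s-dimer' % (dbse, '1')]
#
# Monomer entries (e.g. the '-monoA-CP' keys tagged above) are typically
# derived from the stored dimer geometries by the database driver rather
# than written out here (an assumption based on the usual psi4 database
# layout, not something this section states explicitly).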
GEOS['%s-%s-dimer' % (dbse, '1')] = qcdb.Molecule("""
0 1
O -0.70219605 -0.05606026 0.00994226
H -1.02219322 0.84677578 -0.01148871
H 0.25752106 0.04212150 0.00521900
--
0 1
O 2.22087107 0.02671679 0.00062048
H 2.59749268 -0.41166327 0.76674486
H 2.59313538 -0.44949618 -0.74478203
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '2')] = qcdb.Molecule("""
0 1
O -0.52532979 -0.05097108 -0.31451686
H -0.94200663 0.74790163 0.01125282
H 0.40369652 0.05978598 -0.07356837
--
0 1
O 2.31663329 0.04550085 0.07185839
H 2.68461611 -0.52657655 0.74938672
C 2.78163836 -0.42612907 -1.19030072
H 2.35082127 0.22496462 -1.94341475
H 3.86760205 -0.37533621 -1.26461265
H 2.45329574 -1.44599856 -1.38938136
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '3')] = qcdb.Molecule("""
0 1
O -0.68746490 -0.11174433 -0.01962547
H -1.04612154 0.77593821 0.01270684
H 0.27404252 0.02585065 -0.00349726
--
0 1
N 2.23397617 0.10318260 0.00585368
H 2.52934060 -0.44945538 -0.78893718
H 2.54405666 -0.40753849 0.82271317
C 2.89331145 1.41154656 -0.03438796
H 2.58276902 1.99327152 0.83012746
H 3.98462074 1.37225159 -0.04334363
H 2.56659917 1.94746403 -0.92221177
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '4')] = qcdb.Molecule("""
0 1
O -0.39201845 -0.38471874 0.07607132
H -0.91146085 0.41381204 0.17764877
H 0.52490382 -0.06848469 0.09051136
--
0 1
C 2.19770521 -2.24540349 -0.23031325
H 2.84766805 -3.10651537 -0.36322864
H 1.51672924 -2.16793143 -1.07417853
H 1.58468831 -2.38419948 0.65669511
C 2.95243729 -0.94739061 -0.09771974
O 2.37572184 0.12790424 0.05886900
N 4.30307041 -1.04489330 -0.16233771
H 4.70402204 -1.95542728 -0.29185281
C 5.17131253 0.10707716 -0.05289463
H 4.53481840 0.97537761 0.08188998
H 5.83690203 0.01562196 0.80319825
H 5.76577825 0.23649765 -0.95515382
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '5')] = qcdb.Molecule("""
0 1
O -0.63613493 -0.02328241 0.28059932
H 0.30809737 -0.04707875 0.07646369
C -1.15206541 -1.31128778 0.01525955
H -2.20994502 -1.29626539 0.26395586
H -1.05661024 -1.59267086 -1.03619061
H -0.67483575 -2.08627276 0.62051145
--
0 1
O 2.21041928 -0.12212177 -0.01210270
H 2.67920859 0.49226275 -0.58176865
C 2.71925320 0.03489717 1.30961462
H 2.16568412 -0.65329926 1.93974550
H 3.77824931 -0.21554173 1.36633776
H 2.56681356 1.04559122 1.68750717
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '6')] = qcdb.Molecule("""
0 1
O -0.70692019 0.04583037 0.00638610
H 0.26562361 0.07171014 0.00133929
C -1.07667067 -1.31391581 0.00161428
H -2.16292358 -1.36319577 0.00586542
H -0.72340594 -1.84465168 -0.88774350
H -0.71607978 -1.85282083 0.88307978
--
0 1
N 2.20127244 -0.03642087 -0.00333839
H 2.57189199 0.47135563 0.78979400
H 2.57201528 0.42791769 -0.82259722
C 2.67902438 -1.42245432 0.03412282
H 2.28713954 -1.95647960 -0.82806891
H 3.76573553 -1.52918949 0.03715731
H 2.28689798 -1.90918449 0.92375496
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '7')] = qcdb.Molecule("""
0 1
O -0.20877739 -0.21687067 -1.03240597
H 0.71112593 -0.38689175 -0.77396240
C -1.02217337 -0.74117114 -0.00545419
H -2.05749119 -0.53870733 -0.26859725
H -0.90774336 -1.82182632 0.10853710
H -0.82463111 -0.27549472 0.96464547
--
0 1
C 1.97349049 1.90322403 0.43230118
H 2.47988412 2.86467311 0.39743082
H 1.56294637 1.75708815 1.43017782
H 1.14384269 1.89371075 -0.26920435
C 2.88912087 0.74828521 0.11638497
O 2.46492608 -0.37162558 -0.16869657
N 4.21525779 1.01000949 0.17558433
H 4.51327024 1.92043762 0.47327152
C 5.19766382 -0.03010182 -0.04715949
H 4.84110663 -0.68103914 -0.83933645
H 6.13803306 0.42342202 -0.34567319
H 5.35717393 -0.63462872 0.84491605
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '8')] = qcdb.Molecule("""
0 1
O -0.78656202 0.04516844 -0.00718912
H 0.17770677 0.01269590 -0.00683539
C -1.24799094 -1.29028354 0.00108362
H -2.33427744 -1.25889710 0.00022120
H -0.92596575 -1.84976810 -0.88044538
H -0.92702783 -1.83846288 0.89007652
--
0 1
O 2.12888314 -0.05133660 -0.00474093
H 2.56808728 0.33681560 -0.76461362
H 2.56676744 0.35126768 0.74834860
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '9')] = qcdb.Molecule("""
0 1
N -0.89345122 -0.04384432 -0.04299745
H 0.09694826 -0.25605945 -0.07106993
H -1.36843879 -0.93339065 0.03383773
C -1.17578248 0.75790769 1.14523719
H -2.24162660 0.97221601 1.19502464
H -0.88078955 0.30424674 2.09720910
H -0.66300572 1.71432940 1.06080916
--
0 1
O 2.28445953 -0.04747650 0.02782522
H 2.56648565 0.32247227 -0.81203886
C 2.67037338 0.86410776 1.04726138
H 2.34719033 0.43447509 1.99032792
H 3.75142862 1.00319123 1.08630135
H 2.19189882 1.83770561 0.93208484
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '10')] = qcdb.Molecule("""
0 1
N -0.63864138 0.47091637 0.04456848
H 0.18995436 -0.11393716 -0.00577361
H -1.30046894 0.08125680 -0.61366848
C -1.19865882 0.39139858 1.39194660
H -2.09273777 1.00924471 1.45316749
H -1.46274551 -0.61584367 1.72945219
H -0.48027554 0.79867491 2.10108731
--
0 1
N 2.39889347 -0.45552115 0.19704452
H 2.69516214 -0.18098342 -0.73094072
H 3.02244314 -1.20321147 0.47223938
C 2.55912345 0.67968944 1.11071982
H 2.28893315 0.36499366 2.11637293
H 3.56653376 1.10146600 1.14769156
H 1.86658307 1.46546492 0.81806258
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '11')] = qcdb.Molecule("""
0 1
N -0.56970824 0.81437245 0.10109775
H 0.13087774 0.56141065 -0.58761455
H -1.46125215 0.52691480 -0.28042996
C -0.30551437 0.06571030 1.32879173
H -1.05714948 0.31427017 2.07595940
H -0.28802353 -1.02229248 1.21484626
H 0.66045772 0.36850913 1.73024224
--
0 1
C 2.25689155 2.69009990 -0.14932730
H 2.38151002 3.10127663 -1.14837163
H 2.76346292 3.33109245 0.56845722
H 1.19047979 2.66357037 0.06909413
C 2.76888324 1.27230222 -0.14703327
O 2.30890335 0.40656580 -0.88620788
N 3.75536621 0.99926987 0.74529744
H 4.15512723 1.75420265 1.27065019
C 4.34381155 -0.32032067 0.82279701
H 3.55563493 -1.06165082 0.72977641
H 5.06507133 -0.49231605 0.02425262
H 4.83846506 -0.43618886 1.78273654
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '12')] = qcdb.Molecule("""
0 1
N -0.53346397 -0.27959351 0.10699576
H -0.62915138 -1.24842455 0.38284867
H -1.12260363 -0.16615944 -0.70776410
C -1.01690943 0.58848610 1.18737346
H -0.91275967 1.62555174 0.87952116
H -2.05473726 0.41508213 1.47850360
H -0.38502338 0.44880090 2.06061419
--
0 1
O 2.09326841 0.91731136 0.21209725
H 1.27575101 0.42103887 0.03894435
H 2.67516986 0.65881349 -0.50364884
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '13')] = qcdb.Molecule("""
0 1
C -0.84931672 -0.33949876 2.49171664
H 0.18434396 -0.01104732 2.41618542
H -0.88249791 -1.34205140 2.91270310
H -1.39080263 0.31687828 3.16842897
C -1.56403192 -0.35332311 1.15947545
O -2.74952638 -0.65153776 1.05676087
N -0.80165352 -0.02735461 0.08834167
H 0.16118756 0.24036035 0.21871364
C -1.38534986 -0.00235149 -1.23413683
H -1.89161720 -0.94280123 -1.44009631
H -2.11997230 0.79621180 -1.33087952
H -0.59464593 0.14957065 -1.96312772
--
0 1
O 2.13706570 0.25201737 0.45371880
H 2.85792051 0.87931700 0.54413361
C 2.65614986 -1.05334828 0.68760059
H 1.82357836 -1.74213597 0.58202402
H 3.42228862 -1.32234103 -0.03928018
H 3.06424691 -1.15479748 1.69323508
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '14')] = qcdb.Molecule("""
0 1
C -0.77857334 -0.46332064 2.49038768
H 0.22474462 -0.05095294 2.41348355
H -0.72247994 -1.48709180 2.85458464
H -1.35190757 0.11081693 3.21368365
C -1.52050259 -0.45662769 1.17232500
O -2.70083521 -0.78358573 1.08959682
N -0.79195361 -0.06964048 0.10058937
H 0.19411165 0.14570790 0.20292464
C -1.39779834 -0.05608245 -1.21131793
H -2.31492801 0.52889121 -1.19970991
H -0.69880422 0.38726130 -1.91536621
H -1.65298232 -1.06152895 -1.54543495
--
0 1
N 2.23828822 0.25457428 0.28251924
H 2.64195454 0.79449381 1.03771933
H 2.65629209 0.62195553 -0.56312668
C 2.61059106 -1.15660854 0.43627199
H 2.18430366 -1.72764112 -0.38510346
H 3.68598970 -1.34329798 0.46205539
H 2.17611849 -1.54101555 1.35610799
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '15')] = qcdb.Molecule("""
0 1
C -0.70150294 -0.29062770 2.40688440
H -1.18329596 0.39564777 3.09887422
H 0.34956157 -0.03032157 2.30783303
H -0.79405685 -1.29160545 2.82403929
C -1.44854625 -0.24487664 1.09181530
O -2.66045000 -0.42847909 1.03434577
N -0.67005656 0.00591656 0.00977691
H 0.32667532 0.12256396 0.14159284
C -1.22705457 0.08979374 -1.31996754
H -2.29202426 -0.10650119 -1.24087756
H -1.07780169 1.07994030 -1.74854354
H -0.77662849 -0.64799919 -1.98337273
--
0 1
C 2.04177491 -2.35169797 0.68639761
H 2.59999972 -3.26170120 0.48048961
H 1.11308306 -2.35822742 0.12207220
H 1.78255599 -2.32825127 1.74333861
C 2.80941086 -1.09728593 0.35016088
O 2.26422421 0.00415088 0.29318848
N 4.13616907 -1.26609970 0.13641291
H 4.51249037 -2.19334539 0.21317023
C 5.02340725 -0.15963372 -0.15253563
H 4.40921487 0.73117605 -0.23235934
H 5.75082180 -0.02016799 0.64486768
H 5.54839755 -0.31961545 -1.09167796
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '16')] = qcdb.Molecule("""
0 1
C -0.72430464 -0.70493582 2.28386786
H 0.33531828 -0.62994325 2.05318235
H -0.95169666 -1.71198961 2.62565146
H -0.96962784 -0.02207955 3.09376537
C -1.61493501 -0.38742925 1.10406897
O -2.83732387 -0.41502209 1.19413277
N -0.95342037 -0.07640442 -0.04081980
H 0.05380860 -0.07556651 -0.03664022
C -1.65812397 0.25009358 -1.25855306
H -2.72037197 0.17694444 -1.04665270
H -1.43030493 1.26296263 -1.58809384
H -1.40562611 -0.44433518 -2.05858358
--
0 1
O 2.10277707 -0.05840697 -0.15507669
H 2.66775436 -0.77136560 -0.46027609
H 2.68252869 0.70578659 -0.13117819
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '17')] = qcdb.Molecule("""
0 1
N -0.72999913 0.02276763 0.00091465
H 0.29842255 0.07400447 0.00162304
C -1.29682453 -1.24042682 0.00150234
O -0.59409886 -2.25351751 0.00263371
C -2.74362229 -1.26233170 0.00047938
H -3.24959045 -2.21183517 0.00083311
C -3.42201997 -0.09590921 -0.00092259
H -4.50089709 -0.04921603 -0.00174546
N -2.77483684 1.10540895 -0.00141807
H -3.28383807 1.97387739 -0.00248574
C -1.39147866 1.23701978 -0.00052538
O -0.83984371 2.31703528 -0.00100125
--
0 1
N 4.14382946 -1.08570382 0.00049928
H 4.59107325 -0.17913062 0.00088609
C 4.99987723 -2.20032161 -0.00100060
O 6.20932926 -2.04861719 -0.00174980
C 4.28565880 -3.46249515 -0.00150500
H 4.85224335 -4.37752590 -0.00264363
C 2.93548983 -3.46631302 -0.00054490
H 2.35852659 -4.37927779 -0.00086358
N 2.19749842 -2.31543218 0.00090551
H 1.17116216 -2.33687498 0.00158258
C 2.77026935 -1.07076714 0.00145616
O 2.11994847 -0.02954883 0.00269255
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '18')] = qcdb.Molecule("""
0 1
O -0.55283102 -0.10169749 -0.00049879
H -0.87175963 0.80179220 0.00014440
H 0.41265950 -0.00183225 -0.00025181
--
0 1
N 2.36402099 0.09662268 0.00014680
C 3.05992763 0.06265189 1.14489465
H 2.47525508 0.08626283 2.05576267
C 4.44895122 -0.00253054 1.19489071
H 4.95485760 -0.02738470 2.14921983
C 5.16011436 -0.03565634 -0.00002044
H 6.23995431 -0.08742989 -0.00010086
C 4.44880607 -0.00259720 -1.19482173
H 4.95460301 -0.02747022 -2.14922033
C 3.05977605 0.06259779 -1.14467547
H 2.47500717 0.08619845 -2.05546803
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '19')] = qcdb.Molecule("""
0 1
O -0.62765177 0.08746727 0.00147128
H 0.34360203 0.12230333 -0.00060045
C -0.97793123 -1.27855601 0.00123841
H -2.06339209 -1.34204332 0.00500898
H -0.61488369 -1.80637584 -0.88538395
H -0.60864033 -1.80823682 0.88417273
--
0 1
N 2.27233665 0.01643230 -0.00162684
C 2.96870504 -0.00800303 -1.14634644
H 2.38422645 0.01522051 -2.05732188
C 4.35834211 -0.05774589 -1.19503169
H 4.86569445 -0.07503793 -2.14881442
C 5.06871533 -0.08345851 0.00058133
H 6.14905134 -0.12122326 0.00143063
C 4.35646788 -0.05843740 1.19512119
H 4.86226662 -0.07626173 2.14960688
C 2.96691424 -0.00868772 1.14416710
H 2.38090845 0.01398671 2.05428579
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '20')] = qcdb.Molecule("""
0 1
C -1.06170920 1.29714057 0.29206000
O -0.35816112 2.27045861 0.53181267
O -0.58930352 0.09491776 0.00378881
H 0.40443566 0.12772262 0.01841184
C -2.55842780 1.34254982 0.29625732
H -2.89599798 2.34746400 0.51831634
H -2.93288928 1.02239045 -0.67299555
H -2.93721196 0.64491043 1.03955708
--
0 1
C 2.78934845 1.10841924 0.27118376
O 2.08573008 0.13510475 0.03139616
O 2.31692211 2.31085463 0.55896223
H 1.32313357 2.27795640 0.54456172
C 4.28606090 1.06251650 0.26921936
H 4.62364046 0.06119730 0.03169387
H 4.66755944 1.77286944 -0.46024953
H 4.65757721 1.36521101 1.24527472
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '21')] = qcdb.Molecule("""
0 1
C -1.30974974 1.18017617 -0.02517034
O -0.72530044 2.15514767 0.45271335
N -0.66562116 0.09505470 -0.49199449
H 0.35458266 0.05144817 -0.45930922
H -1.18362704 -0.67359969 -0.87075610
C -2.81671934 1.15599865 -0.11060597
H -3.22062895 1.26254146 0.89308239
H -3.20942754 0.24863402 -0.56190009
H -3.14315813 2.01659563 -0.68889311
--
0 1
C 2.77960183 1.06388568 0.13435724
O 2.19518007 0.08986525 -0.34537373
N 2.13551426 2.14862891 0.60220379
H 1.11540890 2.19306669 0.56790248
H 2.65353833 2.91659011 0.98232444
C 4.28660101 1.08817006 0.21958232
H 4.67847207 1.98781958 0.68676633
H 4.69015720 1.00062503 -0.78619798
H 4.61437977 0.21759516 0.78176266
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '22')] = qcdb.Molecule("""
0 1
C -1.11362611 1.32702009 0.27516705
O -0.46708264 2.34938778 0.46153746
O -0.57808939 0.13692049 0.04961747
H 0.41332036 0.20325661 0.05548711
C -2.61142469 1.28618957 0.27736131
H -3.00664872 2.27688545 0.46578983
H -2.96425623 0.91525868 -0.68200123
H -2.95311421 0.59179821 1.04124041
--
0 1
N 4.18869738 1.08795338 0.18288157
H 4.58190249 0.17256315 0.01116215
C 5.11022529 2.13606900 0.36433468
O 6.30737167 1.91777319 0.31145472
C 4.47115922 3.41553138 0.60494183
H 5.09069398 4.28245626 0.75641911
C 3.12407502 3.49552153 0.63432307
H 2.60123483 4.42396853 0.80962128
N 2.32034427 2.40483955 0.44391704
H 1.29629244 2.47478724 0.46770730
C 2.82027675 1.15461676 0.20974482
O 2.10824430 0.16511187 0.03627464
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '23')] = qcdb.Molecule("""
0 1
C -1.23272700 1.21163896 -0.14162406
O -0.57127667 2.24201573 0.02561679
N -0.67058051 0.00388878 -0.31428147
H 0.34384695 -0.09056011 -0.30832667
H -1.24421373 -0.80632370 -0.44668271
C -2.73824495 1.26675766 -0.15588657
H -3.07797534 1.64660511 0.80450159
H -3.20211503 0.30286549 -0.34621112
H -3.04998747 1.97549049 -0.91859737
--
0 1
N 4.19521289 1.11742864 -0.11954193
H 4.68524234 0.24147146 -0.23748040
C 4.99883890 2.26027358 0.03093977
O 6.21440093 2.16465126 0.01575499
C 4.22624673 3.47559007 0.19408371
H 4.74800972 4.40878293 0.31711883
C 2.87708602 3.41391454 0.18840695
H 2.25668197 4.29027492 0.30608385
N 2.19200391 2.24163303 0.03384119
H 1.15921343 2.23257196 0.03300387
C 2.82289388 1.03716353 -0.12841885
O 2.22570515 -0.02675243 -0.27022634
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '24')] = qcdb.Molecule("""
0 1
C 0.71264532 1.12099570 0.06054078
H 1.35784165 1.98639917 0.12773717
C 1.25823573 -0.15925190 0.12423352
H 2.32495428 -0.28709988 0.24674303
C 0.42688496 -1.27452666 0.04265043
H 0.85044465 -2.26843268 0.09474995
C -0.94957784 -1.11007406 -0.10031360
H -1.59445570 -1.97627370 -0.16371348
C -1.49552564 0.17105056 -0.16154602
H -2.56378279 0.29922115 -0.27370311
C -0.66382760 1.28664289 -0.08340143
H -1.08690070 2.28100020 -0.13288613
--
0 1
C 1.98776046 1.10975720 3.71031958
H 2.63260558 1.97594094 3.77407030
C 2.53371358 -0.17139390 3.77183931
H 3.60192047 -0.29954095 3.88458353
C 1.70206410 -1.28699400 3.69318889
H 2.12514581 -2.28134643 3.74284255
C 0.32566254 -1.12135897 3.54847214
H -0.31944006 -1.98676921 3.48083951
C -0.21989733 0.15887378 3.48450631
H -1.28652536 0.28670299 3.36132755
C 0.61137962 1.27415454 3.56657725
H 0.18785474 2.26805957 3.51420832
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '25')] = qcdb.Molecule("""
0 1
N 1.57248145 0.25454916 -0.25648131
C 0.96935990 -0.90316032 0.04452614
H 1.61363891 -1.77218120 0.10234520
C -0.39815811 -1.02881911 0.28096043
H -0.81842477 -1.99173710 0.53356364
C -1.19580525 0.10655779 0.19539732
H -2.26068964 0.04953865 0.37344280
C -0.58712829 1.31741239 -0.12010544
H -1.16181223 2.22950003 -0.20046257
C 0.78854733 1.33970567 -0.33224053
H 1.28843202 2.26879436 -0.57852690
--
0 1
N -0.53372327 -1.51586163 3.84414371
C -1.46620136 -0.55523217 3.91799487
H -2.46899061 -0.88618697 4.16018773
C -1.20419832 0.79583625 3.70861549
H -2.00275608 1.52034169 3.78688658
C 0.09522901 1.18507754 3.39834708
H 0.33721357 2.22407602 3.22247582
C 1.07478832 0.20217938 3.31498561
H 2.09708956 0.44892512 3.06654863
C 0.71230860 -1.12295838 3.54817861
H 1.45616936 -1.90851301 3.49173001
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '26')] = qcdb.Molecule("""
0 1
N 1.37690111 0.83974747 0.73462494
H 1.05181240 1.38622385 1.52335563
C 1.30898271 1.45752981 -0.52065500
O 0.92056136 2.61107777 -0.62597673
N 2.01142293 -1.21320830 -0.09807182
H 1.72728551 0.99084268 -2.61199556
C 2.02573687 -0.69717123 -1.36439740
H 2.29751698 -1.39106004 -2.14564531
C 1.71451235 0.59193780 -1.61248722
H 2.12945422 -2.20152091 0.05682913
C 1.64594503 -0.48520598 1.01871830
O 1.56111602 -0.97181638 2.12980905
--
0 1
N -1.35546089 -0.83604594 0.73462494
H -1.03037218 -1.38252232 1.52335563
C -1.28754249 -1.45382828 -0.52065500
O -0.89912114 -2.60737623 -0.62597673
N -1.98998271 1.21690983 -0.09807182
H -1.70584529 -0.98714115 -2.61199556
C -2.00429665 0.70087276 -1.36439740
H -2.27607676 1.39476157 -2.14564531
C -1.69307213 -0.58823627 -1.61248722
H -2.10801399 2.20522244 0.05682913
C -1.62450481 0.48890751 1.01871830
O -1.53967580 0.97551791 2.12980905
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '27')] = qcdb.Molecule("""
0 1
C 0.81874699 0.86417234 0.18828612
H 1.46611361 1.71666767 0.34472141
C 1.36899712 -0.39052394 -0.06669818
H 2.44303637 -0.51186194 -0.11057444
C 0.53437860 -1.48849320 -0.27188804
H 0.96084825 -2.46156422 -0.47550749
C -0.84911561 -1.33050735 -0.21989643
H -1.49706942 -2.18186028 -0.37955321
C -1.39948546 -0.07603020 0.04043417
H -2.47268667 0.04490778 0.09338206
C -0.56529230 1.02140336 0.24227921
H -0.99255667 1.99366131 0.44625817
--
0 1
N -2.39843199 0.16214088 3.52041137
C -1.78354606 1.31980869 3.80047556
H -2.43115011 2.17298014 3.96298765
C -0.40133116 1.46065642 3.89064637
H 0.03051760 2.42430654 4.12186267
C 0.39962023 0.34367712 3.67643246
H 1.47718940 0.41406140 3.73126697
C -0.22093167 -0.86497792 3.38277288
H 0.35484284 -1.76059980 3.19869795
C -1.61144595 -0.90301580 3.31732347
H -2.12029887 -1.83146918 3.08848079
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '28')] = qcdb.Molecule("""
0 1
C 0.82576911 1.23652484 -0.04025044
H 1.52101317 2.06312520 -0.08247145
C 1.30015992 -0.06294088 0.12725601
H 2.36365753 -0.24226113 0.20767420
C 0.40352312 -1.12855218 0.19824486
H 0.77375338 -2.13742677 0.32412109
C -0.96780949 -0.89519049 0.10313994
H -1.66520900 -1.71998342 0.16042745
C -1.44350838 0.40448328 -0.06244130
H -2.50751124 0.58550112 -0.12415016
C -0.54575549 1.46876875 -0.13624741
H -0.91422190 2.47742220 -0.26785516
--
0 1
N -0.27488064 0.67158742 3.21864568
H -0.64818803 1.57334885 2.95575271
C 1.11726604 0.59860052 3.35065902
O 1.80817636 1.59302421 3.20582496
C 1.59616616 -0.73547719 3.66876922
H 2.65321825 -0.88769313 3.80289036
C 0.71645693 -1.74985837 3.79498575
H 1.02238445 -2.75827898 4.03151011
N -0.62878896 -1.56482645 3.62489361
H -1.27753679 -2.32738539 3.72376278
C -1.20323727 -0.34002542 3.32547899
O -2.40102568 -0.18920215 3.18336680
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '29')] = qcdb.Molecule("""
0 1
N 1.21075533 0.02867578 0.32971111
C 0.61193497 -1.15844901 0.15345176
H 1.25147791 -2.02952340 0.21929295
C -0.75131399 -1.30864956 -0.08883407
H -1.17041577 -2.29686932 -0.21338320
C -1.54786767 -0.16994027 -0.15646691
H -2.61101275 -0.24595469 -0.33875574
C -0.94362237 1.07063612 0.01982310
H -1.51881431 1.98450028 -0.01164403
C 0.42771857 1.11610863 0.25734879
H 0.92469451 2.06805173 0.39754798
--
0 1
N -0.71316758 -0.28394932 3.29752332
H -1.60805660 -0.71581281 3.11291983
C -0.71291270 1.11386048 3.39053432
O -1.75279577 1.74206028 3.27568419
C 0.60658206 1.67294182 3.61809739
H 0.70789842 2.74016399 3.71396557
C 1.67645565 0.85424952 3.68961744
H 2.68033469 1.22291422 3.83804398
N 1.55839451 -0.50304375 3.57706278
H 2.37183050 -1.09523110 3.56889514
C 0.35794757 -1.15027617 3.35068108
O 0.26581032 -2.35569425 3.21710180
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '30')] = qcdb.Molecule("""
0 1
C 0.83551718 1.11516693 0.02140131
H 1.48432398 1.98060858 0.01953430
C 1.38327497 -0.16614721 0.02376531
H 2.45714902 -0.29520468 0.02277108
C 0.54755466 -1.28131632 0.02168563
H 0.97293610 -2.27580453 0.01977853
C -0.83552313 -1.11516159 0.02139907
H -1.48433419 -1.98060640 0.01953009
C -1.38328358 0.16615413 0.02375775
H -2.45715618 0.29520906 0.02275707
C -0.54756577 1.28132347 0.02168025
H -0.97294284 2.27580548 0.01976873
--
0 1
C 0.65578060 -0.11679048 3.53075174
H 1.04724138 -1.12390931 3.52628348
H 1.37085438 0.69327350 3.52625015
C -0.65577592 0.11679215 3.53076063
H -1.37084787 -0.69327237 3.52626454
H -1.04723903 1.12391105 3.52630243
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '31')] = qcdb.Molecule("""
0 1
N -0.05087365 -0.98008127 0.03396219
H -0.05322205 -1.99069374 0.04982167
C -1.30881316 -0.36187638 0.00402596
O -2.32722000 -1.03255492 -0.00582886
C -1.23681849 1.08804829 -0.01222440
H -2.15273897 1.65146044 -0.05477443
C -0.03519433 1.69783584 0.03370483
H 0.07036636 2.77247575 0.03188224
N 1.13452913 0.99028251 0.09184461
H 2.02372032 1.45677218 0.15569277
C 1.19318599 -0.39183287 0.11577512
O 2.23639797 -1.01118826 0.19418562
--
0 1
C 0.72600726 0.02505349 3.39819044
H 1.24312499 -0.84593440 3.02096384
H 1.33161826 0.81204754 3.82550477
C -0.60276924 0.12564394 3.34894351
H -1.21477213 -0.66183565 2.93204279
H -1.11459423 0.99671353 3.73294327
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '32')] = qcdb.Molecule("""
0 1
N -0.05545357 -0.94799090 0.01001028
H -0.05731609 -1.95771330 0.05505287
C -1.31395971 -0.33514498 -0.06458622
O -2.32889664 -1.00790087 -0.12310273
C -1.24835877 1.11605191 -0.06650860
H -2.16434937 1.67533298 -0.14710244
C -0.05308010 1.73142748 0.03419541
H 0.04811054 2.80642986 0.04341968
N 1.11592628 1.02759107 0.13516893
H 1.99665515 1.49727976 0.26162029
C 1.17534700 -0.35380470 0.17616616
O 2.21463146 -0.96646542 0.33517250
--
0 1
C 0.70785184 -0.17230221 3.27635136
H 1.70367011 -0.52628807 3.16213263
C -0.43675225 0.21415547 3.38254320
H -1.44163480 0.54285582 3.48290737
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '33')] = qcdb.Molecule("""
0 1
N 1.38138219 -0.00023348 0.13146374
C 0.67935079 -1.14023946 0.09207966
H 1.25871960 -2.05496223 0.12588361
C -0.70972232 -1.19311407 0.00666426
H -1.21408768 -2.14856163 -0.02530851
C -1.42161357 0.00013343 -0.04081690
H -2.50069615 0.00025757 -0.10916973
C -0.70940120 1.19317538 0.00652198
H -1.21351163 2.14874784 -0.02552831
C 0.67965167 1.13995623 0.09189303
H 1.25926073 2.05451090 0.12550248
--
0 1
C 0.01960458 0.66643934 3.48727228
H 0.93007858 1.22592506 3.32815744
H -0.88994292 1.22884357 3.64423278
C 0.01993726 -0.66624796 3.48740452
H 0.93067296 -1.22533044 3.32839408
H -0.88935083 -1.22907273 3.64449367
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '34')] = qcdb.Molecule("""
0 1
C -2.53330865 -0.29487907 0.71314876
H -2.56362682 -0.97708181 -0.13642264
H -2.56697835 -0.89587590 1.62173177
H -3.43442611 0.31595713 0.68410447
C -1.27188487 0.55765547 0.67435468
H -1.27102630 1.25656571 1.51431940
H -1.26663255 1.16789581 -0.23182653
C -0.00013504 -0.27841822 0.71960315
H -0.00015938 -0.88722952 1.62863709
H -0.00036543 -0.98071418 -0.11940439
C 1.27189476 0.55738219 0.67406108
H 1.27097175 1.25663331 1.51370541
H 1.26663649 1.16718250 -0.23238692
C 2.53340376 -0.29494176 0.71328015
H 2.56391919 -0.97777410 -0.13577836
H 3.43430956 0.31625432 0.68359945
H 2.56755821 -0.89520887 1.62232865
--
0 1
C 2.53355730 0.29502133 4.51309986
H 2.56814179 0.89482803 3.60377431
H 2.56406061 0.97822791 5.36184468
H 3.43423799 -0.31647598 4.54330880
C 1.27173110 -0.55686594 4.55240411
H 1.26628739 -1.16659365 5.45890107
H 1.27060059 -1.25621968 3.71282305
C -0.00004389 0.27923316 4.50678767
H -0.00019882 0.98154314 5.34577214
H 0.00003301 0.88800958 3.59771803
C -1.27180473 -0.55690882 4.55205921
H -1.26642249 -1.16701827 5.45830931
H -1.27069839 -1.25593171 3.71219555
C -2.53352396 0.29513749 4.51308150
H -2.56771726 0.89567116 3.60420474
H -3.43432593 -0.31616087 4.54259468
H -2.56406349 0.97772373 5.36234289
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '35')] = qcdb.Molecule("""
0 1
C -2.53038287 -0.41757533 0.68130643
H -2.55988603 -0.98278998 -0.25015619
H -2.55403625 -1.13386495 1.50265790
H -3.43621355 0.18414376 0.73677133
C -1.27615683 0.44363493 0.75002483
H -1.27808384 1.02521785 1.67508548
H -1.28033899 1.16855564 -0.06715806
C 0.00220470 -0.38071620 0.67899257
H 0.00782894 -1.11141304 1.49383122
H 0.00624866 -0.96052270 -0.24882046
C 1.26833347 0.46239635 0.74936913
H 1.26201986 1.04425029 1.67424645
H 1.26163488 1.18705711 -0.06803458
C 2.53496627 -0.38042469 0.68068636
H 2.57244024 -0.94571652 -0.25045186
H 3.43198117 0.23441492 0.73557772
H 2.56920771 -1.09581003 1.50245608
--
0 1
C -0.00052120 0.06397129 5.24130633
C 0.00055054 -0.07615981 6.76103928
H -0.88648549 0.38791623 7.19440870
H 0.00980204 -1.12694006 7.05404915
H 0.87921076 0.40350475 7.19468235
C -1.23997654 -0.61768074 4.66740782
H -1.26327576 -0.52872361 3.58057863
H -1.25206217 -1.67895713 4.92042102
H -2.15092026 -0.16538948 5.06249294
C 1.25208391 -0.59356951 4.66783599
H 1.27341069 -0.50528385 3.58086503
H 1.28521444 -1.65413035 4.92192831
H 2.15389614 -0.12292620 5.06225711
C -0.01476908 1.54376378 4.86668505
H 0.86299692 2.05435080 5.26564018
H -0.01529328 1.67021871 3.78303336
H -0.90287503 2.03709750 5.26447319
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '36')] = qcdb.Molecule("""
0 1
C 0.38252221 -0.07060697 0.76689582
C -1.04063947 0.39681125 1.06093593
H -1.77157460 -0.28150025 0.61833023
H -1.22471777 0.43573509 2.13551890
H -1.21406603 1.39372444 0.65309065
C 0.59084747 -1.46681814 1.34797791
H 1.60291380 -1.82295000 1.15010285
H 0.43896858 -1.46674598 2.42828668
H -0.10991906 -2.17868425 0.90931390
C 1.37826905 0.89843536 1.39914944
H 2.40439397 0.58544074 1.20073365
H 1.24378092 0.94597430 2.48070991
H 1.24837318 1.90502262 0.99895071
C 0.60196094 -0.11103419 -0.74309659
H 0.45921182 0.87703910 -1.18289819
H 1.61369399 -0.44345945 -0.97967210
H -0.09953078 -0.79754982 -1.21922069
--
0 1
C -0.37502842 0.06931363 5.96648833
C 1.04778403 -0.39965237 5.67308879
H 1.23222323 -0.43898152 4.59856833
H 1.77921818 0.27802046 6.11582437
H 1.22004770 -1.39665841 6.08120936
C -0.58142523 1.46587516 5.38565786
H -1.59338833 1.82286061 5.58250538
H 0.11949337 2.17694663 5.82537963
H -0.42831602 1.46607177 4.30551550
C -0.59532291 0.10948985 7.47634196
H -1.60653907 0.44376683 7.71241515
H 0.10718954 0.79443888 7.95318018
H -0.45475982 -0.87903049 7.91579370
C -1.37149114 -0.89846403 5.33334194
H -1.24256513 -1.90543941 5.73292091
H -2.39738024 -0.58469117 5.53172979
H -1.23678678 -0.94543842 4.25176527
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '37')] = qcdb.Molecule("""
0 1
C 0.79991408 -1.02205164 0.68773696
H 0.85355588 -1.12205101 -0.39801435
H 1.49140210 -1.74416936 1.11972040
C 1.11688700 0.42495279 1.09966205
H 1.83814230 0.89014504 0.43045256
H 1.55556959 0.43982464 2.09708356
C -0.24455916 1.16568959 1.10297714
H -0.25807760 2.00086313 0.40532333
H -0.44880450 1.57699582 2.09098447
C -1.29871418 0.10381191 0.73930899
H -1.47356078 0.10524338 -0.33800545
H -2.25673428 0.27804118 1.22715843
C -0.64687993 -1.22006836 1.13630660
H -1.12443918 -2.08762702 0.68299327
H -0.68601864 -1.34528332 2.22022006
--
0 1
C 0.04984615 0.09420760 5.61627735
C -0.04649805 -0.05787837 7.13191782
H 0.94604832 -0.07334458 7.58427505
H -0.60542282 0.77000613 7.57035274
H -0.55366275 -0.98654445 7.39726741
C 0.76389939 1.40111272 5.28065247
H 0.84541894 1.53461185 4.20097059
H 0.22042700 2.25580115 5.68615385
H 1.77150393 1.41176313 5.69888547
C -1.35516567 0.11403225 5.01895782
H -1.31823408 0.23122219 3.93510886
H -1.93746520 0.94145581 5.42730374
H -1.88506873 -0.81375459 5.24028712
C 0.83774596 -1.07927730 5.03893917
H 0.34252564 -2.02626804 5.25918232
H 0.93258913 -0.99209454 3.95580439
H 1.84246405 -1.11668194 5.46268763
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '38')] = qcdb.Molecule("""
0 1
C 0.95688019 -0.89184563 1.14195000
H 1.50456597 -1.27835762 0.28342019
H 1.42138447 -1.31477793 2.03102546
C 0.99094943 0.65850830 1.14550384
H 1.51059446 1.02309646 0.25994788
H 1.51625823 1.05981813 2.01053703
C -0.47945194 1.10231879 1.10387910
H -0.61626861 2.06487722 0.61356737
H -0.87474223 1.18907144 2.11806960
C -1.18210650 -0.05279656 0.39334575
H -0.94888216 -0.02683030 -0.67380459
H -2.26566452 -0.03356474 0.50127403
C -0.53065958 -1.27488954 1.03930959
H -0.69039061 -2.19702093 0.48299221
H -0.95084939 -1.41541197 2.03674782
--
0 1
C -1.13198517 -0.38391856 5.05596626
H -1.46511966 -0.14721994 4.04338190
H -1.93677357 -0.92701702 5.54895277
C 0.18162128 -1.17946347 5.00820507
H 0.23156623 -1.83720616 4.14207124
H 0.26190891 -1.81082110 5.89259036
C 1.31093651 -0.11675764 5.00880116
H 1.93220146 -0.17743649 4.11692754
H 1.96834600 -0.26664069 5.86420633
C 0.60076314 1.24491110 5.11666799
H 0.42089996 1.65340289 4.12066887
H 1.18114710 1.97931461 5.67264126
C -0.74128932 0.91043867 5.76647985
H -1.48095789 1.70295043 5.66159855
H -0.60124939 0.71879862 6.83302881
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '39')] = qcdb.Molecule("""
0 1
C 0.76554546 0.86824433 0.82099095
H 1.43747647 1.68000664 1.06510281
C 1.23765260 -0.44283807 0.79388795
H 2.27575877 -0.64853808 1.01771141
C 0.37223723 -1.48853667 0.47726862
H 0.73818789 -2.50608012 0.45705609
C -0.96493318 -1.22297162 0.18687834
H -1.63645949 -2.03456079 -0.05777362
C -1.43706509 0.08840558 0.21327714
H -2.47468432 0.29430216 -0.01146746
C -0.57190649 1.13402416 0.53081281
H -0.93769935 2.15171058 0.55107764
--
0 1
C -0.76345318 -0.72677383 4.05982770
H -0.86970702 -0.55182467 2.98752083
H -1.41509075 -1.55603772 4.33297836
C 0.70608801 -0.98383692 4.40395757
H 1.20131879 -1.62142197 3.67337330
H 0.76936719 -1.48405069 5.37142421
C 1.34622506 0.42155976 4.49491043
H 1.99649337 0.61423069 3.64305751
H 1.95909224 0.51072918 5.39063579
C 0.16717893 1.42073677 4.52178247
H 0.05002744 1.87970717 3.53949713
H 0.31277252 2.22224160 5.24418107
C -1.06659283 0.56364158 4.81743133
H -1.99758134 1.03937903 4.51151819
H -1.13201859 0.35432067 5.88796657
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '40')] = qcdb.Molecule("""
0 1
C 0.31195353 0.56102334 0.49669886
H 0.74213608 1.55336911 0.48156571
C 1.14218235 -0.55807461 0.53606185
H 2.21651131 -0.43425014 0.55235015
C 0.58780415 -1.83668705 0.55414435
H 1.23191239 -2.70484153 0.58522179
C -0.79665772 -1.99637562 0.53296300
H -1.22677442 -2.98844427 0.54863708
C -1.62689297 -0.87747365 0.49416828
H -2.70112211 -1.00134997 0.47981498
C -1.07266525 0.40120590 0.47597397
H -1.71697357 1.26940117 0.44591995
--
0 1
C 0.17046797 0.50613197 4.83469402
C 1.61671665 0.68491933 4.37973254
H 2.03257337 1.61819721 4.76315552
H 2.24011597 -0.13569629 4.73858640
H 1.67732578 0.70431062 3.29079832
C 0.11607660 0.47476083 6.35955934
H -0.90971343 0.34734041 6.70864711
H 0.71148250 -0.35092603 6.75211308
H 0.50437108 1.40264546 6.78246492
C -0.37891207 -0.80336000 4.27439800
H -1.41378567 -0.95363504 4.58706959
H 0.20754451 -1.65233376 4.63020927
H -0.35013224 -0.80381278 3.18408376
C -0.67090481 1.67070366 4.31848855
H -0.64936386 1.70673405 3.22848999
H -1.71069396 1.56693409 4.63297103
H -0.29525222 2.62139813 4.70059546
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '41')] = qcdb.Molecule("""
0 1
N -0.20890478 -0.96458262 0.53476104
H -0.22415099 -1.97310940 0.60508386
C -1.44634208 -0.34458112 0.30665858
O -2.46123675 -1.01079161 0.19789196
C -1.35778219 1.10318559 0.22814378
H -2.25657214 1.66773071 0.04984731
C -0.16300320 1.70989257 0.38112632
H -0.04629046 2.78244591 0.33334968
N 0.98545210 1.00082412 0.61120636
H 1.86755978 1.46692777 0.74478430
C 1.02702092 -0.37917011 0.71264723
O 2.04919670 -0.99739548 0.93725979
--
0 1
C 1.14141247 2.35703152 4.05707817
H 0.71056385 2.66808022 3.10429560
H 0.50717856 2.76246464 4.84532582
H 2.12429249 2.81747894 4.15019966
C 1.21442893 0.83816057 4.14659651
H 1.64481257 0.54859772 5.10788747
H 1.88901852 0.44700002 3.38147835
C -0.15035626 0.17999392 3.99177975
H -0.82160052 0.54886973 4.77339899
H -0.59782713 0.49025894 3.04187953
C -0.09406732 -1.34069263 4.05141525
H 0.32953817 -1.64312304 5.01205144
H 0.59745442 -1.70257157 3.28691282
C -1.46335024 -1.98256584 3.86764160
H -1.90172924 -1.70910816 2.90745609
H -1.40641145 -3.06933423 3.91169879
H -2.15131302 -1.65421986 4.64687465
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '42')] = qcdb.Molecule("""
0 1
N 0.19572959 -0.84468925 0.82384642
H 0.45039753 -1.79675294 1.04976794
C -1.17904919 -0.57368440 0.75948349
O -1.99364624 -1.45626526 0.96690066
C -1.47671471 0.81115567 0.43755952
H -2.50635592 1.11565059 0.36389469
C -0.46811280 1.68296245 0.23489084
H -0.63843522 2.72164296 -0.00616410
N 0.84562854 1.30599113 0.32683051
H 1.58969256 1.96887924 0.18595979
C 1.25426147 0.01946187 0.63624397
O 2.42230438 -0.30171639 0.73187948
--
0 1
C 1.05672314 -0.86351031 4.39874366
H 1.51057565 -0.95556655 3.41076111
H 1.60122564 -1.52749058 5.06794134
C 1.11103661 0.60244169 4.83167965
H 2.06932660 1.07534062 4.62095536
H 0.92292133 0.68407923 5.90490278
C -0.05631497 1.21525617 4.06090845
H 0.21798930 1.30403777 3.00743682
H -0.34072939 2.20639729 4.41254246
C -1.17325946 0.17768426 4.23193676
H -1.89879874 0.20129811 3.42056485
H -1.71734509 0.38238141 5.15418538
C -0.45022312 -1.18886357 4.33559365
H -0.69288766 -1.83301970 3.49223397
H -0.76532935 -1.71626599 5.23468007
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '43')] = qcdb.Molecule("""
0 1
N 0.62608128 -0.85091265 0.80591569
H 0.40918989 -1.81150056 1.03440142
C -0.43245619 -0.08733581 0.29466376
O -1.53077162 -0.58840313 0.12359257
C -0.06687462 1.29127521 0.01963739
H -0.80974352 1.95181039 -0.39283965
C 1.18354208 1.71793501 0.29053321
H 1.50185022 2.73387064 0.10983284
N 2.13412979 0.88660160 0.81908177
H 3.05533594 1.22390137 1.04342778
C 1.90278319 -0.44317844 1.12831175
O 2.74380631 -1.16392354 1.62858730
--
0 1
C -0.62370220 -0.02971796 4.73188916
C -1.94044838 0.71157084 4.94676206
H -2.64751979 0.09336465 5.50162440
H -1.78094882 1.63175538 5.51094708
H -2.39815816 0.97306786 3.99160840
C -0.00826558 -0.38315588 6.08316660
H 0.93489659 -0.91552919 5.95238477
H 0.18875537 0.51658585 6.66796874
H -0.67955960 -1.02089289 6.65990335
C 0.34142207 0.86375986 3.95610006
H 1.28999256 0.35116515 3.78574607
H 0.54671227 1.78189631 4.50952643
H -0.08097331 1.14224647 2.98863562
C -0.88501939 -1.30975236 3.94152426
H -1.34875779 -1.08791865 2.97889962
H 0.04755691 -1.84815128 3.76188758
H -1.55552720 -1.97156632 4.49170918
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '44')] = qcdb.Molecule("""
0 1
C 0.66640038 0.18381078 0.41973683
H 1.22888182 -0.32988301 1.18625971
H 1.22803556 0.69720813 -0.34760989
C -0.66597358 0.18297343 0.41961191
H -1.22792171 -0.33149890 1.18610334
H -1.22818427 0.69564575 -0.34774808
--
0 1
C -2.53275995 -0.39365922 4.14534248
H -2.56225339 -1.00668000 3.24415261
H -2.56889390 -1.06787984 5.00095950
H -3.43393131 0.21735721 4.16258843
C -1.27132347 0.45901620 4.18116042
H -1.27172933 1.07910977 5.08055437
H -1.26293512 1.14592451 3.33210001
C -0.00004920 -0.37854138 4.15421721
H -0.00020326 -1.06521408 5.00604923
H 0.00009186 -1.00611921 3.25757472
C 1.27117120 0.45904505 4.18162175
H 1.27144420 1.07885580 5.08110716
H 1.26297638 1.14611970 3.33271412
C 2.53262258 -0.39367946 4.14579757
H 2.56224605 -1.00653596 3.24448839
H 3.43380069 0.21725671 4.16337561
H 2.56854094 -1.06813554 5.00130328
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '45')] = qcdb.Molecule("""
0 1
C -0.60618936 0.05587406 0.58900491
H -1.66803667 0.05577624 0.58901162
C 0.60584873 0.05554087 0.58926624
H 1.66767817 0.05486328 0.58972794
--
0 1
C -2.53040391 -0.34745600 4.21851416
H -2.53877054 -1.00940954 3.35210357
H -2.58232224 -0.97372522 5.10910493
H -3.43281853 0.26144806 4.18575253
C -1.26987178 0.50714472 4.22958343
H -1.28652345 1.18014394 5.08999255
H -1.24460479 1.14136072 3.34078732
C 0.00004684 -0.33118629 4.27003876
H 0.00004957 -0.94897593 5.17310016
H 0.00011393 -1.01948544 3.42079757
C 1.26994540 0.50718978 4.22967030
H 1.28657322 1.18015690 5.09009161
H 1.24480048 1.14136210 3.34086911
C 2.53046789 -0.34744680 4.21872389
H 2.53884766 -1.00942955 3.35234481
H 3.43284666 0.26148455 4.18599753
H 2.58228512 -0.97366153 5.10935743
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '46')] = qcdb.Molecule("""
0 1
C 1.37219093 1.01247736 0.97082468
H 0.95217623 2.01404955 1.03311725
H 1.94742170 0.92651560 0.05071776
H 2.05170208 0.85182517 1.80295247
C 0.32673706 -0.07764727 0.98819876
O 0.61882128 -1.25248130 1.17128126
N -0.95002884 0.34488680 0.77391491
H -1.10467156 1.32202550 0.60611216
C -2.05985440 -0.57736895 0.68015349
H -1.66935602 -1.56679601 0.89718425
H -2.83459176 -0.33138032 1.40366139
H -2.49097050 -0.57892483 -0.31993926
--
0 1
C 2.66066552 0.46274539 4.85334645
H 2.77750480 1.21716129 4.07460163
H 2.57455515 0.98763172 5.80500251
H 3.57275696 -0.13149652 4.88015446
C 1.43239329 -0.40064212 4.59579490
H 1.33782394 -1.14609612 5.38884574
H 1.54881342 -0.95410645 3.66195110
C 0.14985545 0.41797183 4.53049355
H 0.03828513 0.99570671 5.45357719
H 0.22908959 1.15078674 3.72084090
C -1.09450084 -0.43236340 4.31361365
H -1.18530281 -1.14684989 5.13503088
H -0.96669384 -1.02130113 3.40339920
C -2.36133934 0.40792810 4.22349893
H -2.29442610 1.11497908 3.39572969
H -3.24668156 -0.20808939 4.06966602
H -2.51169538 0.98413919 5.13671852
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '47')] = qcdb.Molecule("""
0 1
C 0.72918867 1.11310122 0.32672825
H 1.30321590 2.01422234 0.15916027
C 1.37508737 -0.11936635 0.41277695
H 2.45051474 -0.17462400 0.31330720
C 0.63503981 -1.28055339 0.62938541
H 1.13633448 -2.23601747 0.70021716
C -0.75098563 -1.20965430 0.75789034
H -1.32452590 -2.11141283 0.92419891
C -1.39703443 0.02267081 0.67308963
H -2.47242537 0.07848826 0.77399799
C -0.65689731 1.18429622 0.45833859
H -1.15782845 2.14058713 0.39509608
--
0 1
C 0.15810619 0.15289032 4.08588285
H 0.28023260 0.37837378 3.03545641
C -0.93297537 -0.60200829 4.51321912
H -1.65347990 -0.95852255 3.78952470
C -1.09367536 -0.89613361 5.86616918
H -1.94078294 -1.48210218 6.19641672
C -0.16179279 -0.43508023 6.79466326
H -0.28568629 -0.66304639 7.84467076
C 0.92979230 0.32002182 6.36942298
H 1.65291139 0.67785500 7.08980563
C 1.08859620 0.61350684 5.01593166
H 1.93585412 1.19958163 4.68588434
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '48')] = qcdb.Molecule("""
0 1
N 1.32276272 -0.01037598 1.01918373
C 0.65128601 -1.14899203 0.79680119
H 1.20041842 -2.06552808 0.97367282
C -0.67268130 -1.19471172 0.36665693
H -1.15719362 -2.14732141 0.20646407
C -1.34719676 0.00313399 0.15214401
H -2.37535653 0.00840542 -0.18229302
C -0.66455797 1.19409062 0.37900199
H -1.14262633 2.15155765 0.22872051
C 0.65889576 1.13497854 0.80885987
H 1.21410272 2.04591045 0.99543831
--
0 1
N 0.45011507 0.00130104 6.78095972
C 1.32078309 -0.00431175 5.76154669
H 2.36863966 -0.00306323 6.03584948
C 0.94739735 -0.01137951 4.41971862
H 1.69485802 -0.01554353 3.63861897
C -0.40865120 -0.01279358 4.10730315
H -0.73837988 -0.01824905 3.07702170
C -1.32675447 -0.00707849 5.15247277
H -2.39120450 -0.00792788 4.96373698
C -0.85115066 -0.00016084 6.46143162
H -1.54333433 0.00442229 7.29462282
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '49')] = qcdb.Molecule("""
0 1
C 0.84507720 1.05791869 0.69945490
H 1.50640601 1.90322178 0.83338235
C 1.37550931 -0.21745534 0.51116093
H 2.44718367 -0.36147258 0.50285232
C 0.52406810 -1.30704432 0.33319233
H 0.93572726 -2.29602641 0.18492305
C -0.85771573 -1.12146341 0.34638409
H -1.51838119 -1.96645805 0.20836325
C -1.38804570 0.15363438 0.53761349
H -2.45971752 0.29741587 0.55003229
C -0.53661315 1.24342221 0.71273882
H -0.94892427 2.23280628 0.85736635
--
0 1
N 0.02311730 0.35202455 6.77454464
C 0.17780112 1.28998616 5.82966776
H 0.31957195 2.30251216 6.18756949
C 0.16359185 1.02269639 4.46316833
H 0.29383191 1.82372219 3.74928292
C -0.02074646 -0.28893329 4.03787790
H -0.03731291 -0.53205196 2.98452996
C -0.18259538 -1.27396762 5.00673698
H -0.32913840 -2.30917859 4.73196547
C -0.15339291 -0.90663452 6.34982649
H -0.27698904 -1.65414849 7.12392749
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '50')] = qcdb.Molecule("""
0 1
C 0.83661195 1.11485600 0.23100790
H 1.48545250 1.97968049 0.21470491
C 1.38418781 -0.16696533 0.26005688
H 2.45768419 -0.29628753 0.26605977
C 0.54747934 -1.28184652 0.28693051
H 0.97191784 -2.27597918 0.31387670
C -0.83666710 -1.11500365 0.28456279
H -1.48555353 -1.97956851 0.30969784
C -1.38416274 0.16685015 0.25560540
H -2.45764469 0.29645927 0.25854055
C -0.54749833 1.28174826 0.22897743
H -0.97214124 2.27600137 0.21116093
--
0 1
C 0.00585466 0.07515017 3.77945155
H 0.00284553 0.05759463 2.71537604
C 0.00951511 0.09473103 4.99182772
H 0.01262752 0.11190396 6.05302473
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '51')] = qcdb.Molecule("""
0 1
C -0.60172996 -0.02857012 0.38493492
H -1.66373543 -0.02852657 0.37901431
C 0.61010917 -0.02866364 0.38816379
H 1.67213544 -0.02879308 0.38796752
--
0 1
C -0.00735998 0.10033739 4.14281190
H -0.00396560 0.06660234 3.07951502
C -0.01129640 0.13862741 5.35427728
H -0.01456263 0.17200329 6.41518870
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '52')] = qcdb.Molecule("""
0 1
C 0.96408039 0.87509331 0.37801364
H 1.65982961 1.69993082 0.44604227
C 1.43105709 -0.41313344 0.11899152
H 2.48952453 -0.58720917 -0.01701261
C 0.53412766 -1.47763890 0.04241755
H 0.89696129 -2.47738839 -0.15201199
C -0.83032682 -1.25360409 0.22085611
H -1.52576001 -2.07962435 0.16411655
C -1.29758715 0.03441261 0.48024263
H -2.35439607 0.20801612 0.62856096
C -0.40044509 1.09977921 0.56160137
H -0.76045514 2.09376880 0.78475698
--
0 1
C -0.11985517 0.53438939 4.36008118
O -0.58804476 1.58383601 3.98082079
O 0.28335741 -0.44317387 3.52079591
H 0.11465259 -0.11726029 2.61939066
C 0.09009913 0.13740231 5.79148697
H -0.21986702 0.94673889 6.44147585
H -0.48598160 -0.75922167 6.00843808
H 1.13859655 -0.09872978 5.95650555
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '53')] = qcdb.Molecule("""
0 1
C 0.85556074 0.35853244 1.04975426
H 1.51382550 0.90267956 1.71276582
C 1.34289713 -0.67537866 0.25115740
H 2.39288384 -0.93334472 0.28196305
C 0.47780661 -1.37670110 -0.58781577
H 0.85608399 -2.17890753 -1.20682428
C -0.87482983 -1.04255615 -0.63045178
H -1.54540573 -1.58570014 -1.28241614
C -1.36239729 -0.00701391 0.16584645
H -2.41157102 0.25346723 0.13077885
C -0.49844404 0.69315695 1.00699199
H -0.86611090 1.49033989 1.63803696
--
0 1
C 0.08192937 0.49753072 4.80472861
O 0.32841872 1.54095697 4.21748933
N -0.22211788 -0.65747581 4.15356127
H -0.19691756 -0.66449114 3.14692466
H -0.37789436 -1.51296813 4.64926298
C 0.10477407 0.40263889 6.31314609
H 1.13648787 0.48685118 6.64821988
H -0.31712984 -0.52400410 6.69417176
H -0.44469059 1.24648520 6.71991660
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '54')] = qcdb.Molecule("""
0 1
C 0.78014717 -0.60991473 -1.20755689
H 0.89619160 -1.13763959 -2.14414463
C 0.47794275 0.75099363 -1.20789541
H 0.35696423 1.27816780 -2.14405407
C 0.32728928 1.43186787 -0.00000000
H 0.09146503 2.48713922 0.00000000
C 0.47794275 0.75099363 1.20789541
H 0.35696423 1.27816780 2.14405407
C 0.78014717 -0.60991473 1.20755689
H 0.89619160 -1.13763959 2.14414463
C 0.93164831 -1.28998134 0.00000000
H 1.16848573 -2.34521369 -0.00000000
--
0 1
O -2.74383121 -0.26926257 0.00000000
H -2.57902721 -1.21398410 0.00000000
H -1.85653027 0.10232776 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '55')] = qcdb.Molecule("""
0 1
C 0.75974918 1.03127506 0.37377239
H 1.43501626 1.87566427 0.37470462
C 1.26661779 -0.26736234 0.42127308
H 2.33491597 -0.42918019 0.45943234
C 0.39532054 -1.35599116 0.42490511
H 0.78866193 -2.36249259 0.46303549
C -0.98220564 -1.14665441 0.38127024
H -1.65765632 -1.99114019 0.38512100
C -1.48934612 0.15114979 0.33757234
H -2.55794704 0.31375049 0.30771900
C -0.61877516 1.24033121 0.33388373
H -1.01176161 2.24710690 0.30436922
--
0 1
O 0.04701895 0.30618537 3.68511328
H 0.13311917 0.35605847 2.72791973
C -0.84913165 -0.75142870 3.96816832
H -0.94485234 -0.80816328 5.04910445
H -1.84128123 -0.57973096 3.54437811
H -0.48267133 -1.71446977 3.60525680
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '56')] = qcdb.Molecule("""
0 1
C 0.69231523 1.08829204 0.32484124
H 1.28194880 1.99194678 0.25251578
C 1.31818722 -0.15687008 0.28689607
H 2.39314337 -0.21947636 0.18840681
C 0.55801841 -1.32195045 0.38139986
H 1.04391922 -2.28757380 0.35761542
C -0.82755236 -1.24142187 0.51168501
H -1.41670095 -2.14525152 0.58533927
C -1.45341138 0.00367145 0.54838107
H -2.52823255 0.06570272 0.64984254
C -0.69346094 1.16840108 0.45622907
H -1.17873534 2.13440989 0.48572685
--
0 1
N 0.27506479 -0.22271725 3.85890709
H 0.40968315 -0.17867675 2.85583573
H 0.41655736 0.72242949 4.19137936
C -1.10103469 -0.62910066 4.13634288
H -1.25891125 -0.65764767 5.21289841
H -1.87233687 0.01128013 3.69622388
H -1.25572667 -1.63866846 3.76072118
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '57')] = qcdb.Molecule("""
0 1
C 0.40877989 1.05102502 0.37553605
H 1.01193875 1.94854570 0.36807788
C 1.01916788 -0.19976963 0.28905343
H 2.09557130 -0.27183333 0.21719099
C 0.24172263 -1.35688270 0.29668995
H 0.71521633 -2.32658869 0.22807218
C -1.14617971 -1.26425757 0.39390198
H -1.74918186 -2.16192663 0.39940980
C -1.75727780 -0.01396023 0.48295173
H -2.83351378 0.05824368 0.55903918
C -0.97968602 1.14420653 0.47228370
H -1.45405142 2.11400088 0.53713589
--
0 1
C 0.24562178 1.95675759 4.25663541
H -0.11252332 2.12248844 3.24334264
H 1.27020534 2.31346716 4.33807692
H -0.35847510 2.53039342 4.95498813
C 0.20877544 0.50359448 4.67234424
O 0.49340385 0.15123306 5.81088230
N -0.16361983 -0.36212226 3.69310315
H -0.32474773 -0.00413152 2.76703481
C -0.20041270 -1.78900149 3.91119021
H -0.12232513 -1.95590903 4.98118644
H -1.13565324 -2.20735207 3.54445210
H 0.62871378 -2.29287426 3.41385278
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '58')] = qcdb.Molecule("""
0 1
N -0.94121124 0.79004136 0.01171891
C -0.92275524 -0.55237814 0.03537875
H 0.05724051 -1.01558800 0.05135491
C -2.07651907 -1.33301813 0.03929035
H -1.99652895 -2.41058573 0.05887720
C -3.31631294 -0.70333955 0.01759905
H -4.23157489 -1.27908429 0.01979377
C -3.34889528 0.68701881 -0.00708596
H -4.28544414 1.22610455 -0.02465899
C -2.14310382 1.38263356 -0.00889005
H -2.13809974 2.46565258 -0.02778297
--
0 1
N 2.53321129 -0.95002930 0.04251789
C 3.73499010 -1.54320554 0.04459773
H 3.72976625 -2.62616799 0.06648690
C 4.94092634 -0.84824698 0.02059635
H 5.87736466 -1.38778216 0.02369036
C 4.90860873 0.54205748 -0.00715036
H 5.82398367 1.11730853 -0.02633187
C 3.66892840 1.17234361 -0.00962746
H 3.58915567 2.24990219 -0.03071603
C 2.51501483 0.39233399 0.01556620
H 1.53510443 0.85599657 0.01390336
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '59')] = qcdb.Molecule("""
0 1
C -1.00686722 -0.03056821 -0.02477285
H 0.05900333 -0.06093974 -0.04936562
C -2.21874380 0.00317347 0.00259920
H -3.27927730 0.03352491 0.02720048
--
0 1
O 2.26390460 -0.14557006 -0.11547082
H 2.83426102 -0.73533944 0.38155611
H 2.83590044 0.20541797 -0.80084297
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '60')] = qcdb.Molecule("""
0 1
C -0.61056257 0.22750310 -0.17060207
H 0.10738506 0.86143603 -0.63420924
C -1.38627573 -0.52532550 0.37997353
H -2.08070324 -1.17406739 0.85437937
--
0 1
C 2.83444960 -0.64143137 0.46593603
O 2.58027054 0.31467087 -0.23290172
O 1.88654498 -1.41577160 1.03362263
H 1.02554559 -1.04847261 0.76585149
C 4.21008475 -1.12288120 0.81608694
H 4.94847057 -0.48533112 0.34523661
H 4.33629527 -1.11102648 1.89612226
H 4.33236190 -2.15072575 0.48285261
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '61')] = qcdb.Molecule("""
0 1
C -2.27534498 -0.13507494 0.83133387
H -2.49071776 -0.72792669 -0.05756635
H -2.22632382 -0.81844641 1.67882341
H -3.11202566 0.54494342 0.98740008
C -0.96169812 0.61927789 0.66939920
H -0.78869920 1.25043181 1.54470266
H -1.02617687 1.29544524 -0.18645838
C 0.22650217 -0.31471031 0.47998579
H 0.30944439 -0.97513911 1.34803794
H 0.03915056 -0.96599875 -0.37878983
C 1.54300168 0.42117452 0.26899951
H 1.71163863 1.10777177 1.10244654
H 1.46609466 1.04374331 -0.62529358
C 2.72757633 -0.52686091 0.13745931
H 2.58874155 -1.20321391 -0.70575734
H 3.66150100 0.01169308 -0.01596863
H 2.83519407 -1.13740994 1.03407512
--
0 1
C -0.48356149 -0.28786315 4.12125154
O -0.90617543 -1.40304340 3.92410496
O -1.29725385 0.77110237 4.35384102
H -2.19801596 0.41672183 4.31330528
C 0.95670557 0.12180293 4.13845692
H 1.58252864 -0.74837801 3.98030176
H 1.13274299 0.85607656 3.35533234
H 1.19401682 0.59110388 5.09025931
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '62')] = qcdb.Molecule("""
0 1
C -2.58777605 -0.32310566 0.46945828
H -2.61038910 -0.87636604 -0.46961946
H -2.65974410 -1.05188654 1.27771411
H -3.47603507 0.30562460 0.50896129
C -1.30955982 0.49739424 0.58506260
H -1.31725060 1.08326190 1.50634108
H -1.26237673 1.21557375 -0.23677617
C -0.05682966 -0.36826029 0.55844017
H -0.08617526 -1.07335882 1.39587537
H -0.05380919 -0.97684333 -0.35147393
C 1.23159606 0.44006559 0.63203246
H 1.21328340 1.05356193 1.53459305
H 1.26629733 1.13137662 -0.21310563
C 2.47257523 -0.44314441 0.61922148
H 2.52071888 -1.03526342 -0.29489695
H 3.38773437 0.14408974 0.68390871
H 2.45929703 -1.13936423 1.45861821
--
0 1
C 0.04216222 0.20124208 4.11650819
O 0.06907449 1.38631556 3.82466701
N 1.17474249 -0.55063556 4.21932814
H 2.04568275 -0.12805505 3.95066588
H 1.13580453 -1.54252223 4.35075106
C -1.24805876 -0.53769541 4.38096202
H -1.10080876 -1.49841677 4.86808639
H -1.75428629 -0.69600434 3.43014867
H -1.88600271 0.08954102 4.99623387
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '63')] = qcdb.Molecule("""
0 1
C 0.60678496 1.33042185 0.31643451
H 1.24649846 2.20226434 0.33035231
C 1.11808466 0.08724886 0.68511652
H 2.15005753 -0.00388678 0.99375824
C 0.29290229 -1.03608737 0.66910727
H 0.68849686 -2.00096149 0.95537797
C -1.04283174 -0.91671112 0.28818964
H -1.68270956 -1.78848825 0.27934903
C -1.55358838 0.32734899 -0.07994317
H -2.58923495 0.42028908 -0.37734619
C -0.72804164 1.45084316 -0.06684834
H -1.12362379 2.41565865 -0.35386143
--
0 1
C 0.41898688 -0.27167884 4.02497697
O 1.61447955 -0.10772809 4.10149274
O -0.16051479 -1.48308380 4.22441532
H 0.57393607 -2.08419229 4.41745344
C -0.60289735 0.77225268 3.70429579
H -0.12460293 1.74319903 3.65747301
H -1.05569745 0.53905649 2.74158774
H -1.38774836 0.76671618 4.45679527
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '64')] = qcdb.Molecule("""
0 1
C 1.62971482 0.50301252 0.27011189
H 1.64157338 1.45923792 -0.24808286
H 2.31531919 -0.18355470 -0.21758635
H 1.96974564 0.64936024 1.29398105
C 0.26182776 -0.13286122 0.31456221
O 0.09925265 -1.30961602 0.61183995
N -0.77350225 0.70251214 0.02207590
H -0.56901138 1.66655677 -0.16581434
C -2.15001214 0.26596865 0.09505328
H -2.14473761 -0.81940745 0.10091210
H -2.64054318 0.61582035 1.00360442
H -2.70774393 0.62075110 -0.76826057
--
0 1
C -0.04575608 0.51799706 3.77621664
H -0.05063764 1.26017087 4.56209922
H -0.69428883 0.68576570 2.92753308
C 0.72275422 -0.56896486 3.84602626
H 1.36805919 -0.74079051 4.69615412
H 0.71764224 -1.30416499 3.05371698
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '65')] = qcdb.Molecule("""
0 1
N -0.08303249 0.00071459 1.05519999
C -0.20285376 -1.14172585 0.36493369
H -0.09848563 -2.05509795 0.93743262
C -0.44678144 -1.19176367 -1.00451226
H -0.53364921 -2.14585511 -1.50417155
C -0.57468209 0.00343953 -1.70430948
H -0.76368391 0.00448010 -2.76872670
C -0.45345675 1.19724254 -1.00091647
H -0.54563080 2.15227264 -1.49779508
C -0.20931111 1.14450759 0.36836730
H -0.11016707 2.05669726 0.94357396
--
0 1
C 0.47183602 -0.00605819 5.54171896
H 0.58724607 -0.00548400 6.59673278
C 0.33976626 -0.00660792 4.33547166
H 0.22161814 -0.00634549 3.27096619
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '66')] = qcdb.Molecule("""
0 1
N -0.54105920 0.02957620 -0.20899508
H 0.05555335 -0.78611810 -0.13029335
H -1.46966940 -0.27470845 0.05314338
C -0.07879927 1.04239036 0.73845886
H -0.72015294 1.91941377 0.67198026
H -0.05075819 0.72382293 1.78551453
H 0.92643072 1.35660379 0.46199919
--
0 1
N 2.34185022 -1.25680010 0.03015300
C 2.68028654 -0.44445604 -0.98155948
H 2.13761932 -0.58899402 -1.90694084
C 3.65161580 0.54767776 -0.88119247
H 3.87646824 1.17201804 -1.73404317
C 4.31245587 0.71721920 0.33107196
H 5.07030981 1.47945653 0.44745609
C 3.97232296 -0.11774333 1.39019492
H 4.45491136 -0.02728109 2.35289557
C 2.98854139 -1.08253234 1.19101154
H 2.70245706 -1.74627994 1.99762219
units angstrom
""")
# <<< Derived Geometry Strings >>>
for rxn in HRXN:
GEOS['%s-%s-monoA-unCP' % (dbse, rxn)] = GEOS['%s-%s-dimer' % (dbse, rxn)].extract_fragments(1)
GEOS['%s-%s-monoB-unCP' % (dbse, rxn)] = GEOS['%s-%s-dimer' % (dbse, rxn)].extract_fragments(2)
GEOS['%s-%s-monoA-CP' % (dbse, rxn)] = GEOS['%s-%s-dimer' % (dbse, rxn)].extract_fragments(1, 2)
GEOS['%s-%s-monoB-CP' % (dbse, rxn)] = GEOS['%s-%s-dimer' % (dbse, rxn)].extract_fragments(2, 1)
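# NB: following qcdb conventions, extract_fragments(1) returns monomer A in
# its own basis (the -unCP geometries), while extract_fragments(1, 2) keeps
# fragment 1 real and turns fragment 2 into ghost atoms, i.e. the
# counterpoise-corrected (-CP) monomer evaluated in the full dimer basis.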
#########################################################################
# <<< Supplementary Quantum Chemical Results >>>
DATA = {}
DATA['NUCLEAR REPULSION ENERGY'] = {}
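# The nuclear repulsion energies tabulated below, E_NN = sum_{i<j} Z_i*Z_j/r_ij
# (hartree, with r_ij in bohr), act as a fingerprint of each geometry; a driver
# can cross-check a rebuilt molecule against them, e.g. (sketch):
#   assert abs(GEOS['S66-1-dimer'].nuclear_repulsion_energy()
#              - DATA['NUCLEAR REPULSION ENERGY']['S66-1-dimer']) < 1.0e-6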
DATA['NUCLEAR REPULSION ENERGY']['S66-1-dimer' ] = 36.51369349
DATA['NUCLEAR REPULSION ENERGY']['S66-1-monoA-unCP' ] = 9.15671411
DATA['NUCLEAR REPULSION ENERGY']['S66-1-monoB-unCP' ] = 9.17259114
DATA['NUCLEAR REPULSION ENERGY']['S66-2-dimer' ] = 79.98338083
DATA['NUCLEAR REPULSION ENERGY']['S66-2-monoA-unCP' ] = 9.14996836
DATA['NUCLEAR REPULSION ENERGY']['S66-2-monoB-unCP' ] = 40.29463192
DATA['NUCLEAR REPULSION ENERGY']['S66-3-dimer' ] = 79.77996002
DATA['NUCLEAR REPULSION ENERGY']['S66-3-monoA-unCP' ] = 9.12565570
DATA['NUCLEAR REPULSION ENERGY']['S66-3-monoB-unCP' ] = 42.06267577
DATA['NUCLEAR REPULSION ENERGY']['S66-4-dimer' ] = 246.86074225
DATA['NUCLEAR REPULSION ENERGY']['S66-4-monoA-unCP' ] = 9.13184124
DATA['NUCLEAR REPULSION ENERGY']['S66-4-monoB-unCP' ] = 180.56084030
DATA['NUCLEAR REPULSION ENERGY']['S66-5-dimer' ] = 129.52156842
DATA['NUCLEAR REPULSION ENERGY']['S66-5-monoA-unCP' ] = 40.41731272
DATA['NUCLEAR REPULSION ENERGY']['S66-5-monoB-unCP' ] = 40.29806380
DATA['NUCLEAR REPULSION ENERGY']['S66-6-dimer' ] = 131.81617640
DATA['NUCLEAR REPULSION ENERGY']['S66-6-monoA-unCP' ] = 40.42467073
DATA['NUCLEAR REPULSION ENERGY']['S66-6-monoB-unCP' ] = 42.05202847
DATA['NUCLEAR REPULSION ENERGY']['S66-7-dimer' ] = 313.95975412
DATA['NUCLEAR REPULSION ENERGY']['S66-7-monoA-unCP' ] = 40.41876218
DATA['NUCLEAR REPULSION ENERGY']['S66-7-monoB-unCP' ] = 180.73873695
DATA['NUCLEAR REPULSION ENERGY']['S66-8-dimer' ] = 78.74537406
DATA['NUCLEAR REPULSION ENERGY']['S66-8-monoA-unCP' ] = 40.42326344
DATA['NUCLEAR REPULSION ENERGY']['S66-8-monoB-unCP' ] = 9.17236900
DATA['NUCLEAR REPULSION ENERGY']['S66-9-dimer' ] = 129.31867271
DATA['NUCLEAR REPULSION ENERGY']['S66-9-monoA-unCP' ] = 42.10593235
DATA['NUCLEAR REPULSION ENERGY']['S66-9-monoB-unCP' ] = 40.34710761
DATA['NUCLEAR REPULSION ENERGY']['S66-10-dimer' ] = 131.71717765
DATA['NUCLEAR REPULSION ENERGY']['S66-10-monoA-unCP' ] = 42.09217552
DATA['NUCLEAR REPULSION ENERGY']['S66-10-monoB-unCP' ] = 42.05982938
DATA['NUCLEAR REPULSION ENERGY']['S66-11-dimer' ] = 320.50976921
DATA['NUCLEAR REPULSION ENERGY']['S66-11-monoA-unCP' ] = 42.09328618
DATA['NUCLEAR REPULSION ENERGY']['S66-11-monoB-unCP' ] = 180.72211450
DATA['NUCLEAR REPULSION ENERGY']['S66-12-dimer' ] = 81.87844165
DATA['NUCLEAR REPULSION ENERGY']['S66-12-monoA-unCP' ] = 42.04336531
DATA['NUCLEAR REPULSION ENERGY']['S66-12-monoB-unCP' ] = 9.12312499
DATA['NUCLEAR REPULSION ENERGY']['S66-13-dimer' ] = 314.84789007
DATA['NUCLEAR REPULSION ENERGY']['S66-13-monoA-unCP' ] = 180.80545988
DATA['NUCLEAR REPULSION ENERGY']['S66-13-monoB-unCP' ] = 40.30378877
DATA['NUCLEAR REPULSION ENERGY']['S66-14-dimer' ] = 315.64348724
DATA['NUCLEAR REPULSION ENERGY']['S66-14-monoA-unCP' ] = 180.81499576
DATA['NUCLEAR REPULSION ENERGY']['S66-14-monoB-unCP' ] = 42.03791353
DATA['NUCLEAR REPULSION ENERGY']['S66-15-dimer' ] = 540.42243680
DATA['NUCLEAR REPULSION ENERGY']['S66-15-monoA-unCP' ] = 180.53794513
DATA['NUCLEAR REPULSION ENERGY']['S66-15-monoB-unCP' ] = 180.54327910
DATA['NUCLEAR REPULSION ENERGY']['S66-16-dimer' ] = 243.51194018
DATA['NUCLEAR REPULSION ENERGY']['S66-16-monoA-unCP' ] = 180.57089645
DATA['NUCLEAR REPULSION ENERGY']['S66-16-monoB-unCP' ] = 9.17374713
DATA['NUCLEAR REPULSION ENERGY']['S66-17-dimer' ] = 1040.55250335
DATA['NUCLEAR REPULSION ENERGY']['S66-17-monoA-unCP' ] = 357.25263911
DATA['NUCLEAR REPULSION ENERGY']['S66-17-monoB-unCP' ] = 357.22824169
DATA['NUCLEAR REPULSION ENERGY']['S66-18-dimer' ] = 269.39653929
DATA['NUCLEAR REPULSION ENERGY']['S66-18-monoA-unCP' ] = 9.12915636
DATA['NUCLEAR REPULSION ENERGY']['S66-18-monoB-unCP' ] = 206.28546361
DATA['NUCLEAR REPULSION ENERGY']['S66-19-dimer' ] = 337.49486033
DATA['NUCLEAR REPULSION ENERGY']['S66-19-monoA-unCP' ] = 40.42190801
DATA['NUCLEAR REPULSION ENERGY']['S66-19-monoB-unCP' ] = 206.28426737
DATA['NUCLEAR REPULSION ENERGY']['S66-20-dimer' ] = 381.47467603
DATA['NUCLEAR REPULSION ENERGY']['S66-20-monoA-unCP' ] = 121.35354216
DATA['NUCLEAR REPULSION ENERGY']['S66-20-monoB-unCP' ] = 121.35037507
DATA['NUCLEAR REPULSION ENERGY']['S66-21-dimer' ] = 373.66110820
DATA['NUCLEAR REPULSION ENERGY']['S66-21-monoA-unCP' ] = 121.85534909
DATA['NUCLEAR REPULSION ENERGY']['S66-21-monoB-unCP' ] = 121.85562743
DATA['NUCLEAR REPULSION ENERGY']['S66-22-dimer' ] = 685.96293615
DATA['NUCLEAR REPULSION ENERGY']['S66-22-monoA-unCP' ] = 121.30606379
DATA['NUCLEAR REPULSION ENERGY']['S66-22-monoB-unCP' ] = 357.30242624
DATA['NUCLEAR REPULSION ENERGY']['S66-23-dimer' ] = 682.46450694
DATA['NUCLEAR REPULSION ENERGY']['S66-23-monoA-unCP' ] = 121.91206440
DATA['NUCLEAR REPULSION ENERGY']['S66-23-monoB-unCP' ] = 357.16987646
DATA['NUCLEAR REPULSION ENERGY']['S66-24-dimer' ] = 623.71187998
DATA['NUCLEAR REPULSION ENERGY']['S66-24-monoA-unCP' ] = 203.71200257
DATA['NUCLEAR REPULSION ENERGY']['S66-24-monoB-unCP' ] = 203.71172379
DATA['NUCLEAR REPULSION ENERGY']['S66-25-dimer' ] = 637.14156863
DATA['NUCLEAR REPULSION ENERGY']['S66-25-monoA-unCP' ] = 206.22564193
DATA['NUCLEAR REPULSION ENERGY']['S66-25-monoB-unCP' ] = 206.22748415
DATA['NUCLEAR REPULSION ENERGY']['S66-26-dimer' ] = 1163.54572871
DATA['NUCLEAR REPULSION ENERGY']['S66-26-monoA-unCP' ] = 357.16027337
DATA['NUCLEAR REPULSION ENERGY']['S66-26-monoB-unCP' ] = 357.16027370
DATA['NUCLEAR REPULSION ENERGY']['S66-27-dimer' ] = 630.67443466
DATA['NUCLEAR REPULSION ENERGY']['S66-27-monoA-unCP' ] = 203.68422363
DATA['NUCLEAR REPULSION ENERGY']['S66-27-monoB-unCP' ] = 206.25955744
DATA['NUCLEAR REPULSION ENERGY']['S66-28-dimer' ] = 878.32907732
DATA['NUCLEAR REPULSION ENERGY']['S66-28-monoA-unCP' ] = 203.65134501
DATA['NUCLEAR REPULSION ENERGY']['S66-28-monoB-unCP' ] = 357.16948119
DATA['NUCLEAR REPULSION ENERGY']['S66-29-dimer' ] = 885.28192562
DATA['NUCLEAR REPULSION ENERGY']['S66-29-monoA-unCP' ] = 206.16040036
DATA['NUCLEAR REPULSION ENERGY']['S66-29-monoB-unCP' ] = 357.23565563
DATA['NUCLEAR REPULSION ENERGY']['S66-30-dimer' ] = 327.62509332
DATA['NUCLEAR REPULSION ENERGY']['S66-30-monoA-unCP' ] = 203.74228045
DATA['NUCLEAR REPULSION ENERGY']['S66-30-monoB-unCP' ] = 33.43000301
DATA['NUCLEAR REPULSION ENERGY']['S66-31-dimer' ] = 518.26358403
DATA['NUCLEAR REPULSION ENERGY']['S66-31-monoA-unCP' ] = 357.18726739
DATA['NUCLEAR REPULSION ENERGY']['S66-31-monoB-unCP' ] = 33.40409180
DATA['NUCLEAR REPULSION ENERGY']['S66-32-dimer' ] = 495.33117294
DATA['NUCLEAR REPULSION ENERGY']['S66-32-monoA-unCP' ] = 357.24995067
DATA['NUCLEAR REPULSION ENERGY']['S66-32-monoB-unCP' ] = 24.63459975
DATA['NUCLEAR REPULSION ENERGY']['S66-33-dimer' ] = 332.11307535
DATA['NUCLEAR REPULSION ENERGY']['S66-33-monoA-unCP' ] = 206.29228895
DATA['NUCLEAR REPULSION ENERGY']['S66-33-monoB-unCP' ] = 33.42391806
DATA['NUCLEAR REPULSION ENERGY']['S66-34-dimer' ] = 577.94330068
DATA['NUCLEAR REPULSION ENERGY']['S66-34-monoA-unCP' ] = 185.63664994
DATA['NUCLEAR REPULSION ENERGY']['S66-34-monoB-unCP' ] = 185.63558546
DATA['NUCLEAR REPULSION ENERGY']['S66-35-dimer' ] = 574.13141612
DATA['NUCLEAR REPULSION ENERGY']['S66-35-monoA-unCP' ] = 185.63471242
DATA['NUCLEAR REPULSION ENERGY']['S66-35-monoB-unCP' ] = 199.36895747
DATA['NUCLEAR REPULSION ENERGY']['S66-36-dimer' ] = 573.01241887
DATA['NUCLEAR REPULSION ENERGY']['S66-36-monoA-unCP' ] = 199.35493735
DATA['NUCLEAR REPULSION ENERGY']['S66-36-monoB-unCP' ] = 199.35496470
DATA['NUCLEAR REPULSION ENERGY']['S66-37-dimer' ] = 569.42803611
DATA['NUCLEAR REPULSION ENERGY']['S66-37-monoA-unCP' ] = 188.28929834
DATA['NUCLEAR REPULSION ENERGY']['S66-37-monoB-unCP' ] = 199.34481507
DATA['NUCLEAR REPULSION ENERGY']['S66-38-dimer' ] = 562.36494675
DATA['NUCLEAR REPULSION ENERGY']['S66-38-monoA-unCP' ] = 188.38358820
DATA['NUCLEAR REPULSION ENERGY']['S66-38-monoB-unCP' ] = 188.37865241
DATA['NUCLEAR REPULSION ENERGY']['S66-39-dimer' ] = 594.82529945
DATA['NUCLEAR REPULSION ENERGY']['S66-39-monoA-unCP' ] = 203.67735882
DATA['NUCLEAR REPULSION ENERGY']['S66-39-monoB-unCP' ] = 188.40454306
DATA['NUCLEAR REPULSION ENERGY']['S66-40-dimer' ] = 598.08168004
DATA['NUCLEAR REPULSION ENERGY']['S66-40-monoA-unCP' ] = 203.68538784
DATA['NUCLEAR REPULSION ENERGY']['S66-40-monoB-unCP' ] = 199.37329650
DATA['NUCLEAR REPULSION ENERGY']['S66-41-dimer' ] = 843.32242800
DATA['NUCLEAR REPULSION ENERGY']['S66-41-monoA-unCP' ] = 357.06617642
DATA['NUCLEAR REPULSION ENERGY']['S66-41-monoB-unCP' ] = 185.61673585
DATA['NUCLEAR REPULSION ENERGY']['S66-42-dimer' ] = 830.51659591
DATA['NUCLEAR REPULSION ENERGY']['S66-42-monoA-unCP' ] = 357.04169352
DATA['NUCLEAR REPULSION ENERGY']['S66-42-monoB-unCP' ] = 188.33728572
DATA['NUCLEAR REPULSION ENERGY']['S66-43-dimer' ] = 830.36688604
DATA['NUCLEAR REPULSION ENERGY']['S66-43-monoA-unCP' ] = 357.12713115
DATA['NUCLEAR REPULSION ENERGY']['S66-43-monoB-unCP' ] = 199.36153551
DATA['NUCLEAR REPULSION ENERGY']['S66-44-dimer' ] = 303.64951312
DATA['NUCLEAR REPULSION ENERGY']['S66-44-monoA-unCP' ] = 33.42556566
DATA['NUCLEAR REPULSION ENERGY']['S66-44-monoB-unCP' ] = 185.65594848
DATA['NUCLEAR REPULSION ENERGY']['S66-45-dimer' ] = 285.69697355
DATA['NUCLEAR REPULSION ENERGY']['S66-45-monoA-unCP' ] = 24.64923587
DATA['NUCLEAR REPULSION ENERGY']['S66-45-monoB-unCP' ] = 185.73197134
DATA['NUCLEAR REPULSION ENERGY']['S66-46-dimer' ] = 576.36980953
DATA['NUCLEAR REPULSION ENERGY']['S66-46-monoA-unCP' ] = 180.49044991
DATA['NUCLEAR REPULSION ENERGY']['S66-46-monoB-unCP' ] = 185.67687994
DATA['NUCLEAR REPULSION ENERGY']['S66-47-dimer' ] = 592.90348525
DATA['NUCLEAR REPULSION ENERGY']['S66-47-monoA-unCP' ] = 203.66921988
DATA['NUCLEAR REPULSION ENERGY']['S66-47-monoB-unCP' ] = 203.67694204
DATA['NUCLEAR REPULSION ENERGY']['S66-48-dimer' ] = 601.34387795
DATA['NUCLEAR REPULSION ENERGY']['S66-48-monoA-unCP' ] = 206.19608668
DATA['NUCLEAR REPULSION ENERGY']['S66-48-monoB-unCP' ] = 206.19869697
DATA['NUCLEAR REPULSION ENERGY']['S66-49-dimer' ] = 596.54644729
DATA['NUCLEAR REPULSION ENERGY']['S66-49-monoA-unCP' ] = 203.65045916
DATA['NUCLEAR REPULSION ENERGY']['S66-49-monoB-unCP' ] = 206.22459403
DATA['NUCLEAR REPULSION ENERGY']['S66-50-dimer' ] = 300.96547874
DATA['NUCLEAR REPULSION ENERGY']['S66-50-monoA-unCP' ] = 203.65156163
DATA['NUCLEAR REPULSION ENERGY']['S66-50-monoB-unCP' ] = 24.63554547
DATA['NUCLEAR REPULSION ENERGY']['S66-51-dimer' ] = 73.51391626
DATA['NUCLEAR REPULSION ENERGY']['S66-51-monoA-unCP' ] = 24.65072244
DATA['NUCLEAR REPULSION ENERGY']['S66-51-monoB-unCP' ] = 24.64312912
DATA['NUCLEAR REPULSION ENERGY']['S66-52-dimer' ] = 488.72204285
DATA['NUCLEAR REPULSION ENERGY']['S66-52-monoA-unCP' ] = 203.60587521
DATA['NUCLEAR REPULSION ENERGY']['S66-52-monoB-unCP' ] = 121.22680816
DATA['NUCLEAR REPULSION ENERGY']['S66-53-dimer' ] = 475.54833273
DATA['NUCLEAR REPULSION ENERGY']['S66-53-monoA-unCP' ] = 203.61290966
DATA['NUCLEAR REPULSION ENERGY']['S66-53-monoB-unCP' ] = 121.83743933
DATA['NUCLEAR REPULSION ENERGY']['S66-54-dimer' ] = 274.02041197
DATA['NUCLEAR REPULSION ENERGY']['S66-54-monoA-unCP' ] = 203.63390042
DATA['NUCLEAR REPULSION ENERGY']['S66-54-monoB-unCP' ] = 9.16766818
DATA['NUCLEAR REPULSION ENERGY']['S66-55-dimer' ] = 349.34385129
DATA['NUCLEAR REPULSION ENERGY']['S66-55-monoA-unCP' ] = 203.62143957
DATA['NUCLEAR REPULSION ENERGY']['S66-55-monoB-unCP' ] = 40.41522246
DATA['NUCLEAR REPULSION ENERGY']['S66-56-dimer' ] = 347.25412940
DATA['NUCLEAR REPULSION ENERGY']['S66-56-monoA-unCP' ] = 203.65859480
DATA['NUCLEAR REPULSION ENERGY']['S66-56-monoB-unCP' ] = 42.10725315
DATA['NUCLEAR REPULSION ENERGY']['S66-57-dimer' ] = 584.88796485
DATA['NUCLEAR REPULSION ENERGY']['S66-57-monoA-unCP' ] = 203.60060155
DATA['NUCLEAR REPULSION ENERGY']['S66-57-monoB-unCP' ] = 180.55180987
DATA['NUCLEAR REPULSION ENERGY']['S66-58-dimer' ] = 577.23538658
DATA['NUCLEAR REPULSION ENERGY']['S66-58-monoA-unCP' ] = 206.16864626
DATA['NUCLEAR REPULSION ENERGY']['S66-58-monoB-unCP' ] = 206.16860003
DATA['NUCLEAR REPULSION ENERGY']['S66-59-dimer' ] = 53.29797952
DATA['NUCLEAR REPULSION ENERGY']['S66-59-monoA-unCP' ] = 24.62604423
DATA['NUCLEAR REPULSION ENERGY']['S66-59-monoB-unCP' ] = 9.17684034
DATA['NUCLEAR REPULSION ENERGY']['S66-60-dimer' ] = 206.60195669
DATA['NUCLEAR REPULSION ENERGY']['S66-60-monoA-unCP' ] = 24.62574637
DATA['NUCLEAR REPULSION ENERGY']['S66-60-monoB-unCP' ] = 121.22795347
DATA['NUCLEAR REPULSION ENERGY']['S66-61-dimer' ] = 475.00612950
DATA['NUCLEAR REPULSION ENERGY']['S66-61-monoA-unCP' ] = 185.62492607
DATA['NUCLEAR REPULSION ENERGY']['S66-61-monoB-unCP' ] = 121.23972648
DATA['NUCLEAR REPULSION ENERGY']['S66-62-dimer' ] = 478.48168724
DATA['NUCLEAR REPULSION ENERGY']['S66-62-monoA-unCP' ] = 185.65184859
DATA['NUCLEAR REPULSION ENERGY']['S66-62-monoB-unCP' ] = 121.86597939
DATA['NUCLEAR REPULSION ENERGY']['S66-63-dimer' ] = 496.78090588
DATA['NUCLEAR REPULSION ENERGY']['S66-63-monoA-unCP' ] = 203.66095658
DATA['NUCLEAR REPULSION ENERGY']['S66-63-monoB-unCP' ] = 121.23566219
DATA['NUCLEAR REPULSION ENERGY']['S66-64-dimer' ] = 300.38789564
DATA['NUCLEAR REPULSION ENERGY']['S66-64-monoA-unCP' ] = 180.56185111
DATA['NUCLEAR REPULSION ENERGY']['S66-64-monoB-unCP' ] = 33.41895147
DATA['NUCLEAR REPULSION ENERGY']['S66-65-dimer' ] = 292.14525417
DATA['NUCLEAR REPULSION ENERGY']['S66-65-monoA-unCP' ] = 206.26607138
DATA['NUCLEAR REPULSION ENERGY']['S66-65-monoB-unCP' ] = 24.59915901
DATA['NUCLEAR REPULSION ENERGY']['S66-66-dimer' ] = 349.09867633
DATA['NUCLEAR REPULSION ENERGY']['S66-66-monoA-unCP' ] = 42.09376472
DATA['NUCLEAR REPULSION ENERGY']['S66-66-monoB-unCP' ] = 206.23491680
DATA['NUCLEAR REPULSION ENERGY']['S66-1-monoA-CP' ] = 9.15671411
DATA['NUCLEAR REPULSION ENERGY']['S66-1-monoB-CP' ] = 9.17259114
DATA['NUCLEAR REPULSION ENERGY']['S66-2-monoA-CP' ] = 9.14996836
DATA['NUCLEAR REPULSION ENERGY']['S66-2-monoB-CP' ] = 40.29463192
DATA['NUCLEAR REPULSION ENERGY']['S66-3-monoA-CP' ] = 9.12565570
DATA['NUCLEAR REPULSION ENERGY']['S66-3-monoB-CP' ] = 42.06267577
DATA['NUCLEAR REPULSION ENERGY']['S66-4-monoA-CP' ] = 9.13184124
DATA['NUCLEAR REPULSION ENERGY']['S66-4-monoB-CP' ] = 180.56084030
DATA['NUCLEAR REPULSION ENERGY']['S66-5-monoA-CP' ] = 40.41731272
DATA['NUCLEAR REPULSION ENERGY']['S66-5-monoB-CP' ] = 40.29806380
DATA['NUCLEAR REPULSION ENERGY']['S66-6-monoA-CP' ] = 40.42467073
DATA['NUCLEAR REPULSION ENERGY']['S66-6-monoB-CP' ] = 42.05202847
DATA['NUCLEAR REPULSION ENERGY']['S66-7-monoA-CP' ] = 40.41876218
DATA['NUCLEAR REPULSION ENERGY']['S66-7-monoB-CP' ] = 180.73873695
DATA['NUCLEAR REPULSION ENERGY']['S66-8-monoA-CP' ] = 40.42326344
DATA['NUCLEAR REPULSION ENERGY']['S66-8-monoB-CP' ] = 9.17236900
DATA['NUCLEAR REPULSION ENERGY']['S66-9-monoA-CP' ] = 42.10593235
DATA['NUCLEAR REPULSION ENERGY']['S66-9-monoB-CP' ] = 40.34710761
DATA['NUCLEAR REPULSION ENERGY']['S66-10-monoA-CP' ] = 42.09217552
DATA['NUCLEAR REPULSION ENERGY']['S66-10-monoB-CP' ] = 42.05982938
DATA['NUCLEAR REPULSION ENERGY']['S66-11-monoA-CP' ] = 42.09328618
DATA['NUCLEAR REPULSION ENERGY']['S66-11-monoB-CP' ] = 180.72211450
DATA['NUCLEAR REPULSION ENERGY']['S66-12-monoA-CP' ] = 42.04336531
DATA['NUCLEAR REPULSION ENERGY']['S66-12-monoB-CP' ] = 9.12312499
DATA['NUCLEAR REPULSION ENERGY']['S66-13-monoA-CP' ] = 180.80545988
DATA['NUCLEAR REPULSION ENERGY']['S66-13-monoB-CP' ] = 40.30378877
DATA['NUCLEAR REPULSION ENERGY']['S66-14-monoA-CP' ] = 180.81499576
DATA['NUCLEAR REPULSION ENERGY']['S66-14-monoB-CP' ] = 42.03791353
DATA['NUCLEAR REPULSION ENERGY']['S66-15-monoA-CP' ] = 180.53794513
DATA['NUCLEAR REPULSION ENERGY']['S66-15-monoB-CP' ] = 180.54327910
DATA['NUCLEAR REPULSION ENERGY']['S66-16-monoA-CP' ] = 180.57089645
DATA['NUCLEAR REPULSION ENERGY']['S66-16-monoB-CP' ] = 9.17374713
DATA['NUCLEAR REPULSION ENERGY']['S66-17-monoA-CP' ] = 357.25263911
DATA['NUCLEAR REPULSION ENERGY']['S66-17-monoB-CP' ] = 357.22824169
DATA['NUCLEAR REPULSION ENERGY']['S66-18-monoA-CP' ] = 9.12915636
DATA['NUCLEAR REPULSION ENERGY']['S66-18-monoB-CP' ] = 206.28546361
DATA['NUCLEAR REPULSION ENERGY']['S66-19-monoA-CP' ] = 40.42190801
DATA['NUCLEAR REPULSION ENERGY']['S66-19-monoB-CP' ] = 206.28426737
DATA['NUCLEAR REPULSION ENERGY']['S66-20-monoA-CP' ] = 121.35354216
DATA['NUCLEAR REPULSION ENERGY']['S66-20-monoB-CP' ] = 121.35037507
DATA['NUCLEAR REPULSION ENERGY']['S66-21-monoA-CP' ] = 121.85534909
DATA['NUCLEAR REPULSION ENERGY']['S66-21-monoB-CP' ] = 121.85562743
DATA['NUCLEAR REPULSION ENERGY']['S66-22-monoA-CP' ] = 121.30606379
DATA['NUCLEAR REPULSION ENERGY']['S66-22-monoB-CP' ] = 357.30242624
DATA['NUCLEAR REPULSION ENERGY']['S66-23-monoA-CP' ] = 121.91206440
DATA['NUCLEAR REPULSION ENERGY']['S66-23-monoB-CP' ] = 357.16987646
DATA['NUCLEAR REPULSION ENERGY']['S66-24-monoA-CP' ] = 203.71200257
DATA['NUCLEAR REPULSION ENERGY']['S66-24-monoB-CP' ] = 203.71172379
DATA['NUCLEAR REPULSION ENERGY']['S66-25-monoA-CP' ] = 206.22564193
DATA['NUCLEAR REPULSION ENERGY']['S66-25-monoB-CP' ] = 206.22748415
DATA['NUCLEAR REPULSION ENERGY']['S66-26-monoA-CP' ] = 357.16027337
DATA['NUCLEAR REPULSION ENERGY']['S66-26-monoB-CP' ] = 357.16027370
DATA['NUCLEAR REPULSION ENERGY']['S66-27-monoA-CP' ] = 203.68422363
DATA['NUCLEAR REPULSION ENERGY']['S66-27-monoB-CP' ] = 206.25955744
DATA['NUCLEAR REPULSION ENERGY']['S66-28-monoA-CP' ] = 203.65134501
DATA['NUCLEAR REPULSION ENERGY']['S66-28-monoB-CP' ] = 357.16948119
DATA['NUCLEAR REPULSION ENERGY']['S66-29-monoA-CP' ] = 206.16040036
DATA['NUCLEAR REPULSION ENERGY']['S66-29-monoB-CP' ] = 357.23565563
DATA['NUCLEAR REPULSION ENERGY']['S66-30-monoA-CP' ] = 203.74228045
DATA['NUCLEAR REPULSION ENERGY']['S66-30-monoB-CP' ] = 33.43000301
DATA['NUCLEAR REPULSION ENERGY']['S66-31-monoA-CP' ] = 357.18726739
DATA['NUCLEAR REPULSION ENERGY']['S66-31-monoB-CP' ] = 33.40409180
DATA['NUCLEAR REPULSION ENERGY']['S66-32-monoA-CP' ] = 357.24995067
DATA['NUCLEAR REPULSION ENERGY']['S66-32-monoB-CP' ] = 24.63459975
DATA['NUCLEAR REPULSION ENERGY']['S66-33-monoA-CP' ] = 206.29228895
DATA['NUCLEAR REPULSION ENERGY']['S66-33-monoB-CP' ] = 33.42391806
DATA['NUCLEAR REPULSION ENERGY']['S66-34-monoA-CP' ] = 185.63664994
DATA['NUCLEAR REPULSION ENERGY']['S66-34-monoB-CP' ] = 185.63558546
DATA['NUCLEAR REPULSION ENERGY']['S66-35-monoA-CP' ] = 185.63471242
DATA['NUCLEAR REPULSION ENERGY']['S66-35-monoB-CP' ] = 199.36895747
DATA['NUCLEAR REPULSION ENERGY']['S66-36-monoA-CP' ] = 199.35493735
DATA['NUCLEAR REPULSION ENERGY']['S66-36-monoB-CP' ] = 199.35496470
DATA['NUCLEAR REPULSION ENERGY']['S66-37-monoA-CP' ] = 188.28929834
DATA['NUCLEAR REPULSION ENERGY']['S66-37-monoB-CP' ] = 199.34481507
DATA['NUCLEAR REPULSION ENERGY']['S66-38-monoA-CP' ] = 188.38358820
DATA['NUCLEAR REPULSION ENERGY']['S66-38-monoB-CP' ] = 188.37865241
DATA['NUCLEAR REPULSION ENERGY']['S66-39-monoA-CP' ] = 203.67735882
DATA['NUCLEAR REPULSION ENERGY']['S66-39-monoB-CP' ] = 188.40454306
DATA['NUCLEAR REPULSION ENERGY']['S66-40-monoA-CP' ] = 203.68538784
DATA['NUCLEAR REPULSION ENERGY']['S66-40-monoB-CP' ] = 199.37329650
DATA['NUCLEAR REPULSION ENERGY']['S66-41-monoA-CP' ] = 357.06617642
DATA['NUCLEAR REPULSION ENERGY']['S66-41-monoB-CP' ] = 185.61673585
DATA['NUCLEAR REPULSION ENERGY']['S66-42-monoA-CP' ] = 357.04169352
DATA['NUCLEAR REPULSION ENERGY']['S66-42-monoB-CP' ] = 188.33728572
DATA['NUCLEAR REPULSION ENERGY']['S66-43-monoA-CP' ] = 357.12713115
DATA['NUCLEAR REPULSION ENERGY']['S66-43-monoB-CP' ] = 199.36153551
DATA['NUCLEAR REPULSION ENERGY']['S66-44-monoA-CP' ] = 33.42556566
DATA['NUCLEAR REPULSION ENERGY']['S66-44-monoB-CP' ] = 185.65594848
DATA['NUCLEAR REPULSION ENERGY']['S66-45-monoA-CP' ] = 24.64923587
DATA['NUCLEAR REPULSION ENERGY']['S66-45-monoB-CP' ] = 185.73197134
DATA['NUCLEAR REPULSION ENERGY']['S66-46-monoA-CP' ] = 180.49044991
DATA['NUCLEAR REPULSION ENERGY']['S66-46-monoB-CP' ] = 185.67687994
DATA['NUCLEAR REPULSION ENERGY']['S66-47-monoA-CP' ] = 203.66921988
DATA['NUCLEAR REPULSION ENERGY']['S66-47-monoB-CP' ] = 203.67694204
DATA['NUCLEAR REPULSION ENERGY']['S66-48-monoA-CP' ] = 206.19608668
DATA['NUCLEAR REPULSION ENERGY']['S66-48-monoB-CP' ] = 206.19869697
DATA['NUCLEAR REPULSION ENERGY']['S66-49-monoA-CP' ] = 203.65045916
DATA['NUCLEAR REPULSION ENERGY']['S66-49-monoB-CP' ] = 206.22459403
DATA['NUCLEAR REPULSION ENERGY']['S66-50-monoA-CP' ] = 203.65156163
DATA['NUCLEAR REPULSION ENERGY']['S66-50-monoB-CP' ] = 24.63554547
DATA['NUCLEAR REPULSION ENERGY']['S66-51-monoA-CP' ] = 24.65072244
DATA['NUCLEAR REPULSION ENERGY']['S66-51-monoB-CP' ] = 24.64312912
DATA['NUCLEAR REPULSION ENERGY']['S66-52-monoA-CP' ] = 203.60587521
DATA['NUCLEAR REPULSION ENERGY']['S66-52-monoB-CP' ] = 121.22680816
DATA['NUCLEAR REPULSION ENERGY']['S66-53-monoA-CP' ] = 203.61290966
DATA['NUCLEAR REPULSION ENERGY']['S66-53-monoB-CP' ] = 121.83743933
DATA['NUCLEAR REPULSION ENERGY']['S66-54-monoA-CP' ] = 203.63390042
DATA['NUCLEAR REPULSION ENERGY']['S66-54-monoB-CP' ] = 9.16766818
DATA['NUCLEAR REPULSION ENERGY']['S66-55-monoA-CP' ] = 203.62143957
DATA['NUCLEAR REPULSION ENERGY']['S66-55-monoB-CP' ] = 40.41522246
DATA['NUCLEAR REPULSION ENERGY']['S66-56-monoA-CP' ] = 203.65859480
DATA['NUCLEAR REPULSION ENERGY']['S66-56-monoB-CP' ] = 42.10725315
DATA['NUCLEAR REPULSION ENERGY']['S66-57-monoA-CP' ] = 203.60060155
DATA['NUCLEAR REPULSION ENERGY']['S66-57-monoB-CP' ] = 180.55180987
DATA['NUCLEAR REPULSION ENERGY']['S66-58-monoA-CP' ] = 206.16864626
DATA['NUCLEAR REPULSION ENERGY']['S66-58-monoB-CP' ] = 206.16860003
DATA['NUCLEAR REPULSION ENERGY']['S66-59-monoA-CP' ] = 24.62604423
DATA['NUCLEAR REPULSION ENERGY']['S66-59-monoB-CP' ] = 9.17684034
DATA['NUCLEAR REPULSION ENERGY']['S66-60-monoA-CP' ] = 24.62574637
DATA['NUCLEAR REPULSION ENERGY']['S66-60-monoB-CP' ] = 121.22795347
DATA['NUCLEAR REPULSION ENERGY']['S66-61-monoA-CP' ] = 185.62492607
DATA['NUCLEAR REPULSION ENERGY']['S66-61-monoB-CP' ] = 121.23972648
DATA['NUCLEAR REPULSION ENERGY']['S66-62-monoA-CP' ] = 185.65184859
DATA['NUCLEAR REPULSION ENERGY']['S66-62-monoB-CP' ] = 121.86597939
DATA['NUCLEAR REPULSION ENERGY']['S66-63-monoA-CP' ] = 203.66095658
DATA['NUCLEAR REPULSION ENERGY']['S66-63-monoB-CP' ] = 121.23566219
DATA['NUCLEAR REPULSION ENERGY']['S66-64-monoA-CP' ] = 180.56185111
DATA['NUCLEAR REPULSION ENERGY']['S66-64-monoB-CP' ] = 33.41895147
DATA['NUCLEAR REPULSION ENERGY']['S66-65-monoA-CP' ] = 206.26607138
DATA['NUCLEAR REPULSION ENERGY']['S66-65-monoB-CP' ] = 24.59915901
DATA['NUCLEAR REPULSION ENERGY']['S66-66-monoA-CP' ] = 42.09376472
DATA['NUCLEAR REPULSION ENERGY']['S66-66-monoB-CP' ] = 206.23491680
| lgpl-3.0 | 3,411,172,288,187,390,000 | 54.537799 | 116 | 0.516459 | false |
jonyroda97/redbot-amigosprovaveis | lib/youtube_dl/extractor/go.py | 1 | 9185 | # coding: utf-8
from __future__ import unicode_literals
import re
from .adobepass import AdobePassIE
from ..utils import (
int_or_none,
determine_ext,
parse_age_limit,
urlencode_postdata,
ExtractorError,
)
class GoIE(AdobePassIE):
_SITE_INFO = {
'abc': {
'brand': '001',
'requestor_id': 'ABC',
},
'freeform': {
'brand': '002',
'requestor_id': 'ABCFamily',
},
'watchdisneychannel': {
'brand': '004',
'requestor_id': 'Disney',
},
'watchdisneyjunior': {
'brand': '008',
'requestor_id': 'DisneyJunior',
},
'watchdisneyxd': {
'brand': '009',
'requestor_id': 'DisneyXD',
}
}
_VALID_URL = r'https?://(?:(?P<sub_domain>%s)\.)?go\.com/(?:(?:[^/]+/)*(?P<id>vdka\w+)|(?:[^/]+/)*(?P<display_id>[^/?#]+))'\
% '|'.join(list(_SITE_INFO.keys()) + ['disneynow'])
_TESTS = [{
'url': 'http://abc.go.com/shows/designated-survivor/video/most-recent/VDKA3807643',
'info_dict': {
'id': 'VDKA3807643',
'ext': 'mp4',
'title': 'The Traitor in the White House',
'description': 'md5:05b009d2d145a1e85d25111bd37222e8',
},
'params': {
# m3u8 download
'skip_download': True,
},
}, {
'url': 'http://watchdisneyxd.go.com/doraemon',
'info_dict': {
'title': 'Doraemon',
'id': 'SH55574025',
},
'playlist_mincount': 51,
}, {
'url': 'http://abc.go.com/shows/the-catch/episode-guide/season-01/10-the-wedding',
'only_matching': True,
}, {
'url': 'http://abc.go.com/shows/world-news-tonight/episode-guide/2017-02/17-021717-intense-stand-off-between-man-with-rifle-and-police-in-oakland',
'only_matching': True,
}, {
# brand 004
'url': 'http://disneynow.go.com/shows/big-hero-6-the-series/season-01/episode-10-mr-sparkles-loses-his-sparkle/vdka4637915',
'only_matching': True,
}, {
# brand 008
'url': 'http://disneynow.go.com/shows/minnies-bow-toons/video/happy-campers/vdka4872013',
'only_matching': True,
}]
def _extract_videos(self, brand, video_id='-1', show_id='-1'):
display_id = video_id if video_id != '-1' else show_id
return self._download_json(
'http://api.contents.watchabc.go.com/vp2/ws/contents/3000/videos/%s/001/-1/%s/-1/%s/-1/-1.json' % (brand, show_id, video_id),
display_id)['video']
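    # NB: in the content-API URL above the '-1' path segments appear to act as
    # wildcards; brand, show_id and video_id are substituted positionally, with
    # '-1' standing for "any" (hence the '-1' defaults in the signature).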
def _real_extract(self, url):
sub_domain, video_id, display_id = re.match(self._VALID_URL, url).groups()
site_info = self._SITE_INFO.get(sub_domain, {})
brand = site_info.get('brand')
if not video_id or not site_info:
webpage = self._download_webpage(url, display_id or video_id)
video_id = self._search_regex(
# There may be inner quotes, e.g. data-video-id="'VDKA3609139'"
# from http://freeform.go.com/shows/shadowhunters/episodes/season-2/1-this-guilty-blood
r'data-video-id=["\']*(VDKA\w+)', webpage, 'video id',
default=None)
if not site_info:
brand = self._search_regex(
(r'data-brand=\s*["\']\s*(\d+)',
r'data-page-brand=\s*["\']\s*(\d+)'), webpage, 'brand',
default='004')
site_info = next(
si for _, si in self._SITE_INFO.items()
if si.get('brand') == brand)
if not video_id:
# show extraction works for Disney, DisneyJunior and DisneyXD
                # ABC and Freeform have a different layout
show_id = self._search_regex(r'data-show-id=["\']*(SH\d+)', webpage, 'show id')
videos = self._extract_videos(brand, show_id=show_id)
show_title = self._search_regex(r'data-show-title="([^"]+)"', webpage, 'show title', fatal=False)
entries = []
for video in videos:
entries.append(self.url_result(
video['url'], 'Go', video.get('id'), video.get('title')))
entries.reverse()
return self.playlist_result(entries, show_id, show_title)
video_data = self._extract_videos(brand, video_id)[0]
video_id = video_data['id']
title = video_data['title']
formats = []
for asset in video_data.get('assets', {}).get('asset', []):
asset_url = asset.get('value')
if not asset_url:
continue
format_id = asset.get('format')
ext = determine_ext(asset_url)
if ext == 'm3u8':
video_type = video_data.get('type')
data = {
'video_id': video_data['id'],
'video_type': video_type,
'brand': brand,
'device': '001',
}
if video_data.get('accesslevel') == '1':
requestor_id = site_info['requestor_id']
resource = self._get_mvpd_resource(
requestor_id, title, video_id, None)
auth = self._extract_mvpd_auth(
url, video_id, requestor_id, resource)
data.update({
'token': auth,
'token_type': 'ap',
'adobe_requestor_id': requestor_id,
})
else:
self._initialize_geo_bypass({'countries': ['US']})
entitlement = self._download_json(
'https://api.entitlement.watchabc.go.com/vp2/ws-secure/entitlement/2020/authorize.json',
video_id, data=urlencode_postdata(data))
errors = entitlement.get('errors', {}).get('errors', [])
if errors:
for error in errors:
if error.get('code') == 1002:
self.raise_geo_restricted(
error['message'], countries=['US'])
error_message = ', '.join([error['message'] for error in errors])
raise ExtractorError('%s said: %s' % (self.IE_NAME, error_message), expected=True)
asset_url += '?' + entitlement['uplynkData']['sessionKey']
formats.extend(self._extract_m3u8_formats(
asset_url, video_id, 'mp4', m3u8_id=format_id or 'hls', fatal=False))
else:
f = {
'format_id': format_id,
'url': asset_url,
'ext': ext,
}
if re.search(r'(?:/mp4/source/|_source\.mp4)', asset_url):
f.update({
'format_id': ('%s-' % format_id if format_id else '') + 'SOURCE',
'preference': 1,
})
else:
mobj = re.search(r'/(\d+)x(\d+)/', asset_url)
if mobj:
height = int(mobj.group(2))
f.update({
'format_id': ('%s-' % format_id if format_id else '') + '%dP' % height,
'width': int(mobj.group(1)),
'height': height,
})
formats.append(f)
self._sort_formats(formats)
subtitles = {}
for cc in video_data.get('closedcaption', {}).get('src', []):
cc_url = cc.get('value')
if not cc_url:
continue
ext = determine_ext(cc_url)
if ext == 'xml':
ext = 'ttml'
subtitles.setdefault(cc.get('lang'), []).append({
'url': cc_url,
'ext': ext,
})
thumbnails = []
for thumbnail in video_data.get('thumbnails', {}).get('thumbnail', []):
thumbnail_url = thumbnail.get('value')
if not thumbnail_url:
continue
thumbnails.append({
'url': thumbnail_url,
'width': int_or_none(thumbnail.get('width')),
'height': int_or_none(thumbnail.get('height')),
})
return {
'id': video_id,
'title': title,
'description': video_data.get('longdescription') or video_data.get('description'),
'duration': int_or_none(video_data.get('duration', {}).get('value'), 1000),
'age_limit': parse_age_limit(video_data.get('tvrating', {}).get('rating')),
'episode_number': int_or_none(video_data.get('episodenumber')),
'series': video_data.get('show', {}).get('title'),
'season_number': int_or_none(video_data.get('season', {}).get('num')),
'thumbnails': thumbnails,
'formats': formats,
'subtitles': subtitles,
}
| gpl-3.0 | 2,572,050,881,610,135,600 | 41.133028 | 155 | 0.470876 | false |
SunDwarf/Kyoukai | kyoukai/app.py | 1 | 15848 | """
The core application.
"""
import asyncio
import logging
import traceback
from asphalt.core import Context, run_application
from werkzeug.exceptions import NotFound, MethodNotAllowed, HTTPException, InternalServerError, \
BadRequestKeyError
from werkzeug.routing import RequestRedirect, Map
from werkzeug.wrappers import Request, Response
from kyoukai.asphalt import HTTPRequestContext
from kyoukai.blueprint import Blueprint
__version__ = "2.2.1.post1"
version_format = "Kyoukai/{}".format(__version__)
logger = logging.getLogger("Kyoukai")
class Kyoukai(object):
"""
    The Kyoukai type is the core of the Kyoukai framework, and of any web
    application built upon it. It acts as a central router and request
    processor that takes in requests from the protocol layer and returns
    responses.
The application name is currently unused, but it is good practice to set it correctly anyway in
case it is used in future editions of Kyoukai.
You normally create an application instance inside your component file, like so:
.. code-block:: python
from kyoukai.app import Kyoukai
... # setup code
kyk = Kyoukai("my_app")
kyk.register_blueprint(whatever)
... # other setup
class MyContainer(ContainerComponent):
async def start(self, ctx):
self.add_component('kyoukai', KyoukaiComponent, ip="127.0.0.1", port=4444,
app="app:app")
Of course, you can also embed Kyoukai inside another app, by awaiting :meth:`Kyoukai.start`.
"""
#: The class of request to spawn every request.
#: This should be a subclass of :class:`werkzeug.wrappers.Request`.
#: You can override this by passing ``request_class`` as a keyword argument to the app.
request_class = Request
#: The class of response to wrap automatically.
#: This should be a subclass of :class:`werkzeug.wrappers.Response`.
#: You can override this by passing ``response_class`` as a keyword argument to the app.
response_class = Response
def __init__(self,
application_name: str,
*,
server_name: str = None,
**kwargs):
"""
:param application_name: The name of the application that is being created. This is \
passed to the :class:`.Blueprint` being created as the root blueprint.
This is used in ``url_for``, for example, to get the endpoint of routes registered to \
the root Blueprint.
:param server_name: Keyword-only. The SERVER_NAME to use inside the fake WSGI environment \
created for ``url_for``, if applicable.
:param host_matching: Should host matching be enabled? This will be implicitly True if \
``host`` is not None.
:param host: The host used for host matching, to be passed to the root Blueprint.
By default, no host is used, so all hosts are matched on the root Blueprint.
:param application_root: Keyword-only. The APPLICATION_ROOT to use inside the fake WSGI \
environment created for ``url_for``, if applicable.
:param loop: Keyword-only. The asyncio event loop to use for this app. If no loop is \
specified it, will be automatically fetched using :meth:`asyncio.get_event_loop`.
:param request_class: Keyword-only. The custom request class to instantiate requests with.
:param response_class: Keyword-only. The custom response class to instantiate responses \
with.
:param context_class: Keyword-only. The :class:`.Context` subclass to use when creating a \
context. Defaults to :class:`.HTTPRequestContext`.
"""
self.name = application_name
self.server_name = server_name
# Try and get the loop from the keyword arguments - don't automatically perform
# `get_event_loop`.
self.loop = kwargs.pop("loop", None)
if not self.loop:
self.loop = asyncio.get_event_loop()
# Create the root blueprint.
self._root_bp = Blueprint(application_name, host=kwargs.get("host"),
host_matching=kwargs.get("host_matching", False))
# The current Component that is running this app.
self.component = None
# The Request/Response classes.
self.request_class = kwargs.pop("request_class", self.request_class)
self.response_class = kwargs.pop("response_class", self.response_class)
#: The context class.
self.context_class = kwargs.pop("context_class", HTTPRequestContext)
# Is this app set to debug mode?
self.debug = False
# Any extra config.
self.config = kwargs
@property
def root(self) -> Blueprint:
"""
:return: The root Blueprint for the routing tree.
"""
return self._root_bp
def register_blueprint(self, child: Blueprint):
"""
Registers a child blueprint to this app's root Blueprint.
        This will set up the Blueprint tree, as well as the routing table once the app is finalized.
:param child: The child Blueprint to add. This must be an instance of :class:`~.Blueprint`.
"""
return self.root.add_child(child)
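    # A minimal registration sketch (hypothetical blueprint and route):
    #
    #   api = Blueprint("api")
    #
    #   @api.route("/ping")
    #   async def ping(ctx):
    #       return "pong"
    #
    #   app.register_blueprint(api)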
def finalize(self, **map_options) -> Map:
"""
Finalizes the app and blueprints.
This will calculate the current :class:`werkzeug.routing.Map` which is required for
routing to work.
:param map_options: The options to pass to the Map for routing.
"""
self.debug = self.config.get("debug", False)
return self.root.finalize(**map_options)
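    # NB: finalize() must be called after all blueprints have been registered
    # and before the first request is handled; process_request() below raises
    # a RuntimeError otherwise.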
# Magic methods
def __getattr__(self, item: str) -> object:
"""
Override for __getattr__ to allow transparent mirroring onto the root Blueprint.
For example, this allows doing ``@app.route`` instead of ``@app.root.route``.
"""
if item in ("route", "errorhandler", "add_errorhandler", "add_route", "wrap_route",
"url_for", "before_request", "add_hook", "after_request",
"add_route_group"):
return getattr(self.root, item)
raise AttributeError("'{.__class__.__name__}' object has no attribute {}"
.format(self, item))
def log_route(self, request: Request, code: int):
"""
Logs a route invocation.
:param request: The request produced.
:param code: The response code of the route.
"""
fmtted = "{} {} - {}".format(request.method, request.path, code)
logger.info(fmtted)
async def handle_httpexception(self, ctx: HTTPRequestContext, exception: HTTPException,
environ: dict = None) -> Response:
"""
        Handle an HTTP exception.
:param ctx: The context of the request.
:param exception: The HTTPException to handle.
:param environ: The fake WSGI environment.
:return: A :class:`werkzeug.wrappers.Response` that handles this response.
"""
# Try and load the error handler recursively from the ctx.route.blueprint.
bp = ctx.bp or self.root
if environ is None:
environ = ctx.environ
cbl = lambda environ: Response("Internal server error during processing. Report this.",
status=500)
error_handler = bp.get_errorhandler(exception)
if not error_handler:
# Try the root Blueprint. This may happen if the blueprint requested isn't registered
# properly in the root, for some reason.
error_handler = self.root.get_errorhandler(exception)
if not error_handler:
# Just return the Exception's get_response.
cbl = exception.get_response
else:
# Try and invoke the error handler to get the Response.
# Wrap it in the try/except, so we can handle a default one.
try:
res = await error_handler.invoke(ctx, args=(exception,))
# hacky way of unifying everything
cbl = lambda environ: res
except HTTPException as e:
# why tho?
logger.warning("Error handler function raised another error, using the "
"response from that...")
cbl = e.get_response
except Exception as e:
logger.exception("Error in error handler!")
cbl = InternalServerError(e).get_response
# else:
# result = wrap_response(result, self.response_class)
try:
result = cbl(environ=environ)
except Exception:
# ok
logger.critical("Whilst handling a {}, response.get_response ({}) raised exception"
.format(exception.code, cbl), exc_info=True)
result = Response("Critical server error. Your application is broken.",
status=500)
if result.status_code != exception.code:
logger.warning("Error handler {} returned code {} when exception was code {}..."
.format(error_handler.callable_repr, result.status_code,
exception.code))
return result
async def process_request(self, request: Request, parent_context: Context) -> Response:
"""
Processes a Request and returns a Response object.
        This is the main processing method of Kyoukai; it is meant to be used
        by one of the HTTP server backends, not by client code.
:param request: \
The :class:`werkzeug.wrappers.Request` object to process.
A new :class:`~.HTTPRequestContext` will be provided to wrap this request inside of \
to client code.
:param parent_context: \
The :class:`asphalt.core.Context` that is the parent context for this particular app.
It will be used as the parent for the HTTPRequestContext.
:return: A :class:`werkzeug.wrappers.Response` object that can be written to the client \
as a response.
"""
if not self.root.finalized:
raise RuntimeError("App was not finalized")
# Create a new HTTPRequestContext.
ctx = self.context_class(parent_context, request)
ctx.app = self
async with ctx:
# Call match on our Blueprint to find the request.
try:
matched, params, rule = self.root.match(request.environ)
ctx.params = params
ctx.rule = rule
except NotFound as e:
# No route matched.
self.log_route(ctx.request, 404)
logger.debug("Could not resolve route for {request.path}."
.format(request=request))
return await self.handle_httpexception(ctx, e, request.environ)
except MethodNotAllowed as e:
# 405 method not allowed
self.log_route(ctx.request, 405)
logger.debug("Could not resolve valid method for "
"{request.path} ({request.method})".format(request=request))
return await self.handle_httpexception(ctx, e, request.environ)
except RequestRedirect as e:
# slashes etc
# user code is not allowed to handle this
self.log_route(ctx.request, 307)
e.code = 307
return e.get_response(request.environ)
else:
ctx.route_matched.dispatch(ctx=ctx)
ctx.route = matched
ctx.bp = ctx.route.bp
result = None
# Invoke the route.
try:
ctx.route_invoked.dispatch(ctx=ctx)
# INTERCEPT
if ctx.request.method.upper() == "OPTIONS":
# NO USER CODE HERE HEHEHEHEHE
# instead, we need to return an Allow: header
# kyoukai autocalcs this
result = Response(status=204)
result.headers["Allow"] = ",".join(x for x in ctx.rule.methods if x !=
"OPTIONS")
else:
result = await matched.invoke(ctx, params=params)
except BadRequestKeyError as e:
logger.info("BadRequestKeyError: {}".format(' '.join(e.args)), exc_info=True)
result = await self.handle_httpexception(ctx, e, request.environ)
except HTTPException as e:
fmtted = traceback.format_exception(type(e), e, e.__traceback__)
logger.debug(''.join(fmtted))
logger.info(
"Hit HTTPException ({}) inside function, delegating.".format(str(e))
)
result = await self.handle_httpexception(ctx, e, request.environ)
except Exception as e:
logger.exception("Unhandled exception in route function")
new_e = InternalServerError()
new_e.__cause__ = e
result = await self.handle_httpexception(ctx, new_e, request.environ)
else:
ctx.route_completed.dispatch(ctx=ctx, result=result)
finally:
# result = wrap_response(result, self.response_class)
if result:
# edge cases
self.log_route(ctx.request, result.status_code)
# Update the Server header.
result.headers["Server"] = version_format
# list means wsgi response probably
if not isinstance(result.response, (bytes, str, list)):
result.set_data(str(result.response))
result.headers["X-Powered-By"] = version_format
# Return the new Response.
return result
async def start(self, ip: str = "127.0.0.1", port: int = 4444, *,
component=None, base_context: Context = None):
"""
Runs the Kyoukai component asynchronously.
This will bypass Asphalt's default runner, and allow you to run your app easily inside
something else, for example.
:param ip: The IP of the built-in server.
:param port: The port of the built-in server.
:param component: The component to start the app with. This should be an instance of \
:class:`~.KyoukaiComponent`.
:param base_context: The base context that the HTTPRequestContext should be started with.
"""
if not base_context:
base_context = Context()
if not component:
from kyoukai.asphalt import KyoukaiComponent
self.component = KyoukaiComponent(self, ip, port)
else:
self.component = component
# Start the app.
await self.component.start(base_context)
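    # Embedding sketch (assumes ``app`` is a Kyoukai instance defined
    # elsewhere; not a definitive recipe):
    #
    #   loop = asyncio.get_event_loop()
    #   loop.run_until_complete(app.start(ip="0.0.0.0", port=8080))
    #   loop.run_forever()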
def run(self, ip: str = "127.0.0.1", port: int = 4444, *,
component=None):
"""
Runs the Kyoukai server from within your code.
This is not normally invoked - instead Asphalt should invoke the Kyoukai component.
However, this is here for convenience.
"""
if not component:
from kyoukai.asphalt import KyoukaiComponent
component = KyoukaiComponent(self, ip, port)
run_application(component)
| mit | 6,110,582,815,789,452,000 | 39.74036 | 100 | 0.585247 | false |
RoboCupULaval/UI-Debug | Controller/DrawingObject/TextDrawing.py | 1 | 1227 | # Under MIT License, see LICENSE.txt
from Controller.DrawingObject.BaseDrawingObject import BaseDrawingObject
from Controller.QtToolBox import QtToolBox
from Model.DataObject.DrawingData.DrawTextDataIn import DrawTextDataIn
__author__ = 'RoboCupULaval'
class TextDrawing(BaseDrawingObject):
def __init__(self, data_in):
BaseDrawingObject.__init__(self, data_in)
def draw(self, painter):
# TODO Add alignment
if self.isVisible():
data = self.data
painter.setPen(QtToolBox.create_pen(color=data['color'],
width=data['size']))
painter.setBrush(QtToolBox.create_brush(data['color']))
painter.setFont(QtToolBox.create_font(style=data['font'],
width=data['size'],
is_bold=data['has_bold'],
is_italic=data['has_italic']))
x, y, _ = QtToolBox.field_ctrl.convert_real_to_scene_pst(*data['position'])
painter.drawText(x, y, data['text'])
@staticmethod
def get_datain_associated():
return DrawTextDataIn.__name__
| mit | 5,168,425,059,868,672,000 | 38.580645 | 87 | 0.564792 | false |
anna-effeindzourou/trunk | examples/anna_scripts/triax/debug/triaxial.py | 1 | 3282 | # -*- coding: utf-8 -*-
from yade import ymport, utils,pack,export,qt
import gts,os
from yade import geom
#import matplotlib
from yade import plot
#from pylab import *
#import os.path, locale
#################################
##### FUNCTIONS ####
#################################
def writeFile():
yade.export.text('spheres_1e-02.txt')
####################
### MATERIAL ###
####################
poisson=0.28
E=2*7.9e10*(1+poisson) ##1e11
density=7.8e8
frictionAngle=0.096
frictionAngleW=0.228
O.materials.append(FrictMat(young=E,poisson=poisson,density=density,frictionAngle=frictionAngleW,label='Wallmat'))
O.materials.append(FrictMat(young=E,poisson=poisson,density=density,frictionAngle=frictionAngle,label='Smat'))
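# E above follows from a shear modulus G = 7.9e10 Pa (steel) via E = 2*G*(1+nu);
# the density is several orders of magnitude above steel's ~7.8e3 kg/m^3,
# presumably mass-scaled to enlarge the stable time step in quasi-static runs.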
##########################
### SPHERE PACKING ###
##########################
#### Parameters of the cylinder ###
L=0.203 # height [m]
l=0.0505 # radius [m]
color=[155./255.,155./255.,100./255.]
radius=1e-02
kwBoxes={'color':[1,0,0],'wire':True,'dynamic':False,'material':1}
O.bodies.append(utils.geom.facetCylinder(center=Vector3(0,0,L/2.), radius=l, height=L, orientation=Quaternion((1, 0, 0), 0),**kwBoxes))
### erase the top and bottom facets of the cylinder
for i in range(0,40,4):
O.bodies.erase(i)
for i in range(1,38,4):
O.bodies.erase(i)
predicate=inCylinder(centerBottom=Vector3(0,0,0), centerTop=Vector3(0,0,L+L/2.), radius=l-0.005)
sp=SpherePack()
sp=pack.randomDensePack(predicate, radius=radius, material='Smat', cropLayers=10, rRelFuzz=0.0, spheresInCell=100,returnSpherePack=True)
sp.toSimulation()
########################
#### WALL GENERATION ##
########################
O.materials.append(FrictMat(young=E,poisson=poisson,density=density,frictionAngle=frictionAngleW,label='Wmat'))
topPlate=utils.wall(position=hMax(2)+radius*10,sense=0, axis=2,color=Vector3(1,0,0),material='Wmat')
O.bodies.append(topPlate)
bottomPlate=utils.wall(position=0,sense=0, axis=2,color=Vector3(1,0,0),material='Wmat')
O.bodies.append(bottomPlate)
######################
#### MOVE TOP WALL ##
######################
v=1.7e-03
def movewall(v):
topPlate.state.pos=Vector3(0,0,hMax(2)+radius)
topPlate.state.vel=Vector3(0,0,-v)
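# NB: movewall() is not invoked anywhere in this script; presumably it is
# called by hand (e.g. movewall(v)) once the packing has settled under gravity.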
def dataCollector():
S=pi*l**2
Fnt=O.forces.f(topPlate.id)[2]
Fnb=O.forces.f(bottomPlate.id)[2]
#sigma=Fnb/S
plot.addData(t1=O.time,t2=O.time,Fnb=Fnb,Fnt=Fnt)
plot.plots={'t1':('Fnb'),'t2':('Fnt')}
plot.plot(noShow=False, subPlots=True)
#########################
### ENGINE DEFINITION ##
#########################
O.dt=0.5*PWaveTimeStep()
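# Half the P-wave time step is a common safety factor for explicit DEM
# integration stability.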
O.engines=[
ForceResetter(),
InsertionSortCollider([
Bo1_Sphere_Aabb(),
Bo1_Wall_Aabb(),
Bo1_Facet_Aabb(),
]),
InteractionLoop([
Ig2_Sphere_Sphere_ScGeom(),
Ig2_Facet_Sphere_ScGeom(),
Ig2_Wall_Sphere_ScGeom()
],
[Ip2_CohFrictMat_CohFrictMat_CohFrictPhys(setCohesionNow=True,setCohesionOnNewContacts=True),
Ip2_FrictMat_FrictMat_FrictPhys()],
[Law2_ScGeom6D_CohFrictPhys_CohesionMoment(),
Law2_ScGeom_FrictPhys_CundallStrack(),
Law2_ScGridCoGeom_FrictPhys_CundallStrack(),
Law2_GridCoGridCoGeom_FrictPhys_CundallStrack()
]
),
DomainLimiter(lo=(-l,-l,0),hi=(l,l,1),iterPeriod=200),
NewtonIntegrator(damping=0.7,gravity=(0,0,-9.81),label='Newton'),
PyRunner(initRun=True,iterPeriod=1,command='dataCollector()'),
] | gpl-2.0 | 1,837,860,220,478,985,200 | 28.3125 | 137 | 0.650518 | false |
openstack/python-zaqarclient | tests/unit/cli/fakes.py | 1 | 1115 | # Copyright (c) 2015 Catalyst IT Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from osc_lib.tests import utils
class TestMessaging(utils.TestCommand):
def setUp(self):
super(TestMessaging, self).setUp()
self.messaging_client = mock.MagicMock()
        # TODO(flwang): It would be nice if we could figure out a better way
        # to get the mocked request and transport.
req_trans = (mock.MagicMock(), mock.MagicMock())
self.messaging_client._request_and_transport.return_value = req_trans
self.app.client_manager.messaging = self.messaging_client
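        # Derived test cases can then drive commands against the mock and
        # assert on it, e.g. (sketch, hypothetical attribute and queue name):
        #   self.messaging_client.queue.assert_called_once_with('fake_queue')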
| apache-2.0 | 2,726,139,654,589,934,000 | 34.967742 | 77 | 0.721973 | false |
hpcuantwerpen/easybuild-framework | test/framework/toy_build.py | 1 | 172930 | # -*- coding: utf-8 -*-
##
# Copyright 2013-2021 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
Toy build unit test
@author: Kenneth Hoste (Ghent University)
@author: Damian Alvarez (Forschungszentrum Juelich GmbH)
"""
import copy
import glob
import grp
import os
import re
import shutil
import signal
import stat
import sys
import tempfile
import textwrap
from distutils.version import LooseVersion
from test.framework.utilities import EnhancedTestCase, TestLoaderFiltered
from test.framework.package import mock_fpm
from unittest import TextTestRunner
import easybuild.tools.hooks # so we can reset cached hooks
import easybuild.tools.module_naming_scheme # required to dynamically load test module naming scheme(s)
from easybuild.framework.easyconfig.easyconfig import EasyConfig
from easybuild.framework.easyconfig.parser import EasyConfigParser
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.config import get_module_syntax, get_repositorypath
from easybuild.tools.environment import modify_env
from easybuild.tools.filetools import adjust_permissions, change_dir, copy_file, mkdir, move_file
from easybuild.tools.filetools import read_file, remove_dir, remove_file, which, write_file
from easybuild.tools.module_generator import ModuleGeneratorTcl
from easybuild.tools.modules import Lmod
from easybuild.tools.py2vs3 import reload, string_type
from easybuild.tools.run import run_cmd
from easybuild.tools.systemtools import get_shared_lib_ext
from easybuild.tools.version import VERSION as EASYBUILD_VERSION
class ToyBuildTest(EnhancedTestCase):
"""Toy build unit test."""
def setUp(self):
"""Test setup."""
super(ToyBuildTest, self).setUp()
fd, self.dummylogfn = tempfile.mkstemp(prefix='easybuild-dummy', suffix='.log')
os.close(fd)
# clear log
write_file(self.logfile, '')
def tearDown(self):
"""Cleanup."""
# kick out any paths for included easyblocks from sys.path,
        # to avoid infecting any other tests
for path in sys.path[:]:
if '/included-easyblocks' in path:
sys.path.remove(path)
# reload toy easyblock (and generic toy_extension easyblock that imports it) after cleaning up sys.path,
# to avoid trouble in other tests due to included toy easyblock that is cached somewhere
# (despite the cleanup in sys.modules);
# important for tests that include a customised copy of the toy easyblock
# (like test_toy_build_enhanced_sanity_check)
import easybuild.easyblocks.toy
reload(easybuild.easyblocks.toy)
import easybuild.easyblocks.toytoy
reload(easybuild.easyblocks.toytoy)
import easybuild.easyblocks.generic.toy_extension
reload(easybuild.easyblocks.generic.toy_extension)
del sys.modules['easybuild.easyblocks.toy']
del sys.modules['easybuild.easyblocks.toytoy']
del sys.modules['easybuild.easyblocks.generic.toy_extension']
super(ToyBuildTest, self).tearDown()
# remove logs
if os.path.exists(self.dummylogfn):
os.remove(self.dummylogfn)
def check_toy(self, installpath, outtxt, version='0.0', versionprefix='', versionsuffix=''):
"""Check whether toy build succeeded."""
full_version = ''.join([versionprefix, version, versionsuffix])
# check for success
success = re.compile(r"COMPLETED: Installation ended successfully \(took .* secs?\)")
        self.assertTrue(success.search(outtxt), "COMPLETED message found in '%s'" % outtxt)
# if the module exists, it should be fine
toy_module = os.path.join(installpath, 'modules', 'all', 'toy', full_version)
msg = "module for toy build toy/%s found (path %s)" % (full_version, toy_module)
if get_module_syntax() == 'Lua':
toy_module += '.lua'
self.assertTrue(os.path.exists(toy_module), msg)
# module file is symlinked according to moduleclass
toy_module_symlink = os.path.join(installpath, 'modules', 'tools', 'toy', full_version)
if get_module_syntax() == 'Lua':
toy_module_symlink += '.lua'
self.assertTrue(os.path.islink(toy_module_symlink))
self.assertTrue(os.path.exists(toy_module_symlink))
# make sure installation log file and easyconfig file are copied to install dir
software_path = os.path.join(installpath, 'software', 'toy', full_version)
install_log_path_pattern = os.path.join(software_path, 'easybuild', 'easybuild-toy-%s*.log' % version)
self.assertTrue(len(glob.glob(install_log_path_pattern)) >= 1,
"Found at least 1 file at %s" % install_log_path_pattern)
# make sure test report is available
test_report_path_pattern = os.path.join(software_path, 'easybuild', 'easybuild-toy-%s*test_report.md' % version)
self.assertTrue(len(glob.glob(test_report_path_pattern)) >= 1,
"Found at least 1 file at %s" % test_report_path_pattern)
ec_file_path = os.path.join(software_path, 'easybuild', 'toy-%s.eb' % full_version)
self.assertTrue(os.path.exists(ec_file_path))
devel_module_path = os.path.join(software_path, 'easybuild', 'toy-%s-easybuild-devel' % full_version)
self.assertTrue(os.path.exists(devel_module_path))
def test_toy_build(self, extra_args=None, ec_file=None, tmpdir=None, verify=True, fails=False, verbose=True,
raise_error=False, test_report=None, versionsuffix='', testing=True,
raise_systemexit=False, force=True):
"""Perform a toy build."""
if extra_args is None:
extra_args = []
test_readme = False
if ec_file is None:
ec_file = os.path.join(os.path.dirname(__file__), 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb')
test_readme = True
full_ver = '0.0%s' % versionsuffix
args = [
ec_file,
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--debug',
'--unittest-file=%s' % self.logfile,
'--robot=%s' % os.pathsep.join([self.test_buildpath, os.path.dirname(__file__)]),
]
if force:
args.append('--force')
if tmpdir is not None:
args.append('--tmpdir=%s' % tmpdir)
if test_report is not None:
args.append('--dump-test-report=%s' % test_report)
args.extend(extra_args)
myerr = None
try:
outtxt = self.eb_main(args, logfile=self.dummylogfn, do_build=True, verbose=verbose,
raise_error=raise_error, testing=testing, raise_systemexit=raise_systemexit)
except Exception as err:
myerr = err
if raise_error:
raise myerr
if verify:
self.check_toy(self.test_installpath, outtxt, versionsuffix=versionsuffix)
if test_readme:
# make sure postinstallcmds were used
toy_install_path = os.path.join(self.test_installpath, 'software', 'toy', full_ver)
self.assertEqual(read_file(os.path.join(toy_install_path, 'README')), "TOY\n")
# make sure full test report was dumped, and contains sensible information
if test_report is not None:
self.assertTrue(os.path.exists(test_report))
if fails:
test_result = 'FAIL'
else:
test_result = 'SUCCESS'
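            # note: '%d' formats the boolean 'not fails' as 1 (success expected) or 0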
regex_patterns = [
r"Test result[\S\s]*Build succeeded for %d out of 1" % (not fails),
r"Overview of tested easyconfig[\S\s]*%s[\S\s]*%s" % (test_result, os.path.basename(ec_file)),
r"Time info[\S\s]*start:[\S\s]*end:",
r"EasyBuild info[\S\s]*framework version:[\S\s]*easyblocks ver[\S\s]*command line[\S\s]*configuration",
r"System info[\S\s]*cpu model[\S\s]*os name[\S\s]*os version[\S\s]*python version",
r"List of loaded modules",
r"Environment",
]
test_report_txt = read_file(test_report)
for regex_pattern in regex_patterns:
regex = re.compile(regex_pattern, re.M)
msg = "Pattern %s found in full test report: %s" % (regex.pattern, test_report_txt)
self.assertTrue(regex.search(test_report_txt), msg)
return outtxt
def run_test_toy_build_with_output(self, *args, **kwargs):
"""Run test_toy_build with specified arguments, catch stdout/stderr and return it."""
self.mock_stderr(True)
self.mock_stdout(True)
self.test_toy_build(*args, **kwargs)
stderr = self.get_stderr()
stdout = self.get_stdout()
self.mock_stderr(False)
self.mock_stdout(False)
return stdout, stderr
def test_toy_broken(self):
"""Test deliberately broken toy build."""
tmpdir = tempfile.mkdtemp()
broken_toy_ec = os.path.join(tmpdir, "toy-broken.eb")
toy_ec_file = os.path.join(os.path.dirname(__file__), 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb')
broken_toy_ec_txt = read_file(toy_ec_file)
broken_toy_ec_txt += "checksums = ['clearywrongMD5checksumoflength32']"
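        # note: the bogus value is deliberately exactly 32 characters long, so it is treated as an MD5 checksum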
write_file(broken_toy_ec, broken_toy_ec_txt)
error_regex = "Checksum verification .* failed"
self.assertErrorRegex(EasyBuildError, error_regex, self.test_toy_build, ec_file=broken_toy_ec, tmpdir=tmpdir,
verify=False, fails=True, verbose=False, raise_error=True)
# make sure log file is retained, also for failed build
log_path_pattern = os.path.join(tmpdir, 'eb-*', 'easybuild-toy-0.0*.log')
self.assertTrue(len(glob.glob(log_path_pattern)) == 1, "Log file found at %s" % log_path_pattern)
# make sure individual test report is retained, also for failed build
test_report_fp_pattern = os.path.join(tmpdir, 'eb-*', 'easybuild-toy-0.0*test_report.md')
self.assertTrue(len(glob.glob(test_report_fp_pattern)) == 1, "Test report %s found" % test_report_fp_pattern)
# test dumping full test report (doesn't raise an exception)
test_report_fp = os.path.join(self.test_buildpath, 'full_test_report.md')
self.test_toy_build(ec_file=broken_toy_ec, tmpdir=tmpdir, verify=False, fails=True, verbose=False,
raise_error=True, test_report=test_report_fp)
# cleanup
shutil.rmtree(tmpdir)
def test_toy_tweaked(self):
"""Test toy build with tweaked easyconfig, for testing extra easyconfig parameters."""
test_ecs_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'easyconfigs')
ec_file = os.path.join(self.test_buildpath, 'toy-0.0-tweaked.eb')
shutil.copy2(os.path.join(test_ecs_dir, 'test_ecs', 't', 'toy', 'toy-0.0.eb'), ec_file)
modloadmsg = 'THANKS FOR LOADING ME\\nI AM %(name)s v%(version)s'
modloadmsg_regex_tcl = r'THANKS.*\n\s*I AM toy v0.0\n\s*"'
modloadmsg_regex_lua = r'\[==\[THANKS.*\n\s*I AM toy v0.0\n\s*\]==\]'
# tweak easyconfig by appending to it
ec_extra = '\n'.join([
"versionsuffix = '-tweaked'",
"modextrapaths = {'SOMEPATH': ['foo/bar', 'baz', '']}",
"modextravars = {'FOO': 'bar'}",
"modloadmsg = '%s'" % modloadmsg,
"modtclfooter = 'puts stderr \"oh hai!\"'", # ignored when module syntax is Lua
"modluafooter = 'io.stderr:write(\"oh hai!\")'", # ignored when module syntax is Tcl
"usage = 'This toy is easy to use, 100%!'",
"examples = 'No example available, 0% complete'",
"citing = 'If you use this package, please cite our paper https://ieeexplore.ieee.org/document/6495863'",
"docpaths = ['share/doc/toy/readme.txt', 'share/doc/toy/html/index.html']",
"docurls = ['https://easybuilders.github.io/easybuild/toy/docs.html']",
"upstream_contacts = '[email protected]'",
"site_contacts = ['Jim Admin', 'Jane Admin']",
])
write_file(ec_file, ec_extra, append=True)
args = [
ec_file,
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--debug',
'--force',
]
outtxt = self.eb_main(args, do_build=True, verbose=True, raise_error=True)
self.check_toy(self.test_installpath, outtxt, versionsuffix='-tweaked')
toy_module = os.path.join(self.test_installpath, 'modules', 'all', 'toy', '0.0-tweaked')
if get_module_syntax() == 'Lua':
toy_module += '.lua'
toy_module_txt = read_file(toy_module)
if get_module_syntax() == 'Tcl':
self.assertTrue(re.search(r'^setenv\s*FOO\s*"bar"$', toy_module_txt, re.M))
self.assertTrue(re.search(r'^prepend-path\s*SOMEPATH\s*\$root/foo/bar$', toy_module_txt, re.M))
self.assertTrue(re.search(r'^prepend-path\s*SOMEPATH\s*\$root/baz$', toy_module_txt, re.M))
self.assertTrue(re.search(r'^prepend-path\s*SOMEPATH\s*\$root$', toy_module_txt, re.M))
mod_load_msg = r'module-info mode load.*\n\s*puts stderr\s*.*%s$' % modloadmsg_regex_tcl
self.assertTrue(re.search(mod_load_msg, toy_module_txt, re.M))
self.assertTrue(re.search(r'^puts stderr "oh hai!"$', toy_module_txt, re.M))
elif get_module_syntax() == 'Lua':
self.assertTrue(re.search(r'^setenv\("FOO", "bar"\)', toy_module_txt, re.M))
pattern = r'^prepend_path\("SOMEPATH", pathJoin\(root, "foo/bar"\)\)$'
self.assertTrue(re.search(pattern, toy_module_txt, re.M))
self.assertTrue(re.search(r'^prepend_path\("SOMEPATH", pathJoin\(root, "baz"\)\)$', toy_module_txt, re.M))
self.assertTrue(re.search(r'^prepend_path\("SOMEPATH", root\)$', toy_module_txt, re.M))
mod_load_msg = r'^if mode\(\) == "load" then\n\s*io.stderr:write\(%s\)$' % modloadmsg_regex_lua
regex = re.compile(mod_load_msg, re.M)
self.assertTrue(regex.search(toy_module_txt), "Pattern '%s' found in: %s" % (regex.pattern, toy_module_txt))
else:
self.assertTrue(False, "Unknown module syntax: %s" % get_module_syntax())
# newline between "I AM toy v0.0" (modloadmsg) and "oh hai!" (mod*footer) is added automatically
expected = "\nTHANKS FOR LOADING ME\nI AM toy v0.0\n"
# with module files in Tcl syntax, a newline is added automatically
if get_module_syntax() == 'Tcl':
expected += "\n"
expected += "oh hai!"
# setting $LMOD_QUIET results in suppression of printed message with Lmod & module files in Tcl syntax
if 'LMOD_QUIET' in os.environ:
del os.environ['LMOD_QUIET']
self.modtool.use(os.path.join(self.test_installpath, 'modules', 'all'))
out = self.modtool.run_module('load', 'toy/0.0-tweaked', return_output=True)
self.assertTrue(out.strip().endswith(expected))
def test_toy_buggy_easyblock(self):
"""Test build using a buggy/broken easyblock, make sure a traceback is reported."""
ec_file = os.path.join(os.path.dirname(__file__), 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb')
kwargs = {
'ec_file': ec_file,
'extra_args': ['--easyblock=EB_toy_buggy'],
'raise_error': True,
'verify': False,
'verbose': False,
}
err_regex = r"Traceback[\S\s]*toy_buggy.py.*build_step[\S\s]*name 'run_cmd' is not defined"
self.assertErrorRegex(EasyBuildError, err_regex, self.test_toy_build, **kwargs)
def test_toy_build_formatv2(self):
"""Perform a toy build (format v2)."""
# set $MODULEPATH such that modules for specified dependencies are found
modulepath = os.environ.get('MODULEPATH')
os.environ['MODULEPATH'] = os.path.abspath(os.path.join(os.path.dirname(__file__), 'modules'))
args = [
os.path.join(os.path.dirname(__file__), 'easyconfigs', 'v2.0', 'toy.eb'),
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--debug',
'--unittest-file=%s' % self.logfile,
'--force',
'--robot=%s' % os.pathsep.join([self.test_buildpath, os.path.dirname(__file__)]),
'--software-version=0.0',
'--toolchain=system,system',
'--experimental',
]
outtxt = self.eb_main(args, logfile=self.dummylogfn, do_build=True, verbose=True)
self.check_toy(self.test_installpath, outtxt)
# restore
if modulepath is not None:
os.environ['MODULEPATH'] = modulepath
else:
del os.environ['MODULEPATH']
def test_toy_build_with_blocks(self):
"""Test a toy build with multiple blocks."""
orig_sys_path = sys.path[:]
# add directory in which easyconfig file can be found to Python search path,
        # since we're not specifying its full path below
tmpdir = tempfile.mkdtemp()
# note get_paths_for expects easybuild/easyconfigs subdir
ecs_path = os.path.join(tmpdir, "easybuild", "easyconfigs")
os.makedirs(ecs_path)
test_ecs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
shutil.copy2(os.path.join(test_ecs, 't', 'toy', 'toy-0.0-multiple.eb'), ecs_path)
sys.path.append(tmpdir)
args = [
'toy-0.0-multiple.eb',
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--debug',
'--unittest-file=%s' % self.logfile,
'--force',
]
outtxt = self.eb_main(args, logfile=self.dummylogfn, do_build=True, verbose=True)
for toy_prefix, toy_version, toy_suffix in [
('', '0.0', '-somesuffix'),
('someprefix-', '0.0', '-somesuffix')
]:
self.check_toy(self.test_installpath, outtxt, version=toy_version,
versionprefix=toy_prefix, versionsuffix=toy_suffix)
# cleanup
shutil.rmtree(tmpdir)
sys.path = orig_sys_path
def test_toy_build_formatv2_sections(self):
"""Perform a toy build (format v2, using sections)."""
versions = {
'0.0': {'versionprefix': '', 'versionsuffix': ''},
'1.0': {'versionprefix': '', 'versionsuffix': ''},
'1.1': {'versionprefix': 'stable-', 'versionsuffix': ''},
'1.5': {'versionprefix': 'stable-', 'versionsuffix': '-early'},
'1.6': {'versionprefix': 'stable-', 'versionsuffix': '-early'},
'2.0': {'versionprefix': 'stable-', 'versionsuffix': '-early'},
'3.0': {'versionprefix': 'stable-', 'versionsuffix': '-mature'},
}
for version, specs in versions.items():
args = [
os.path.join(os.path.dirname(__file__), 'easyconfigs', 'v2.0', 'toy-with-sections.eb'),
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--debug',
'--unittest-file=%s' % self.logfile,
'--force',
'--robot=%s' % os.pathsep.join([self.test_buildpath, os.path.dirname(__file__)]),
'--software-version=%s' % version,
'--toolchain=system,system',
'--experimental',
]
outtxt = self.eb_main(args, logfile=self.dummylogfn, do_build=True, verbose=True, raise_error=True)
specs['version'] = version
self.check_toy(self.test_installpath, outtxt, **specs)
def test_toy_download_sources(self):
"""Test toy build with sources that still need to be 'downloaded'."""
tmpdir = tempfile.mkdtemp()
# copy toy easyconfig file, and append source_urls to it
topdir = os.path.dirname(os.path.abspath(__file__))
shutil.copy2(os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb'), tmpdir)
source_url = os.path.join(topdir, 'sandbox', 'sources', 'toy')
ec_file = os.path.join(tmpdir, 'toy-0.0.eb')
write_file(ec_file, '\nsource_urls = ["file://%s"]\n' % source_url, append=True)
# unset $EASYBUILD_XPATH env vars, to make sure --prefix is picked up
for cfg_opt in ['build', 'install', 'source']:
del os.environ['EASYBUILD_%sPATH' % cfg_opt.upper()]
sourcepath = os.path.join(tmpdir, 'mysources')
args = [
ec_file,
'--prefix=%s' % tmpdir,
'--sourcepath=%s' % ':'.join([sourcepath, '/bar']), # include senseless path which should be ignored
'--debug',
'--unittest-file=%s' % self.logfile,
'--force',
]
outtxt = self.eb_main(args, logfile=self.dummylogfn, do_build=True, verbose=True)
self.check_toy(tmpdir, outtxt)
self.assertTrue(os.path.exists(os.path.join(sourcepath, 't', 'toy', 'toy-0.0.tar.gz')))
shutil.rmtree(tmpdir)
def test_toy_permissions(self):
"""Test toy build with custom umask settings."""
toy_ec_file = os.path.join(os.path.dirname(__file__), 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb')
test_ec_txt = read_file(toy_ec_file)
# remove exec perms on bin subdirectory for others, to check whether correct dir permissions are set
test_ec_txt += "\npostinstallcmds += ['chmod o-x %(installdir)s/bin']"
test_ec = os.path.join(self.test_prefix, 'test.eb')
write_file(test_ec, test_ec_txt)
args = [
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--debug',
'--unittest-file=%s' % self.logfile,
'--force',
]
# set umask hard to verify default reliably
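        # (umask 0o022 strips write permission for group and others on newly created files/dirs)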
orig_umask = os.umask(0o022)
# test specifying a non-existing group
allargs = [test_ec] + args + ['--group=thisgroupdoesnotexist']
outtxt, err = self.eb_main(allargs, logfile=self.dummylogfn, do_build=True, return_error=True)
err_regex = re.compile("Failed to get group ID .* group does not exist")
self.assertTrue(err_regex.search(outtxt), "Pattern '%s' found in '%s'" % (err_regex.pattern, outtxt))
# determine current group name (at least we can use that)
gid = os.getgid()
curr_grp = grp.getgrgid(gid).gr_name
for umask, cfg_group, ec_group, dir_perms, fil_perms, bin_perms in [
(None, None, None, 0o755, 0o644, 0o755), # default: inherit session umask
(None, None, curr_grp, 0o750, 0o640, 0o750), # default umask, but with specified group in ec
(None, curr_grp, None, 0o750, 0o640, 0o750), # default umask, but with specified group in cfg
(None, 'notagrp', curr_grp, 0o750, 0o640, 0o750), # default umask, but with specified group in cfg/ec
('000', None, None, 0o777, 0o666, 0o777), # stupid empty umask
('032', None, None, 0o745, 0o644, 0o745), # no write/execute for group, no write for other
('030', None, curr_grp, 0o740, 0o640, 0o740), # no write for group, with specified group
('077', None, None, 0o700, 0o600, 0o700), # no access for other/group
]:
            # empty the install directory, to ensure any created directories adhere to the permissions
shutil.rmtree(self.test_installpath)
if cfg_group is None and ec_group is None:
allargs = [test_ec]
elif ec_group is not None:
shutil.copy2(test_ec, self.test_buildpath)
tmp_ec_file = os.path.join(self.test_buildpath, os.path.basename(test_ec))
write_file(tmp_ec_file, "\ngroup = '%s'" % ec_group, append=True)
allargs = [tmp_ec_file]
allargs.extend(args)
if umask is not None:
allargs.append("--umask=%s" % umask)
if cfg_group is not None:
allargs.append("--group=%s" % cfg_group)
outtxt = self.eb_main(allargs, logfile=self.dummylogfn, do_build=True, verbose=True)
# verify that installation was correct
self.check_toy(self.test_installpath, outtxt)
# group specified in easyconfig overrules configured group
group = cfg_group
if ec_group is not None:
group = ec_group
# verify permissions
paths_perms = [
# no write permissions for group/other, regardless of umask
(('software', 'toy', '0.0'), dir_perms & ~ 0o022),
(('software', 'toy', '0.0', 'bin'), dir_perms & ~ 0o022),
(('software', 'toy', '0.0', 'bin', 'toy'), bin_perms & ~ 0o022),
]
# only software subdirs are chmod'ed for 'protected' installs, so don't check those if a group is specified
if group is None:
paths_perms.extend([
(('software', ), dir_perms),
(('software', 'toy'), dir_perms),
(('software', 'toy', '0.0', 'easybuild', '*.log'), fil_perms),
(('modules', ), dir_perms),
(('modules', 'all'), dir_perms),
(('modules', 'all', 'toy'), dir_perms),
])
if get_module_syntax() == 'Tcl':
paths_perms.append((('modules', 'all', 'toy', '0.0'), fil_perms))
elif get_module_syntax() == 'Lua':
paths_perms.append((('modules', 'all', 'toy', '0.0.lua'), fil_perms))
for path, correct_perms in paths_perms:
fullpath = glob.glob(os.path.join(self.test_installpath, *path))[0]
perms = os.stat(fullpath).st_mode & 0o777
tup = (fullpath, oct(correct_perms), oct(perms), umask, cfg_group, ec_group)
msg = "Path %s has %s permissions: %s (umask: %s, group: %s - %s)" % tup
self.assertEqual(oct(perms), oct(correct_perms), msg)
if group is not None:
path_gid = os.stat(fullpath).st_gid
self.assertEqual(path_gid, grp.getgrnam(group).gr_gid)
# restore original umask
os.umask(orig_umask)
def test_toy_permissions_installdir(self):
"""Test --read-only-installdir and --group-write-installdir."""
# Avoid picking up the already prepared fake module
try:
del os.environ['MODULEPATH']
except KeyError:
pass
# set umask hard to verify default reliably
orig_umask = os.umask(0o022)
toy_ec = os.path.join(os.path.dirname(__file__), 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb')
test_ec_txt = read_file(toy_ec)
# take away read permissions, to check whether they are correctly restored by EasyBuild after installation
test_ec_txt += "\npostinstallcmds += ['chmod -R og-r %(installdir)s']"
test_ec = os.path.join(self.test_prefix, 'test.eb')
write_file(test_ec, test_ec_txt)
# first check default behaviour
self.test_toy_build(ec_file=test_ec)
toy_install_dir = os.path.join(self.test_installpath, 'software', 'toy', '0.0')
toy_bin = os.path.join(toy_install_dir, 'bin', 'toy')
installdir_perms = os.stat(toy_install_dir).st_mode & 0o777
self.assertEqual(installdir_perms, 0o755, "%s has default permissions" % toy_install_dir)
toy_bin_perms = os.stat(toy_bin).st_mode & 0o777
self.assertEqual(toy_bin_perms, 0o755, "%s has default permissions" % toy_bin_perms)
shutil.rmtree(self.test_installpath)
# check whether --read-only-installdir works as intended
# Tested 5 times:
# 1. Non existing build -> Install and set read-only
# 2. Existing build with --rebuild -> Reinstall and set read-only
# 3. Existing build with --force -> Reinstall and set read-only
# 4-5: Same as 2-3 but with --skip
for extra_args in ([], ['--rebuild'], ['--force'], ['--skip', '--rebuild'], ['--skip', '--force']):
self.mock_stdout(True)
self.test_toy_build(ec_file=test_ec, extra_args=['--read-only-installdir'] + extra_args, force=False)
self.mock_stdout(False)
installdir_perms = os.stat(os.path.dirname(toy_install_dir)).st_mode & 0o777
self.assertEqual(installdir_perms, 0o755, "%s has default permissions" % os.path.dirname(toy_install_dir))
installdir_perms = os.stat(toy_install_dir).st_mode & 0o777
self.assertEqual(installdir_perms, 0o555, "%s has read-only permissions" % toy_install_dir)
toy_bin_perms = os.stat(toy_bin).st_mode & 0o777
self.assertEqual(toy_bin_perms, 0o555, "%s has read-only permissions" % toy_bin_perms)
toy_bin_perms = os.stat(os.path.join(toy_install_dir, 'README')).st_mode & 0o777
self.assertEqual(toy_bin_perms, 0o444, "%s has read-only permissions" % toy_bin_perms)
# also log file copied into install dir should be read-only (not just the 'easybuild/' subdir itself)
log_path = glob.glob(os.path.join(toy_install_dir, 'easybuild', '*log'))[0]
log_perms = os.stat(log_path).st_mode & 0o777
self.assertEqual(log_perms, 0o444, "%s has read-only permissions" % log_path)
adjust_permissions(toy_install_dir, stat.S_IWUSR, add=True)
shutil.rmtree(self.test_installpath)
# also check --group-writable-installdir
self.test_toy_build(ec_file=test_ec, extra_args=['--group-writable-installdir'])
installdir_perms = os.stat(toy_install_dir).st_mode & 0o777
self.assertEqual(installdir_perms, 0o775, "%s has group write permissions" % self.test_installpath)
toy_bin_perms = os.stat(toy_bin).st_mode & 0o777
self.assertEqual(toy_bin_perms, 0o775, "%s has group write permissions" % toy_bin_perms)
# make sure --read-only-installdir is robust against not having the 'easybuild/' subdir after installation
# this happens when for example using ModuleRC easyblock (because no devel module is created)
test_ec_txt += "\nmake_module = False"
write_file(test_ec, test_ec_txt)
self.test_toy_build(ec_file=test_ec, extra_args=['--read-only-installdir'], verify=False, raise_error=True)
# restore original umask
os.umask(orig_umask)
def test_toy_gid_sticky_bits(self):
"""Test setting gid and sticky bits."""
subdirs = [
(('',), False),
(('software',), False),
(('software', 'toy'), False),
(('software', 'toy', '0.0'), True),
(('modules', 'all'), False),
(('modules', 'all', 'toy'), False),
]
# no gid/sticky bits by default
self.test_toy_build()
for subdir, _ in subdirs:
fullpath = os.path.join(self.test_installpath, *subdir)
perms = os.stat(fullpath).st_mode
self.assertFalse(perms & stat.S_ISGID, "no gid bit on %s" % fullpath)
self.assertFalse(perms & stat.S_ISVTX, "no sticky bit on %s" % fullpath)
        # gid/sticky bits are set, but only on (re)created directories
self.test_toy_build(extra_args=['--set-gid-bit', '--sticky-bit'])
for subdir, bits_set in subdirs:
fullpath = os.path.join(self.test_installpath, *subdir)
perms = os.stat(fullpath).st_mode
if bits_set:
self.assertTrue(perms & stat.S_ISGID, "gid bit set on %s" % fullpath)
self.assertTrue(perms & stat.S_ISVTX, "sticky bit set on %s" % fullpath)
else:
self.assertFalse(perms & stat.S_ISGID, "no gid bit on %s" % fullpath)
self.assertFalse(perms & stat.S_ISVTX, "no sticky bit on %s" % fullpath)
# start with a clean slate, now gid/sticky bits should be set on everything
shutil.rmtree(self.test_installpath)
self.test_toy_build(extra_args=['--set-gid-bit', '--sticky-bit'])
for subdir, _ in subdirs:
fullpath = os.path.join(self.test_installpath, *subdir)
perms = os.stat(fullpath).st_mode
self.assertTrue(perms & stat.S_ISGID, "gid bit set on %s" % fullpath)
self.assertTrue(perms & stat.S_ISVTX, "sticky bit set on %s" % fullpath)
def test_toy_group_check(self):
"""Test presence of group check in generated (Lua) modules"""
fd, dummylogfn = tempfile.mkstemp(prefix='easybuild-dummy', suffix='.log')
os.close(fd)
# figure out a group that we're a member of to use in the test
out, ec = run_cmd('groups', simple=False)
self.assertEqual(ec, 0, "Failed to select group to use in test")
group_name = out.split(' ')[0].strip()
toy_ec = os.path.join(os.path.dirname(__file__), 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb')
test_ec = os.path.join(self.test_prefix, 'test.eb')
args = [
test_ec,
'--force',
'--module-only',
]
for group in [group_name, (group_name, "Hey, you're not in the '%s' group!" % group_name)]:
if isinstance(group, string_type):
write_file(test_ec, read_file(toy_ec) + "\ngroup = '%s'\n" % group)
else:
write_file(test_ec, read_file(toy_ec) + "\ngroup = %s\n" % str(group))
self.mock_stdout(True)
outtxt = self.eb_main(args, logfile=dummylogfn, do_build=True, raise_error=True, raise_systemexit=True)
self.mock_stdout(False)
if get_module_syntax() == 'Tcl':
pattern = "Can't generate robust check in TCL modules for users belonging to group %s." % group_name
regex = re.compile(pattern, re.M)
self.assertTrue(regex.search(outtxt), "Pattern '%s' found in: %s" % (regex.pattern, outtxt))
elif get_module_syntax() == 'Lua':
lmod_version = os.getenv('LMOD_VERSION', 'NOT_FOUND')
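                # the group check relies on Lmod's userInGroup, which requires a sufficiently recent Lmod (>= 6.0.8)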
if LooseVersion(lmod_version) >= LooseVersion('6.0.8'):
toy_mod = os.path.join(self.test_installpath, 'modules', 'all', 'toy', '0.0.lua')
toy_mod_txt = read_file(toy_mod)
if isinstance(group, tuple):
group_name = group[0]
error_msg_pattern = "Hey, you're not in the '%s' group!" % group_name
else:
group_name = group
error_msg_pattern = "You are not part of '%s' group of users" % group_name
pattern = '\n'.join([
r'^if not \( userInGroup\("%s"\) \) then' % group_name,
r' LmodError\("%s[^"]*"\)' % error_msg_pattern,
r'end$',
])
regex = re.compile(pattern, re.M)
                    self.assertTrue(regex.search(toy_mod_txt),
                                    "Pattern '%s' found in: %s" % (regex.pattern, toy_mod_txt))
else:
pattern = r"Can't generate robust check in Lua modules for users belonging to group %s. "
pattern += r"Lmod version not recent enough \(%s\), should be >= 6.0.8" % lmod_version
regex = re.compile(pattern % group_name, re.M)
self.assertTrue(regex.search(outtxt), "Pattern '%s' found in: %s" % (regex.pattern, outtxt))
else:
self.assertTrue(False, "Unknown module syntax: %s" % get_module_syntax())
write_file(test_ec, read_file(toy_ec) + "\ngroup = ('%s', 'custom message', 'extra item')\n" % group_name)
self.assertErrorRegex(SystemExit, '.*', self.eb_main, args, do_build=True,
raise_error=True, raise_systemexit=True)
def test_allow_system_deps(self):
"""Test allow_system_deps easyconfig parameter."""
tmpdir = tempfile.mkdtemp()
# copy toy easyconfig file, and append source_urls to it
topdir = os.path.dirname(os.path.abspath(__file__))
shutil.copy2(os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb'), tmpdir)
ec_file = os.path.join(tmpdir, 'toy-0.0.eb')
write_file(ec_file, "\nallow_system_deps = [('Python', SYS_PYTHON_VERSION)]\n", append=True)
self.test_toy_build(ec_file=ec_file)
shutil.rmtree(tmpdir)
def test_toy_hierarchical(self):
"""Test toy build under example hierarchical module naming scheme."""
test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
self.setup_hierarchical_modules()
mod_prefix = os.path.join(self.test_installpath, 'modules', 'all')
args = [
os.path.join(test_easyconfigs, 't', 'toy', 'toy-0.0.eb'),
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--debug',
'--unittest-file=%s' % self.logfile,
'--force',
'--robot=%s' % test_easyconfigs,
'--module-naming-scheme=HierarchicalMNS',
]
# test module paths/contents with foss build
extra_args = [
'--try-toolchain=foss,2018a',
            # This test was created for the regex substitution of toolchains; to trigger this (rather than
            # subtoolchain resolution) we must add an additional build option
'--disable-map-toolchains',
]
self.eb_main(args + extra_args, logfile=self.dummylogfn, do_build=True, verbose=True, raise_error=True)
# make sure module file is installed in correct path
toy_module_path = os.path.join(mod_prefix, 'MPI', 'GCC', '6.4.0-2.28', 'OpenMPI', '2.1.2', 'toy', '0.0')
if get_module_syntax() == 'Lua':
toy_module_path += '.lua'
self.assertTrue(os.path.exists(toy_module_path))
# check that toolchain load is expanded to loads for toolchain dependencies,
# except for the ones that extend $MODULEPATH to make the toy module available
if get_module_syntax() == 'Tcl':
load_regex_template = "load %s"
elif get_module_syntax() == 'Lua':
load_regex_template = r'load\("%s/.*"\)'
else:
self.assertTrue(False, "Unknown module syntax: %s" % get_module_syntax())
modtxt = read_file(toy_module_path)
for dep in ['foss', 'GCC', 'OpenMPI']:
load_regex = re.compile(load_regex_template % dep)
self.assertFalse(load_regex.search(modtxt), "Pattern '%s' not found in %s" % (load_regex.pattern, modtxt))
for dep in ['OpenBLAS', 'FFTW', 'ScaLAPACK']:
load_regex = re.compile(load_regex_template % dep)
self.assertTrue(load_regex.search(modtxt), "Pattern '%s' found in %s" % (load_regex.pattern, modtxt))
os.remove(toy_module_path)
# test module path with GCC/6.4.0-2.28 build
extra_args = [
'--try-toolchain=GCC,6.4.0-2.28',
]
self.eb_main(args + extra_args, logfile=self.dummylogfn, do_build=True, verbose=True, raise_error=True)
# make sure module file is installed in correct path
toy_module_path = os.path.join(mod_prefix, 'Compiler', 'GCC', '6.4.0-2.28', 'toy', '0.0')
if get_module_syntax() == 'Lua':
toy_module_path += '.lua'
self.assertTrue(os.path.exists(toy_module_path))
# no dependencies or toolchain => no module load statements in module file
modtxt = read_file(toy_module_path)
self.assertFalse(re.search("module load", modtxt))
os.remove(toy_module_path)
# test module path with GCC/6.4.0-2.28 build, pretend to be an MPI lib by setting moduleclass
extra_args = [
'--try-toolchain=GCC,6.4.0-2.28',
'--try-amend=moduleclass=mpi',
]
self.eb_main(args + extra_args, logfile=self.dummylogfn, do_build=True, verbose=True, raise_error=True)
# make sure module file is installed in correct path
toy_module_path = os.path.join(mod_prefix, 'Compiler', 'GCC', '6.4.0-2.28', 'toy', '0.0')
if get_module_syntax() == 'Lua':
toy_module_path += '.lua'
self.assertTrue(os.path.exists(toy_module_path))
# 'module use' statements to extend $MODULEPATH are present
modtxt = read_file(toy_module_path)
modpath_extension = os.path.join(mod_prefix, 'MPI', 'GCC', '6.4.0-2.28', 'toy', '0.0')
if get_module_syntax() == 'Tcl':
self.assertTrue(re.search(r'^module\s*use\s*"%s"' % modpath_extension, modtxt, re.M))
elif get_module_syntax() == 'Lua':
fullmodpath_extension = os.path.join(self.test_installpath, modpath_extension)
regex = re.compile(r'^prepend_path\("MODULEPATH", "%s"\)' % fullmodpath_extension, re.M)
self.assertTrue(regex.search(modtxt), "Pattern '%s' found in %s" % (regex.pattern, modtxt))
else:
self.assertTrue(False, "Unknown module syntax: %s" % get_module_syntax())
os.remove(toy_module_path)
# ... unless they shouldn't be
extra_args.append('--try-amend=include_modpath_extensions=') # pass empty string as equivalent to False
self.eb_main(args + extra_args, logfile=self.dummylogfn, do_build=True, verbose=True, raise_error=True)
modtxt = read_file(toy_module_path)
modpath_extension = os.path.join(mod_prefix, 'MPI', 'GCC', '6.4.0-2.28', 'toy', '0.0')
if get_module_syntax() == 'Tcl':
self.assertFalse(re.search(r'^module\s*use\s*"%s"' % modpath_extension, modtxt, re.M))
elif get_module_syntax() == 'Lua':
fullmodpath_extension = os.path.join(self.test_installpath, modpath_extension)
regex = re.compile(r'^prepend_path\("MODULEPATH", "%s"\)' % fullmodpath_extension, re.M)
self.assertFalse(regex.search(modtxt), "Pattern '%s' found in %s" % (regex.pattern, modtxt))
else:
self.assertTrue(False, "Unknown module syntax: %s" % get_module_syntax())
os.remove(toy_module_path)
# test module path with system/system build
extra_args = [
'--try-toolchain=system,system',
]
self.eb_main(args + extra_args, logfile=self.dummylogfn, do_build=True, verbose=True, raise_error=True)
# make sure module file is installed in correct path
toy_module_path = os.path.join(mod_prefix, 'Core', 'toy', '0.0')
if get_module_syntax() == 'Lua':
toy_module_path += '.lua'
self.assertTrue(os.path.exists(toy_module_path))
# no dependencies or toolchain => no module load statements in module file
modtxt = read_file(toy_module_path)
self.assertFalse(re.search("module load", modtxt))
os.remove(toy_module_path)
# test module path with system/system build, pretend to be a compiler by setting moduleclass
extra_args = [
'--try-toolchain=system,system',
'--try-amend=moduleclass=compiler',
]
self.eb_main(args + extra_args, logfile=self.dummylogfn, do_build=True, verbose=True, raise_error=True)
# make sure module file is installed in correct path
toy_module_path = os.path.join(mod_prefix, 'Core', 'toy', '0.0')
if get_module_syntax() == 'Lua':
toy_module_path += '.lua'
self.assertTrue(os.path.exists(toy_module_path))
# no dependencies or toolchain => no module load statements in module file
modtxt = read_file(toy_module_path)
modpath_extension = os.path.join(mod_prefix, 'Compiler', 'toy', '0.0')
if get_module_syntax() == 'Tcl':
self.assertTrue(re.search(r'^module\s*use\s*"%s"' % modpath_extension, modtxt, re.M))
elif get_module_syntax() == 'Lua':
fullmodpath_extension = os.path.join(self.test_installpath, modpath_extension)
regex = re.compile(r'^prepend_path\("MODULEPATH", "%s"\)' % fullmodpath_extension, re.M)
self.assertTrue(regex.search(modtxt), "Pattern '%s' found in %s" % (regex.pattern, modtxt))
else:
self.assertTrue(False, "Unknown module syntax: %s" % get_module_syntax())
os.remove(toy_module_path)
# building a toolchain module should also work
gompi_module_path = os.path.join(mod_prefix, 'Core', 'gompi', '2018a')
# make sure Core/gompi/2018a module that may already be there is removed (both Tcl/Lua variants)
for modfile in glob.glob(gompi_module_path + '*'):
os.remove(modfile)
if get_module_syntax() == 'Lua':
gompi_module_path += '.lua'
args[0] = os.path.join(test_easyconfigs, 'g', 'gompi', 'gompi-2018a.eb')
self.modtool.purge()
self.eb_main(args, logfile=self.dummylogfn, do_build=True, verbose=True, raise_error=True)
self.assertTrue(os.path.exists(gompi_module_path), "%s found" % gompi_module_path)
def test_toy_hierarchical_subdir_user_modules(self):
"""
        Test toy build under example hierarchical module naming scheme that was created using --subdir-user-modules
"""
# redefine $HOME to a temporary location we can fiddle with
home = os.path.join(self.test_prefix, 'HOME')
mkdir(home)
os.environ['HOME'] = home
test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
self.setup_hierarchical_modules()
mod_prefix = os.path.join(self.test_installpath, 'modules', 'all')
gcc_mod_subdir = os.path.join('Compiler', 'GCC', '6.4.0-2.28')
openmpi_mod_subdir = os.path.join('MPI', 'GCC', '6.4.0-2.28', 'OpenMPI', '2.1.2')
# include guarded 'module use' statement in GCC & OpenMPI modules,
# like there would be when --subdir-user-modules=modules/all is used
extra_modtxt = '\n'.join([
'if { [ file isdirectory [ file join $env(HOME) "modules/all/%s" ] ] } {' % gcc_mod_subdir,
' module use [ file join $env(HOME) "modules/all/%s" ]' % gcc_mod_subdir,
'}',
])
gcc_mod = os.path.join(mod_prefix, 'Core', 'GCC', '6.4.0-2.28')
write_file(gcc_mod, extra_modtxt, append=True)
extra_modtxt = '\n'.join([
'if { [ file isdirectory [ file join $env(HOME) "modules/all/%s" ] ] } {' % openmpi_mod_subdir,
' module use [ file join $env(HOME) "modules/all/%s" ]' % openmpi_mod_subdir,
'}',
])
openmpi_mod = os.path.join(mod_prefix, gcc_mod_subdir, 'OpenMPI', '2.1.2')
write_file(openmpi_mod, extra_modtxt, append=True)
args = [
os.path.join(test_easyconfigs, 't', 'toy', 'toy-0.0-gompi-2018a.eb'),
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % home,
'--unittest-file=%s' % self.logfile,
'--force',
'--module-naming-scheme=HierarchicalMNS',
'--try-toolchain=foss,2018a',
]
self.eb_main(args, logfile=self.dummylogfn, do_build=True, verbose=True, raise_error=True)
mod_ext = ''
if get_module_syntax() == 'Lua':
mod_ext = '.lua'
toy_mod = os.path.join(home, 'modules', 'all', openmpi_mod_subdir, 'toy', '0.0' + mod_ext)
toy_modtxt = read_file(toy_mod)
        # No math libs in original toolchain; --try-toolchain is too clever to upgrade it beyond necessary
for modname in ['FFTW', 'OpenBLAS', 'ScaLAPACK']:
regex = re.compile('load.*' + modname, re.M)
self.assertFalse(regex.search(toy_modtxt), "Pattern '%s' not found in: %s" % (regex.pattern, toy_modtxt))
for modname in ['GCC', 'OpenMPI']:
regex = re.compile('load.*' + modname, re.M)
self.assertFalse(regex.search(toy_modtxt), "Pattern '%s' not found in: %s" % (regex.pattern, toy_modtxt))
# also check with Lua GCC/OpenMPI modules in case of Lmod
if isinstance(self.modtool, Lmod):
# remove Tcl modules for GCC/OpenMPI in hierarchy
remove_file(gcc_mod)
remove_file(openmpi_mod)
# we also need to clear the 'module show' cache since we're replacing modules in the same $MODULEPATH
from easybuild.tools.modules import MODULE_SHOW_CACHE
MODULE_SHOW_CACHE.clear()
# make very sure toy module is regenerated
remove_file(toy_mod)
mod_prefix = os.path.join(self.test_installpath, 'modules', 'all')
# create minimal GCC module that extends $MODULEPATH with Compiler/GCC/6.4.0-2.28 in both locations
gcc_mod_txt = '\n'.join([
'setenv("EBROOTGCC", "/tmp/software/Core/GCC/6.4.0-2.28")',
'setenv("EBVERSIONGCC", "6.4.0-2.28")',
'prepend_path("MODULEPATH", "%s/%s")' % (mod_prefix, gcc_mod_subdir),
'if isDir(pathJoin(os.getenv("HOME"), "modules/all/%s")) then' % gcc_mod_subdir,
' prepend_path("MODULEPATH", pathJoin(os.getenv("HOME"), "modules/all/%s"))' % gcc_mod_subdir,
'end',
])
write_file(gcc_mod + '.lua', gcc_mod_txt)
# create minimal OpenMPI module that extends $MODULEPATH
            # with MPI/GCC/6.4.0-2.28/OpenMPI/2.1.2 in both locations
openmpi_mod_txt = '\n'.join([
'setenv("EBROOTOPENMPI", "/tmp/software/Compiler/GCC/6.4.0-2.28/OpenMPI/2.1.2")',
'setenv("EBVERSIONOPENMPI", "2.1.2")',
'prepend_path("MODULEPATH", "%s/%s")' % (mod_prefix, openmpi_mod_subdir),
'if isDir(pathJoin(os.getenv("HOME"), "modules/all/%s")) then' % openmpi_mod_subdir,
' prepend_path("MODULEPATH", pathJoin(os.getenv("HOME"), "modules/all/%s"))' % openmpi_mod_subdir,
'end',
])
write_file(openmpi_mod + '.lua', openmpi_mod_txt)
self.eb_main(args, logfile=self.dummylogfn, do_build=True, verbose=True, raise_error=True)
toy_modtxt = read_file(toy_mod)
            # No math libs in original toolchain; --try-toolchain is too clever to upgrade it beyond necessary
for modname in ['FFTW', 'OpenBLAS', 'ScaLAPACK']:
regex = re.compile('load.*' + modname, re.M)
self.assertFalse(regex.search(toy_modtxt), "Pattern '%s' not found in: %s" % (regex.pattern,
toy_modtxt))
for modname in ['GCC', 'OpenMPI']:
regex = re.compile('load.*' + modname, re.M)
self.assertFalse(regex.search(toy_modtxt),
"Pattern '%s' not found in: %s" % (regex.pattern, toy_modtxt))
def test_toy_advanced(self):
"""Test toy build with extensions and non-system toolchain."""
test_dir = os.path.abspath(os.path.dirname(__file__))
os.environ['MODULEPATH'] = os.path.join(test_dir, 'modules')
test_ec = os.path.join(test_dir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0-gompi-2018a-test.eb')
self.test_toy_build(ec_file=test_ec, versionsuffix='-gompi-2018a-test', extra_args=['--debug'])
toy_module = os.path.join(self.test_installpath, 'modules', 'all', 'toy', '0.0-gompi-2018a-test')
if get_module_syntax() == 'Lua':
toy_module += '.lua'
toy_mod_txt = read_file(toy_module)
patterns = [
'^setenv.*EBEXTSLISTTOY.*bar-0.0,barbar-0.0',
# set by ToyExtension easyblock used to install extensions
'^setenv.*TOY_EXT_BAR.*bar',
'^setenv.*TOY_EXT_BARBAR.*barbar',
]
for pattern in patterns:
self.assertTrue(re.search(pattern, toy_mod_txt, re.M), "Pattern '%s' found in: %s" % (pattern, toy_mod_txt))
def test_toy_advanced_filter_deps(self):
"""Test toy build with extensions, and filtered build dependency."""
# test case for bug https://github.com/easybuilders/easybuild-framework/pull/2515
test_dir = os.path.abspath(os.path.dirname(__file__))
os.environ['MODULEPATH'] = os.path.join(test_dir, 'modules')
toy_ec = os.path.join(test_dir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0-gompi-2018a-test.eb')
toy_ec_txt = read_file(toy_ec)
# add FFTW as build dependency, just to filter it out again
toy_ec_txt += "\nbuilddependencies = [('FFTW', '3.3.3')]"
test_ec = os.path.join(self.test_prefix, 'test.eb')
write_file(test_ec, toy_ec_txt)
self.test_toy_build(ec_file=test_ec, versionsuffix='-gompi-2018a-test', extra_args=["--filter-deps=FFTW"])
toy_module = os.path.join(self.test_installpath, 'modules', 'all', 'toy', '0.0-gompi-2018a-test')
if get_module_syntax() == 'Lua':
toy_module += '.lua'
self.assertTrue(os.path.exists(toy_module))
def test_toy_hidden_cmdline(self):
"""Test installing a hidden module using the '--hidden' command line option."""
test_ecs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
ec_file = os.path.join(test_ecs, 't', 'toy', 'toy-0.0.eb')
self.test_toy_build(ec_file=ec_file, extra_args=['--hidden'], verify=False)
# module file is hidden
toy_module = os.path.join(self.test_installpath, 'modules', 'all', 'toy', '.0.0')
if get_module_syntax() == 'Lua':
toy_module += '.lua'
self.assertTrue(os.path.exists(toy_module), 'Found hidden module %s' % toy_module)
# installed software is not hidden
toybin = os.path.join(self.test_installpath, 'software', 'toy', '0.0', 'bin', 'toy')
self.assertTrue(os.path.exists(toybin))
def test_toy_hidden_easyconfig(self):
"""Test installing a hidden module using the 'hidden = True' easyconfig parameter."""
# copy toy easyconfig file, and add hiding option to it
topdir = os.path.dirname(os.path.abspath(__file__))
ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb')
shutil.copy2(ec_file, self.test_prefix)
ec_file = os.path.join(self.test_prefix, 'toy-0.0.eb')
write_file(ec_file, "\nhidden = True\n", append=True)
self.test_toy_build(ec_file=ec_file, verify=False)
# module file is hidden
toy_module = os.path.join(self.test_installpath, 'modules', 'all', 'toy', '.0.0')
if get_module_syntax() == 'Lua':
toy_module += '.lua'
self.assertTrue(os.path.exists(toy_module), 'Found hidden module %s' % toy_module)
# installed software is not hidden
toybin = os.path.join(self.test_installpath, 'software', 'toy', '0.0', 'bin', 'toy')
self.assertTrue(os.path.exists(toybin))
def test_module_filepath_tweaking(self):
"""Test using --suffix-modules-path."""
mns_path = "easybuild.tools.module_naming_scheme.test_module_naming_scheme"
__import__(mns_path, globals(), locals(), [''])
topdir = os.path.dirname(os.path.abspath(__file__))
eb_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb')
args = [
eb_file,
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--force',
'--debug',
'--suffix-modules-path=foobarbaz',
'--module-naming-scheme=TestModuleNamingScheme',
]
self.eb_main(args, do_build=True, verbose=True)
mod_file_prefix = os.path.join(self.test_installpath, 'modules')
mod_file_suffix = ''
if get_module_syntax() == 'Lua':
mod_file_suffix += '.lua'
self.assertTrue(os.path.exists(os.path.join(mod_file_prefix, 'foobarbaz', 'toy', '0.0' + mod_file_suffix)))
self.assertTrue(os.path.exists(os.path.join(mod_file_prefix, 'TOOLS', 'toy', '0.0' + mod_file_suffix)))
self.assertTrue(os.path.islink(os.path.join(mod_file_prefix, 'TOOLS', 'toy', '0.0' + mod_file_suffix)))
self.assertTrue(os.path.exists(os.path.join(mod_file_prefix, 't', 'toy', '0.0' + mod_file_suffix)))
self.assertTrue(os.path.islink(os.path.join(mod_file_prefix, 't', 'toy', '0.0' + mod_file_suffix)))
def test_toy_archived_easyconfig(self):
"""Test archived easyconfig for a succesful build."""
repositorypath = os.path.join(self.test_installpath, 'easyconfigs_archive')
extra_args = [
'--repository=FileRepository',
'--repositorypath=%s' % repositorypath,
]
self.test_toy_build(raise_error=True, extra_args=extra_args)
archived_ec = os.path.join(repositorypath, 'toy', 'toy-0.0.eb')
self.assertTrue(os.path.exists(archived_ec))
ec = EasyConfig(archived_ec)
self.assertEqual(ec.name, 'toy')
self.assertEqual(ec.version, '0.0')
def test_toy_patches(self):
"""Test whether patches are being copied to install directory and easyconfigs archive"""
repositorypath = os.path.join(self.test_installpath, 'easyconfigs_archive')
extra_args = [
'--repository=FileRepository',
'--repositorypath=%s' % repositorypath,
]
self.test_toy_build(raise_error=True, extra_args=extra_args)
installdir = os.path.join(self.test_installpath, 'software', 'toy', '0.0')
patch_file = os.path.join(installdir, 'easybuild', 'toy-0.0_fix-silly-typo-in-printf-statement.patch')
self.assertTrue(os.path.exists(patch_file))
archived_patch_file = os.path.join(repositorypath, 'toy', 'toy-0.0_fix-silly-typo-in-printf-statement.patch')
self.assertTrue(os.path.isfile(archived_patch_file))
def test_toy_extension_patches_postinstallcmds(self):
"""Test install toy that includes extensions with patches and postinstallcmds."""
test_ecs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
toy_ec = os.path.join(test_ecs, 't', 'toy', 'toy-0.0.eb')
toy_ec_txt = read_file(toy_ec)
# create file that we'll copy via 'patches'
write_file(os.path.join(self.test_prefix, 'test.txt'), 'test123')
test_ec = os.path.join(self.test_prefix, 'test.eb')
test_ec_txt = '\n'.join([
toy_ec_txt,
'exts_list = [',
' ("bar", "0.0", {',
' "buildopts": " && ls -l test.txt",',
' "patches": [',
' "bar-0.0_fix-silly-typo-in-printf-statement.patch",', # normal patch
' ("bar-0.0_fix-very-silly-typo-in-printf-statement.patch", 0),', # patch with patch level
' ("test.txt", "."),', # file to copy to build dir (not a real patch file)
' ],',
' "postinstallcmds": ["touch %(installdir)s/created-via-postinstallcmds.txt"],',
' }),',
']',
])
write_file(test_ec, test_ec_txt)
self.test_toy_build(ec_file=test_ec)
installdir = os.path.join(self.test_installpath, 'software', 'toy', '0.0')
        # make sure that patches were actually applied (without them the message produced by 'bar' is different)
bar_bin = os.path.join(installdir, 'bin', 'bar')
out, _ = run_cmd(bar_bin)
self.assertEqual(out, "I'm a bar, and very very proud of it.\n")
# verify that post-install command for 'bar' extension was executed
fn = 'created-via-postinstallcmds.txt'
self.assertTrue(os.path.exists(os.path.join(installdir, fn)))
def test_toy_extension_sources(self):
"""Test install toy that includes extensions with 'sources' spec (as single-item list)."""
topdir = os.path.dirname(os.path.abspath(__file__))
test_ecs = os.path.join(topdir, 'easyconfigs', 'test_ecs')
toy_ec = os.path.join(test_ecs, 't', 'toy', 'toy-0.0.eb')
toy_ec_txt = read_file(toy_ec)
test_ec = os.path.join(self.test_prefix, 'test.eb')
bar_sources_specs = [
'["bar-%(version)s.tar.gz"]', # single-element list
'"bar-%(version)s.tar.gz"', # string value
]
for bar_sources_spec in bar_sources_specs:
# test use of single-element list in 'sources' with just the filename
test_ec_txt = '\n'.join([
toy_ec_txt,
'exts_list = [',
' ("bar", "0.0", {',
' "sources": %s,' % bar_sources_spec,
' }),',
']',
])
write_file(test_ec, test_ec_txt)
self.test_toy_build(ec_file=test_ec)
# copy bar-0.0.tar.gz to <tmpdir>/bar-0.0-local.tar.gz, to be used below
test_source_path = os.path.join(self.test_prefix, 'sources')
toy_ext_sources = os.path.join(topdir, 'sandbox', 'sources', 'toy', 'extensions')
bar_source = os.path.join(toy_ext_sources, 'bar-0.0.tar.gz')
copy_file(bar_source, os.path.join(test_source_path, 'bar-0.0-local.tar.gz'))
bar_patch = os.path.join(toy_ext_sources, 'bar-0.0_fix-silly-typo-in-printf-statement.patch')
copy_file(bar_patch, os.path.join(self.test_prefix, 'bar-0.0_fix-local.patch'))
# verify that source_urls and patches are picked up and taken into account
# when 'sources' is used to specify extension sources
bar_sources_spec = bar_sources_spec.replace('bar-%(version)s.tar.gz', 'bar-0.0-local.tar.gz')
test_ec_txt = '\n'.join([
toy_ec_txt,
'exts_list = [',
' ("bar", "0.0", {',
' "source_urls": ["file://%s"],' % test_source_path,
' "sources": %s,' % bar_sources_spec,
' "patches": ["bar-%(version)s_fix-local.patch"],',
' }),',
']',
])
write_file(test_ec, test_ec_txt)
self.test_toy_build(ec_file=test_ec, raise_error=True)
# check that checksums are picked up and verified
test_ec_txt = '\n'.join([
toy_ec_txt,
'exts_list = [',
' ("bar", "0.0", {',
' "source_urls": ["file://%s"],' % test_source_path,
' "sources": %s,' % bar_sources_spec,
' "patches": ["bar-%(version)s_fix-local.patch"],',
# note: purposely incorrect (SHA256) checksums! (to check if checksum verification works)
' "checksums": [',
' "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",',
' "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",',
' ],',
' }),',
']',
])
write_file(test_ec, test_ec_txt)
error_pattern = r"Checksum verification for extension source bar-0.0-local.tar.gz failed"
self.assertErrorRegex(EasyBuildError, error_pattern, self.test_toy_build, ec_file=test_ec,
raise_error=True, verbose=False)
# test again with correct checksum for bar-0.0.tar.gz, but faulty checksum for patch file
test_ec_txt = '\n'.join([
toy_ec_txt,
'exts_list = [',
' ("bar", "0.0", {',
' "source_urls": ["file://%s"],' % test_source_path,
' "sources": %s,' % bar_sources_spec,
' "patches": ["bar-%(version)s_fix-local.patch"],',
' "checksums": [',
' "f3676716b610545a4e8035087f5be0a0248adee0abb3930d3edb76d498ae91e7",',
# note: purposely incorrect checksum for patch!
' "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",',
' ],',
' }),',
']',
])
write_file(test_ec, test_ec_txt)
error_pattern = r"Checksum verification for extension patch bar-0.0_fix-local.patch failed"
self.assertErrorRegex(EasyBuildError, error_pattern, self.test_toy_build, ec_file=test_ec,
raise_error=True, verbose=False)
# test again with correct checksums
test_ec_txt = '\n'.join([
toy_ec_txt,
'exts_list = [',
' ("bar", "0.0", {',
' "source_urls": ["file://%s"],' % test_source_path,
' "sources": %s,' % bar_sources_spec,
' "patches": ["bar-%(version)s_fix-local.patch"],',
' "checksums": [',
' "f3676716b610545a4e8035087f5be0a0248adee0abb3930d3edb76d498ae91e7",',
' "84db53592e882b5af077976257f9c7537ed971cb2059003fd4faa05d02cae0ab",',
' ],',
' }),',
']',
])
write_file(test_ec, test_ec_txt)
self.test_toy_build(ec_file=test_ec, raise_error=True)
def test_toy_extension_sources_git_config(self):
"""Test install toy that includes extensions with 'sources' spec including 'git_config'."""
test_ecs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
toy_ec = os.path.join(test_ecs, 't', 'toy', 'toy-0.0.eb')
toy_ec_txt = read_file(toy_ec)
        # Tarball that should be created via 'git_config', and one file contained in it
ext_tgz = 'exts-git.tar.gz'
ext_tarball = os.path.join(self.test_sourcepath, 't', 'toy', ext_tgz)
ext_tarfile = 'a_directory/a_file.txt'
        # Dummy source code required for the extension's build_step to pass
ext_code = 'int main() { return 0; }'
ext_cfile = 'exts-git.c'
test_ec = os.path.join(self.test_prefix, 'test.eb')
test_ec_txt = '\n'.join([
toy_ec_txt,
'prebuildopts = "echo \\\"%s\\\" > %s && ",' % (ext_code, ext_cfile),
'exts_list = [',
' ("exts-git", "0.0", {',
' "buildopts": "&& ls -l %s %s",' % (ext_tarball, ext_tarfile),
' "sources": {',
' "filename": "%(name)s.tar.gz",',
' "git_config": {',
' "repo_name": "testrepository",',
' "url": "https://github.com/easybuilders",',
' "tag": "main",',
' },',
' },',
' }),',
']',
])
write_file(test_ec, test_ec_txt)
self.test_toy_build(ec_file=test_ec)
def test_toy_module_fulltxt(self):
"""Strict text comparison of generated module file."""
self.test_toy_tweaked()
toy_module = os.path.join(self.test_installpath, 'modules', 'all', 'toy', '0.0-tweaked')
if get_module_syntax() == 'Lua':
toy_module += '.lua'
toy_mod_txt = read_file(toy_module)
modloadmsg_tcl = [
r'puts stderr "THANKS FOR LOADING ME',
r'I AM toy v0.0',
'"',
]
modloadmsg_lua = [
r'io.stderr:write\(\[==\[THANKS FOR LOADING ME',
r'I AM toy v0.0',
r'\]==\]\)',
]
help_txt = '\n'.join([
r'Description',
r'===========',
r'Toy C program, 100% toy.',
r'',
r'',
r'Usage',
r'=====',
r'This toy is easy to use, 100%!',
r'',
r'',
r'Examples',
r'========',
r'No example available, 0% complete',
r'',
r'',
r'Citing',
r'======',
r'If you use this package, please cite our paper https://ieeexplore.ieee.org/document/6495863',
r'',
r'',
r'More information',
r'================',
r' - Homepage: https://easybuilders.github.io/easybuild',
r' - Documentation:',
r' - \$EBROOTTOY/share/doc/toy/readme.txt',
r' - \$EBROOTTOY/share/doc/toy/html/index.html',
r' - https://easybuilders.github.io/easybuild/toy/docs.html',
r' - Upstream contact: [email protected]',
r' - Site contacts:',
r' - Jim Admin',
r' - Jane Admin',
])
if get_module_syntax() == 'Lua':
mod_txt_regex_pattern = '\n'.join([
r'help\(\[==\[',
r'',
r'%s' % help_txt,
r'\]==\]\)',
r'',
r'whatis\(\[==\[Description: Toy C program, 100% toy.\]==\]\)',
r'whatis\(\[==\[Homepage: https://easybuilders.github.io/easybuild\]==\]\)',
r'whatis\(\[==\[URL: https://easybuilders.github.io/easybuild\]==\]\)',
r'',
r'local root = "%s/software/toy/0.0-tweaked"' % self.test_installpath,
r'',
r'conflict\("toy"\)',
r'',
r'prepend_path\("CMAKE_PREFIX_PATH", root\)',
r'prepend_path\("LD_LIBRARY_PATH", pathJoin\(root, "lib"\)\)',
r'prepend_path\("LIBRARY_PATH", pathJoin\(root, "lib"\)\)',
r'prepend_path\("PATH", pathJoin\(root, "bin"\)\)',
r'setenv\("EBROOTTOY", root\)',
r'setenv\("EBVERSIONTOY", "0.0"\)',
r'setenv\("EBDEVELTOY", pathJoin\(root, "easybuild/toy-0.0-tweaked-easybuild-devel"\)\)',
r'',
r'setenv\("FOO", "bar"\)',
r'prepend_path\("SOMEPATH", pathJoin\(root, "foo/bar"\)\)',
r'prepend_path\("SOMEPATH", pathJoin\(root, "baz"\)\)',
r'prepend_path\("SOMEPATH", root\)',
r'',
r'if mode\(\) == "load" then',
] + modloadmsg_lua + [
r'end',
r'setenv\("TOY", "toy-0.0"\)',
r'-- Built with EasyBuild version .*',
r'io.stderr:write\("oh hai\!"\)$',
])
elif get_module_syntax() == 'Tcl':
mod_txt_regex_pattern = '\n'.join([
r'^#%Module',
r'proc ModulesHelp { } {',
r' puts stderr {',
r'',
r'%s' % help_txt,
r' }',
r'}',
r'',
r'module-whatis {Description: Toy C program, 100% toy.}',
r'module-whatis {Homepage: https://easybuilders.github.io/easybuild}',
r'module-whatis {URL: https://easybuilders.github.io/easybuild}',
r'',
r'set root %s/software/toy/0.0-tweaked' % self.test_installpath,
r'',
r'conflict toy',
r'',
r'prepend-path CMAKE_PREFIX_PATH \$root',
r'prepend-path LD_LIBRARY_PATH \$root/lib',
r'prepend-path LIBRARY_PATH \$root/lib',
r'prepend-path PATH \$root/bin',
r'setenv EBROOTTOY "\$root"',
r'setenv EBVERSIONTOY "0.0"',
r'setenv EBDEVELTOY "\$root/easybuild/toy-0.0-tweaked-easybuild-devel"',
r'',
r'setenv FOO "bar"',
r'prepend-path SOMEPATH \$root/foo/bar',
r'prepend-path SOMEPATH \$root/baz',
r'prepend-path SOMEPATH \$root',
r'',
r'if { \[ module-info mode load \] } {',
] + modloadmsg_tcl + [
r'}',
r'setenv TOY "toy-0.0"',
r'# Built with EasyBuild version .*',
r'puts stderr "oh hai\!"$',
])
else:
self.assertTrue(False, "Unknown module syntax: %s" % get_module_syntax())
mod_txt_regex = re.compile(mod_txt_regex_pattern)
msg = "Pattern '%s' matches with: %s" % (mod_txt_regex.pattern, toy_mod_txt)
self.assertTrue(mod_txt_regex.match(toy_mod_txt), msg)
def test_external_dependencies(self):
"""Test specifying external (build) dependencies."""
topdir = os.path.dirname(os.path.abspath(__file__))
ectxt = read_file(os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0-deps.eb'))
toy_ec = os.path.join(self.test_prefix, 'toy-0.0-external-deps.eb')
# just specify some of the test modules we ship, doesn't matter where they come from
extraectxt = "\ndependencies += [('foobar/1.2.3', EXTERNAL_MODULE)]"
extraectxt += "\nbuilddependencies = [('somebuilddep/0.1', EXTERNAL_MODULE)]"
extraectxt += "\nversionsuffix = '-external-deps'"
write_file(toy_ec, ectxt + extraectxt)
# install dummy modules
modulepath = os.path.join(self.test_prefix, 'modules')
for mod in ['intel/2018a', 'GCC/6.4.0-2.28', 'foobar/1.2.3', 'somebuilddep/0.1']:
mkdir(os.path.join(modulepath, os.path.dirname(mod)), parents=True)
write_file(os.path.join(modulepath, mod), "#%Module")
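# a bare '#%Module' shebang is enough for these stub modules to be found and loaded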
installed_test_modules = os.path.join(self.test_installpath, 'modules', 'all')
self.reset_modulepath([modulepath, installed_test_modules])
start_env = copy.deepcopy(os.environ)
self.test_toy_build(ec_file=toy_ec, versionsuffix='-external-deps', verbose=True, raise_error=True)
self.modtool.load(['toy/0.0-external-deps'])
# note build dependency is not loaded
mods = ['intel/2018a', 'GCC/6.4.0-2.28', 'foobar/1.2.3', 'toy/0.0-external-deps']
self.assertEqual([x['mod_name'] for x in self.modtool.list()], mods)
# restore original environment (to undo 'module load' done above)
modify_env(os.environ, start_env, verbose=False)
# check behaviour when a non-existing external (build) dependency is included
extraectxt = "\nbuilddependencies = [('nosuchbuilddep/0.0.0', EXTERNAL_MODULE)]"
extraectxt += "\nversionsuffix = '-external-deps-broken1'"
write_file(toy_ec, ectxt + extraectxt)
if isinstance(self.modtool, Lmod):
err_msg = r"Module command \\'.*load nosuchbuilddep/0.0.0\\' failed"
else:
err_msg = r"Unable to locate a modulefile for 'nosuchbuilddep/0.0.0'"
self.assertErrorRegex(EasyBuildError, err_msg, self.test_toy_build, ec_file=toy_ec,
raise_error=True, verbose=False)
extraectxt = "\ndependencies += [('nosuchmodule/1.2.3', EXTERNAL_MODULE)]"
extraectxt += "\nversionsuffix = '-external-deps-broken2'"
write_file(toy_ec, ectxt + extraectxt)
if isinstance(self.modtool, Lmod):
err_msg = r"Module command \\'.*load nosuchmodule/1.2.3\\' failed"
else:
err_msg = r"Unable to locate a modulefile for 'nosuchmodule/1.2.3'"
self.assertErrorRegex(EasyBuildError, err_msg, self.test_toy_build, ec_file=toy_ec,
raise_error=True, verbose=False)
# --dry-run still works when external modules are missing; external modules are treated as if they were there
outtxt = self.test_toy_build(ec_file=toy_ec, verbose=True, extra_args=['--dry-run'], verify=False)
regex = re.compile(r"^ \* \[ \] .* \(module: toy/0.0-external-deps-broken2\)", re.M)
self.assertTrue(regex.search(outtxt), "Pattern '%s' should be found in: %s" % (regex.pattern, outtxt))
def test_module_only(self):
"""Test use of --module-only."""
ec_files_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
ec_file = os.path.join(ec_files_path, 't', 'toy', 'toy-0.0-deps.eb')
toy_mod = os.path.join(self.test_installpath, 'modules', 'all', 'toy', '0.0-deps')
# only consider provided test modules
self.reset_modulepath([os.path.join(os.path.dirname(os.path.abspath(__file__)), 'modules')])
# sanity check fails without --force if software is not installed yet
common_args = [
ec_file,
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--debug',
'--unittest-file=%s' % self.logfile,
'--robot=%s' % ec_files_path,
'--module-syntax=Tcl',
]
args = common_args + ['--module-only']
err_msg = "Sanity check failed"
self.assertErrorRegex(EasyBuildError, err_msg, self.eb_main, args, do_build=True, raise_error=True)
self.assertFalse(os.path.exists(toy_mod))
self.eb_main(args + ['--force'], do_build=True, raise_error=True)
self.assertTrue(os.path.exists(toy_mod))
# make sure load statements for dependencies are included in additional module file generated with --module-only
modtxt = read_file(toy_mod)
self.assertTrue(re.search('load.*intel/2018a', modtxt), "load statement for intel/2018a not found in module")
self.assertTrue(re.search('load.*GCC/6.4.0-2.28', modtxt), "load statement for GCC/6.4.0-2.28 not found in module")
os.remove(toy_mod)
# --module-only --rebuild should run sanity check
rebuild_args = args + ['--rebuild']
err_msg = "Sanity check failed"
self.assertErrorRegex(EasyBuildError, err_msg, self.eb_main, rebuild_args, do_build=True, raise_error=True)
self.assertFalse(os.path.exists(toy_mod))
# installing another module under a different naming scheme and using Lua module syntax works fine
# first actually build and install toy software + module
prefix = os.path.join(self.test_installpath, 'software', 'toy', '0.0-deps')
self.eb_main(common_args + ['--force'], do_build=True, raise_error=True)
self.assertTrue(os.path.exists(toy_mod))
self.assertTrue(os.path.exists(os.path.join(self.test_installpath, 'software', 'toy', '0.0-deps', 'bin')))
modtxt = read_file(toy_mod)
self.assertTrue(re.search("set root %s" % prefix, modtxt))
self.assertEqual(len(os.listdir(os.path.join(self.test_installpath, 'software'))), 2)
self.assertEqual(len(os.listdir(os.path.join(self.test_installpath, 'software', 'toy'))), 1)
# install (only) additional module under a hierarchical MNS
args = common_args + [
'--module-only',
'--module-naming-scheme=MigrateFromEBToHMNS',
]
toy_core_mod = os.path.join(self.test_installpath, 'modules', 'all', 'Core', 'toy', '0.0-deps')
self.assertFalse(os.path.exists(toy_core_mod))
self.eb_main(args, do_build=True, raise_error=True)
self.assertTrue(os.path.exists(toy_core_mod))
# existing install is reused
modtxt2 = read_file(toy_core_mod)
self.assertTrue(re.search("set root %s" % prefix, modtxt2))
self.assertEqual(len(os.listdir(os.path.join(self.test_installpath, 'software'))), 3)
self.assertEqual(len(os.listdir(os.path.join(self.test_installpath, 'software', 'toy'))), 1)
# make sure load statements for dependencies are included
modtxt = read_file(toy_core_mod)
self.assertTrue(re.search('load.*intel/2018a', modtxt), "load statement for intel/2018a not found in module")
# Test that we can create a module even for an installation where we don't have write permissions
os.remove(toy_core_mod)
# remove the write permissions on the installation
adjust_permissions(prefix, stat.S_IRUSR | stat.S_IXUSR, relative=False)
self.assertFalse(os.path.exists(toy_core_mod))
self.eb_main(args, do_build=True, raise_error=True)
self.assertTrue(os.path.exists(toy_core_mod))
# existing install is reused
modtxt2 = read_file(toy_core_mod)
self.assertTrue(re.search("set root %s" % prefix, modtxt2))
self.assertEqual(len(os.listdir(os.path.join(self.test_installpath, 'software'))), 3)
self.assertEqual(len(os.listdir(os.path.join(self.test_installpath, 'software', 'toy'))), 1)
# make sure load statements for dependencies are included
modtxt = read_file(toy_core_mod)
self.assertTrue(re.search('load.*intel/2018a', modtxt), "load statement for intel/2018a not found in module")
os.remove(toy_core_mod)
os.remove(toy_mod)
# test installing (only) additional module in Lua syntax (if Lmod is available)
lmod_abspath = os.environ.get('LMOD_CMD') or which('lmod')
if lmod_abspath is not None:
args = common_args[:-1] + [
'--allow-modules-tool-mismatch',
'--module-only',
'--module-syntax=Lua',
'--modules-tool=Lmod',
]
self.assertFalse(os.path.exists(toy_mod + '.lua'))
self.eb_main(args, do_build=True, raise_error=True)
self.assertTrue(os.path.exists(toy_mod + '.lua'))
# existing install is reused
modtxt3 = read_file(toy_mod + '.lua')
self.assertTrue(re.search('local root = "%s"' % prefix, modtxt3))
self.assertEqual(len(os.listdir(os.path.join(self.test_installpath, 'software'))), 3)
self.assertEqual(len(os.listdir(os.path.join(self.test_installpath, 'software', 'toy'))), 1)
# make sure load statements for dependencies are included
modtxt = read_file(toy_mod + '.lua')
self.assertTrue(re.search('load.*intel/2018a', modtxt), "load statement for intel/2018a not found in module")
def test_module_only_extensions(self):
"""
Test use of --module-only with extensions involved.
Sanity check should catch problems with extensions;
extensions can be skipped using --skip-extensions.
"""
topdir = os.path.abspath(os.path.dirname(__file__))
toy_ec = os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb')
toy_mod = os.path.join(self.test_installpath, 'modules', 'all', 'toy', '0.0')
if get_module_syntax() == 'Lua':
toy_mod += '.lua'
test_ec = os.path.join(self.test_prefix, 'test.ec')
test_ec_txt = read_file(toy_ec)
test_ec_txt += '\n' + '\n'.join([
"sanity_check_commands = ['barbar', 'toy']",
"sanity_check_paths = {'files': ['bin/barbar', 'bin/toy'], 'dirs': ['bin']}",
"exts_list = [",
" ('barbar', '0.0', {",
" 'start_dir': 'src',",
" 'exts_filter': ('ls -l lib/lib%(ext_name)s.a', ''),",
" })",
"]",
])
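# note: the exts_filter command ('ls -l lib/libbarbar.a') doubles as the sanity check for the extension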
write_file(test_ec, test_ec_txt)
# clean up $MODULEPATH so only modules in test prefix dir are found
self.reset_modulepath([os.path.join(self.test_installpath, 'modules', 'all')])
self.assertEqual(self.modtool.available('toy'), [])
# install toy/0.0
self.eb_main([test_ec], do_build=True, raise_error=True)
# remove module file so we can try --module-only
remove_file(toy_mod)
# rename file required for barbar extension, so we can check whether sanity check catches it
libbarbar = os.path.join(self.test_installpath, 'software', 'toy', '0.0', 'lib', 'libbarbar.a')
move_file(libbarbar, libbarbar + '.foobar')
# check whether sanity check fails now when using --module-only
error_pattern = 'Sanity check failed: command "ls -l lib/libbarbar.a" failed'
for extra_args in (['--module-only'], ['--module-only', '--rebuild']):
self.assertErrorRegex(EasyBuildError, error_pattern, self.eb_main, [test_ec] + extra_args,
do_build=True, raise_error=True)
self.assertFalse(os.path.exists(toy_mod))
# failing sanity check for barbar extension is ignored when using --module-only --skip-extensions
for extra_args in (['--module-only'], ['--module-only', '--rebuild']):
self.eb_main([test_ec, '--skip-extensions'] + extra_args, do_build=True, raise_error=True)
self.assertTrue(os.path.exists(toy_mod))
remove_file(toy_mod)
# we can force module generation via --force (which skips sanity check entirely)
self.eb_main([test_ec, '--module-only', '--force'], do_build=True, raise_error=True)
self.assertTrue(os.path.exists(toy_mod))
def test_backup_modules(self):
"""Test use of backing up of modules with --module-only."""
ec_files_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
ec_file = os.path.join(ec_files_path, 't', 'toy', 'toy-0.0-deps.eb')
toy_mod = os.path.join(self.test_installpath, 'modules', 'all', 'toy', '0.0-deps')
toy_mod_dir, toy_mod_fn = os.path.split(toy_mod)
common_args = [
ec_file,
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--debug',
'--unittest-file=%s' % self.logfile,
'--robot=%s' % ec_files_path,
'--force',
'--disable-cleanup-tmpdir'
]
args = common_args + ['--module-syntax=Tcl']
# install module once (without --module-only), so it can be backed up
self.eb_main(args, do_build=True, raise_error=True)
self.assertTrue(os.path.exists(toy_mod))
# forced reinstall, no backup of module file because --backup-modules (or --module-only) is not used
self.eb_main(args, do_build=True, raise_error=True)
self.assertTrue(os.path.exists(toy_mod))
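# backups (when enabled) are stored as hidden dotfiles named like .0.0-deps.bak_<date>_<time>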
toy_mod_backups = glob.glob(os.path.join(toy_mod_dir, '.' + toy_mod_fn + '.bak_*'))
self.assertEqual(len(toy_mod_backups), 0)
self.mock_stderr(True)
self.mock_stdout(True)
# note: no need to specify --backup-modules, enabled automatically under --module-only
self.eb_main(args + ['--module-only'], do_build=True, raise_error=True)
stderr = self.get_stderr()
stdout = self.get_stdout()
self.mock_stderr(False)
self.mock_stdout(False)
self.assertTrue(os.path.exists(toy_mod))
toy_mod_backups = glob.glob(os.path.join(toy_mod_dir, '.' + toy_mod_fn + '.bak_*'))
self.assertEqual(len(toy_mod_backups), 1)
first_toy_mod_backup = toy_mod_backups[0]
# check that backup module is hidden (required for Tcl syntax)
self.assertTrue(os.path.basename(first_toy_mod_backup).startswith('.'))
toy_mod_bak = r".*/toy/\.0\.0-deps\.bak_[0-9]+_[0-9]+"
regex = re.compile("^== backup of existing module file stored at %s" % toy_mod_bak, re.M)
self.assertTrue(regex.search(stdout), "Pattern '%s' found in: %s" % (regex.pattern, stdout))
regex = re.compile("^== comparing module file with backup %s; no differences found$" % toy_mod_bak, re.M)
self.assertTrue(regex.search(stdout), "Pattern '%s' found in: %s" % (regex.pattern, stdout))
self.assertEqual(stderr, '')
# no backup of existing module file if --disable-backup-modules is used
self.eb_main(args + ['--disable-backup-modules'], do_build=True, raise_error=True)
toy_mod_backups = glob.glob(os.path.join(toy_mod_dir, '.' + toy_mod_fn + '.bak_*'))
self.assertEqual(len(toy_mod_backups), 1)
# inject additional lines in module file to generate diff
write_file(toy_mod, "some difference\n", append=True)
self.mock_stderr(True)
self.mock_stdout(True)
self.eb_main(args + ['--module-only'], do_build=True, raise_error=True, verbose=True)
stderr = self.get_stderr()
stdout = self.get_stdout()
self.mock_stderr(False)
self.mock_stdout(False)
toy_mod_backups = glob.glob(os.path.join(toy_mod_dir, '.' + toy_mod_fn + '.bak_*'))
self.assertEqual(len(toy_mod_backups), 2)
regex = re.compile("^== backup of existing module file stored at %s" % toy_mod_bak, re.M)
self.assertTrue(regex.search(stdout), "Pattern '%s' found in: %s" % (regex.pattern, stdout))
regex = re.compile("^== comparing module file with backup %s; diff is:$" % toy_mod_bak, re.M)
self.assertTrue(regex.search(stdout), "Pattern '%s' found in: %s" % (regex.pattern, stdout))
regex = re.compile("^-some difference$", re.M)
self.assertTrue(regex.search(stdout), "Pattern '%s' found in: %s" % (regex.pattern, stdout))
self.assertEqual(stderr, '')
# Test also with Lua syntax if Lmod is available.
# In particular, that the backup is not hidden (except when using Lmod < 7.0)
if isinstance(self.modtool, Lmod):
args = common_args + ['--module-syntax=Lua', '--backup-modules']
remove_dir(toy_mod_dir)
toy_mod = os.path.join(toy_mod_dir, toy_mod_fn + '.lua')
# initial installation of Lua module file
self.eb_main(args, do_build=True, raise_error=True)
self.assertTrue(os.path.exists(toy_mod))
lua_toy_mods = glob.glob(os.path.join(toy_mod_dir, '*.lua*'))
self.assertEqual(len(lua_toy_mods), 1)
self.assertEqual(os.path.basename(toy_mod), os.path.basename(lua_toy_mods[0]))
# no backups yet
toy_mod_backups = glob.glob(os.path.join(toy_mod_dir, toy_mod_fn + '.bak_*'))
self.assertEqual(len(toy_mod_backups), 0)
hidden_toy_mod_backups = glob.glob(os.path.join(toy_mod_dir, '.' + toy_mod_fn + '.bak_*'))
self.assertEqual(len(hidden_toy_mod_backups), 0)
# 2nd installation: backup module is created
self.mock_stderr(True)
self.mock_stdout(True)
self.eb_main(args, do_build=True, raise_error=True, verbose=True)
stderr = self.get_stderr()
stdout = self.get_stdout()
self.mock_stderr(False)
self.mock_stdout(False)
self.assertTrue(os.path.exists(toy_mod))
lua_toy_mods = glob.glob(os.path.join(toy_mod_dir, '*.lua*'))
self.assertEqual(len(lua_toy_mods), 1)
self.assertEqual(os.path.basename(toy_mod), os.path.basename(lua_toy_mods[0]))
# backup module is only hidden for old Lmod versions
lmod_version = os.getenv('LMOD_VERSION', 'NOT_FOUND')
if LooseVersion(lmod_version) < LooseVersion('7.0.0'):
backups_visible, backups_hidden = 0, 1
toy_mod_bak = r".*/toy/\.0\.0-deps\.bak_[0-9]+_[0-9]+"
else:
backups_visible, backups_hidden = 1, 0
toy_mod_bak = r".*/toy/0\.0-deps\.bak_[0-9]+_[0-9]+"
toy_mod_backups = glob.glob(os.path.join(toy_mod_dir, toy_mod_fn + '.bak_*'))
self.assertEqual(len(toy_mod_backups), backups_visible)
hidden_toy_mod_backups = glob.glob(os.path.join(toy_mod_dir, '.' + toy_mod_fn + '.bak_*'))
self.assertEqual(len(hidden_toy_mod_backups), backups_hidden)
first_toy_lua_mod_backup = (toy_mod_backups or hidden_toy_mod_backups)[0]
self.assertTrue('.bak_' in os.path.basename(first_toy_lua_mod_backup))
# check messages in stdout/stderr
regex = re.compile("^== backup of existing module file stored at %s" % toy_mod_bak, re.M)
self.assertTrue(regex.search(stdout), "Pattern '%s' found in: %s" % (regex.pattern, stdout))
regex = re.compile("^== comparing module file with backup %s; no differences found$" % toy_mod_bak, re.M)
self.assertTrue(regex.search(stdout), "Pattern '%s' found in: %s" % (regex.pattern, stdout))
self.assertEqual(stderr, '')
# tweak existing module file so we can verify diff of installed module with backup in stdout
write_file(toy_mod, "some difference\n", append=True)
self.mock_stderr(True)
self.mock_stdout(True)
self.eb_main(args, do_build=True, raise_error=True, verbose=True)
stderr = self.get_stderr()
stdout = self.get_stdout()
self.mock_stderr(False)
self.mock_stdout(False)
if LooseVersion(lmod_version) < LooseVersion('7.0.0'):
backups_hidden += 1
else:
backups_visible += 1
lua_toy_mods = glob.glob(os.path.join(toy_mod_dir, '*.lua*'))
self.assertEqual(len(lua_toy_mods), 1)
self.assertEqual(os.path.basename(toy_mod), os.path.basename(lua_toy_mods[0]))
toy_mod_backups = glob.glob(os.path.join(toy_mod_dir, toy_mod_fn + '.bak_*'))
self.assertEqual(len(toy_mod_backups), backups_visible)
hidden_toy_mod_backups = glob.glob(os.path.join(toy_mod_dir, '.' + toy_mod_fn + '.bak_*'))
self.assertEqual(len(hidden_toy_mod_backups), backups_hidden)
regex = re.compile("^== backup of existing module file stored at %s" % toy_mod_bak, re.M)
self.assertTrue(regex.search(stdout), "Pattern '%s' found in: %s" % (regex.pattern, stdout))
regex = re.compile("^== comparing module file with backup %s; diff is:$" % toy_mod_bak, re.M)
self.assertTrue(regex.search(stdout), "Pattern '%s' found in: %s" % (regex.pattern, stdout))
regex = re.compile("^-some difference$", re.M)
self.assertTrue(regex.search(stdout), "Pattern '%s' found in: %s" % (regex.pattern, stdout))
self.assertEqual(stderr, '')
def test_package(self):
"""Test use of --package and accompanying package configuration settings."""
mock_fpm(self.test_prefix)
pkgpath = os.path.join(self.test_prefix, 'pkgs')
extra_args = [
'--package',
'--package-release=321',
'--package-tool=fpm',
'--package-type=foo',
'--packagepath=%s' % pkgpath,
]
self.test_toy_build(extra_args=extra_args)
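# expected package name follows the pattern <name>-<version>-eb-<EasyBuild version>.<release>.<type>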
toypkg = os.path.join(pkgpath, 'toy-0.0-eb-%s.321.foo' % EASYBUILD_VERSION)
self.assertTrue(os.path.exists(toypkg), "%s should be there" % toypkg)
def test_package_skip(self):
"""Test use of --package with --skip."""
mock_fpm(self.test_prefix)
pkgpath = os.path.join(self.test_prefix, 'packages') # default path
self.test_toy_build(['--packagepath=%s' % pkgpath])
self.assertFalse(os.path.exists(pkgpath), "%s is not created without use of --package" % pkgpath)
self.mock_stdout(True)
self.test_toy_build(extra_args=['--package', '--skip'], verify=False)
self.mock_stdout(False)
toypkg = os.path.join(pkgpath, 'toy-0.0-eb-%s.1.rpm' % EASYBUILD_VERSION)
self.assertTrue(os.path.exists(toypkg), "%s should be there" % toypkg)
def test_regtest(self):
"""Test use of --regtest."""
self.test_toy_build(extra_args=['--regtest', '--sequential'], verify=False)
# just check whether module exists
toy_module = os.path.join(self.test_installpath, 'modules', 'all', 'toy', '0.0')
msg = "module %s found" % toy_module
if get_module_syntax() == 'Lua':
toy_module += '.lua'
self.assertTrue(os.path.exists(toy_module), msg)
def test_minimal_toolchains(self):
"""Test toy build with --minimal-toolchains."""
# this test doesn't check for anything specific to using minimal toolchains, only side-effects
self.test_toy_build(extra_args=['--minimal-toolchains'])
def test_reproducibility(self):
"""Test toy build produces expected reproducibility files"""
# We need hooks for a complete test
hooks_filename = 'my_hooks.py'
hooks_file = os.path.join(self.test_prefix, hooks_filename)
hooks_file_txt = '\n'.join([
"import os",
'',
"def start_hook():",
" print('start hook triggered')",
'',
"def pre_configure_hook(self):",
" print('pre-configure: toy.source: %s' % os.path.exists('toy.source'))",
'',
])
write_file(hooks_file, hooks_file_txt)
# also use the easyblock with inheritance to fully test
self.mock_stdout(True)
self.test_toy_build(extra_args=['--minimal-toolchains', '--easyblock=EB_toytoy', '--hooks=%s' % hooks_file])
self.mock_stdout(False)
# Check whether easyconfig is dumped to reprod/ subdir
reprod_dir = os.path.join(self.test_installpath, 'software', 'toy', '0.0', 'easybuild', 'reprod')
reprod_ec = os.path.join(reprod_dir, 'toy-0.0.eb')
self.assertTrue(os.path.exists(reprod_ec))
# Check that the toytoy easyblock is recorded in the reprod easyconfig
ec = EasyConfig(reprod_ec)
self.assertEqual(ec.parser.get_config_dict()['easyblock'], 'EB_toytoy')
# make sure start_dir is not recorded in the dumped easyconfig, this does not appear in the original easyconfig
# and is representative of values that are (typically) set by the easyblock steps (which are also dumped)
self.assertFalse('start_dir' in ec.parser.get_config_dict())
# Check for child easyblock existence
child_easyblock = os.path.join(reprod_dir, 'easyblocks', 'toytoy.py')
self.assertTrue(os.path.exists(child_easyblock))
# Check for parent easyblock existence
parent_easyblock = os.path.join(reprod_dir, 'easyblocks', 'toy.py')
self.assertTrue(os.path.exists(parent_easyblock))
# Make sure framework easyblock modules are not included
for framework_easyblock in ['easyblock.py', 'extensioneasyblock.py']:
path = os.path.join(reprod_dir, 'easyblocks', framework_easyblock)
self.assertFalse(os.path.exists(path))
# Make sure hooks are also copied
reprod_hooks = os.path.join(reprod_dir, 'hooks', hooks_filename)
self.assertTrue(os.path.exists(reprod_hooks))
def test_reproducibility_ext_easyblocks(self):
"""Test toy build produces expected reproducibility files also when extensions are used"""
topdir = os.path.dirname(os.path.abspath(__file__))
toy_ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb')
toy_ec_txt = read_file(toy_ec_file)
ec1 = os.path.join(self.test_prefix, 'toy1.eb')
ec1_txt = '\n'.join([
toy_ec_txt,
"exts_list = [('barbar', '0.0', {'start_dir': 'src'})]",
"",
])
write_file(ec1, ec1_txt)
self.test_toy_build(ec_file=ec1, verify=False, extra_args=['--minimal-toolchains', '--easyblock=EB_toytoy'])
# Check whether easyconfig is dumped to reprod/ subdir
reprod_dir = os.path.join(self.test_installpath, 'software', 'toy', '0.0', 'easybuild', 'reprod')
reprod_ec = os.path.join(reprod_dir, 'toy-0.0.eb')
self.assertTrue(os.path.exists(reprod_ec))
# Check for child easyblock existence
child_easyblock = os.path.join(reprod_dir, 'easyblocks', 'toytoy.py')
self.assertTrue(os.path.exists(child_easyblock))
# Check for parent easyblock existence
parent_easyblock = os.path.join(reprod_dir, 'easyblocks', 'toy.py')
self.assertTrue(os.path.exists(parent_easyblock))
# Check for extension easyblock existence
ext_easyblock = os.path.join(reprod_dir, 'easyblocks', 'toy_extension.py')
self.assertTrue(os.path.exists(ext_easyblock))
# Make sure framework easyblock modules are not included
for framework_easyblock in ['easyblock.py', 'extensioneasyblock.py']:
path = os.path.join(reprod_dir, 'easyblocks', framework_easyblock)
self.assertFalse(os.path.exists(path))
def test_toy_toy(self):
"""Test building two easyconfigs in a single go, with one depending on the other."""
topdir = os.path.dirname(os.path.abspath(__file__))
toy_ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb')
toy_ec_txt = read_file(toy_ec_file)
ec1 = os.path.join(self.test_prefix, 'toy1.eb')
ec1_txt = '\n'.join([
toy_ec_txt,
"versionsuffix = '-one'",
])
write_file(ec1, ec1_txt)
ec2 = os.path.join(self.test_prefix, 'toy2.eb')
ec2_txt = '\n'.join([
toy_ec_txt,
"versionsuffix = '-two'",
"dependencies = [('toy', '0.0', '-one')]",
])
write_file(ec2, ec2_txt)
self.test_toy_build(ec_file=self.test_prefix, verify=False)
mod1 = os.path.join(self.test_installpath, 'modules', 'all', 'toy', '0.0-one')
mod2 = os.path.join(self.test_installpath, 'modules', 'all', 'toy', '0.0-two')
self.assertTrue(os.path.exists(mod1) or os.path.exists('%s.lua' % mod1))
self.assertTrue(os.path.exists(mod2) or os.path.exists('%s.lua' % mod2))
if os.path.exists(mod2):
mod2_txt = read_file(mod2)
else:
mod2_txt = read_file('%s.lua' % mod2)
load1_regex = re.compile('load.*toy/0.0-one', re.M)
self.assertTrue(load1_regex.search(mod2_txt), "Pattern '%s' should be found in: %s" % (load1_regex.pattern, mod2_txt))
def test_toy_sanity_check_commands(self):
"""Test toy build with extra sanity check commands."""
self.setup_hierarchical_modules()
test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
toy_ec_txt = read_file(os.path.join(test_easyconfigs, 't', 'toy', 'toy-0.0.eb'))
toy_ec_txt = '\n'.join([
toy_ec_txt,
"toolchain = {'name': 'foss', 'version': '2018a'}",
# specially construct (sort of senseless) sanity check commands,
# that will fail if the corresponding modules are not loaded
# cfr. https://github.com/easybuilders/easybuild-framework/pull/1754
"sanity_check_commands = [",
" 'env | grep EBROOTFFTW',",
" 'env | grep EBROOTGCC',",
# tuple format (kinda weird but kept in place for backward compatibility)
" ('env | grep EBROOTFOSS', ''),",
# True implies running 'toy -h', should work (although pretty senseless in this case)
" True,",
# test command to make sure that '-h' is not passed to commands specified as string ('env -h' fails)
" 'env',"
"]",
])
tweaked_toy_ec = os.path.join(self.test_prefix, 'toy-0.0-tweaked.eb')
write_file(tweaked_toy_ec, toy_ec_txt)
args = [
tweaked_toy_ec,
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--debug',
'--unittest-file=%s' % self.logfile,
'--force',
'--robot=%s' % test_easyconfigs,
'--module-naming-scheme=HierarchicalMNS',
]
self.eb_main(args, logfile=self.dummylogfn, do_build=True, verbose=True, raise_error=True)
modpath = os.path.join(self.test_installpath, 'modules', 'all')
toy_modfile = os.path.join(modpath, 'MPI', 'GCC', '6.4.0-2.28', 'OpenMPI', '2.1.2', 'toy', '0.0')
if get_module_syntax() == 'Lua':
toy_modfile += '.lua'
self.assertTrue(os.path.exists(toy_modfile))
def test_sanity_check_paths_lib64(self):
"""Test whether fallback in sanity check for lib64/ equivalents of library files works."""
test_ecs_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'easyconfigs')
ec_file = os.path.join(test_ecs_dir, 'test_ecs', 't', 'toy', 'toy-0.0.eb')
ectxt = read_file(ec_file)
# modify test easyconfig: move lib/libtoy.a to lib64/libtoy.a
ectxt = re.sub(r"\s*'files'.*", "'files': ['bin/toy', ('lib/libtoy.a', 'lib/libfoo.a')],", ectxt)
postinstallcmd = "mkdir %(installdir)s/lib64 && mv %(installdir)s/lib/libtoy.a %(installdir)s/lib64/libtoy.a"
ectxt = re.sub("postinstallcmds.*", "postinstallcmds = ['%s']" % postinstallcmd, ectxt)
test_ec = os.path.join(self.test_prefix, 'toy-0.0.eb')
write_file(test_ec, ectxt)
# sanity check fails if lib64 fallback in sanity check is disabled
error_pattern = r"Sanity check failed: no file found at 'lib/libtoy.a' or 'lib/libfoo.a' in "
self.assertErrorRegex(EasyBuildError, error_pattern, self.test_toy_build, ec_file=test_ec,
extra_args=['--disable-lib64-fallback-sanity-check', '--disable-lib64-lib-symlink'],
raise_error=True, verbose=False)
# all is fine if the lib64 fallback check is enabled (which it is by default)
self.test_toy_build(ec_file=test_ec, raise_error=True)
# also check with 'lib' in sanity check dirs (special case)
ectxt = re.sub(r"\s*'files'.*", "'files': ['bin/toy'],", ectxt)
ectxt = re.sub(r"\s*'dirs'.*", "'dirs': ['lib'],", ectxt)
write_file(test_ec, ectxt)
error_pattern = r"Sanity check failed: no \(non-empty\) directory found at 'lib' in "
self.assertErrorRegex(EasyBuildError, error_pattern, self.test_toy_build, ec_file=test_ec,
extra_args=['--disable-lib64-fallback-sanity-check', '--disable-lib64-lib-symlink'],
raise_error=True, verbose=False)
self.test_toy_build(ec_file=test_ec, extra_args=['--disable-lib64-lib-symlink'], raise_error=True)
# also check other way around (lib64 -> lib)
ectxt = read_file(ec_file)
ectxt = re.sub(r"\s*'files'.*", "'files': ['bin/toy', 'lib64/libtoy.a'],", ectxt)
write_file(test_ec, ectxt)
# sanity check fails if lib64 fallback in sanity check is disabled, since lib64/libtoy.a is not there
error_pattern = r"Sanity check failed: no file found at 'lib64/libtoy.a' in "
self.assertErrorRegex(EasyBuildError, error_pattern, self.test_toy_build, ec_file=test_ec,
extra_args=['--disable-lib64-fallback-sanity-check', '--disable-lib64-lib-symlink'],
raise_error=True, verbose=False)
# sanity check passes when lib64 fallback is enabled (by default), since lib/libtoy.a is also considered
self.test_toy_build(ec_file=test_ec, extra_args=['--disable-lib64-lib-symlink'], raise_error=True)
# also check with 'lib64' in sanity check dirs (special case)
ectxt = re.sub(r"\s*'files'.*", "'files': ['bin/toy'],", ectxt)
ectxt = re.sub(r"\s*'dirs'.*", "'dirs': ['lib64'],", ectxt)
write_file(test_ec, ectxt)
error_pattern = r"Sanity check failed: no \(non-empty\) directory found at 'lib64' in "
self.assertErrorRegex(EasyBuildError, error_pattern, self.test_toy_build, ec_file=test_ec,
extra_args=['--disable-lib64-fallback-sanity-check', '--disable-lib64-lib-symlink'],
raise_error=True, verbose=False)
self.test_toy_build(ec_file=test_ec, extra_args=['--disable-lib64-lib-symlink'], raise_error=True)
# check whether fallback works for files that are more than one subdir deep
ectxt = read_file(ec_file)
ectxt = re.sub(r"\s*'files'.*", "'files': ['bin/toy', 'lib/test/libtoy.a'],", ectxt)
postinstallcmd = "mkdir -p %(installdir)s/lib64/test && "
postinstallcmd += "mv %(installdir)s/lib/libtoy.a %(installdir)s/lib64/test/libtoy.a"
ectxt = re.sub("postinstallcmds.*", "postinstallcmds = ['%s']" % postinstallcmd, ectxt)
write_file(test_ec, ectxt)
self.test_toy_build(ec_file=test_ec, extra_args=['--disable-lib64-lib-symlink'], raise_error=True)
def test_toy_build_enhanced_sanity_check(self):
"""Test enhancing of sanity check."""
# if toy easyblock was imported, get rid of corresponding entry in sys.modules,
# to avoid that it messes up the use of --include-easyblocks=toy.py below...
if 'easybuild.easyblocks.toy' in sys.modules:
del sys.modules['easybuild.easyblocks.toy']
test_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)))
toy_ec = os.path.join(test_dir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb')
toy_ec_txt = read_file(toy_ec)
test_ec = os.path.join(self.test_prefix, 'test.eb')
# get rid of custom sanity check paths in test easyconfig
regex = re.compile(r'^sanity_check_paths\s*=\s*{[^}]+}', re.M)
test_ec_txt = regex.sub('', toy_ec_txt)
write_file(test_ec, test_ec_txt)
self.assertFalse('sanity_check_' in test_ec_txt)
# create custom easyblock for toy that has a custom sanity_check_step
toy_easyblock = os.path.join(test_dir, 'sandbox', 'easybuild', 'easyblocks', 't', 'toy.py')
toy_easyblock_txt = read_file(toy_easyblock)
toy_custom_sanity_check_step = '\n'.join([
'',
" def sanity_check_step(self):",
" paths = {",
" 'files': ['bin/toy'],",
" 'dirs': [],",
" }",
" cmds = ['toy']",
" return super(EB_toy, self).sanity_check_step(custom_paths=paths, custom_commands=cmds)",
])
test_toy_easyblock = os.path.join(self.test_prefix, 'toy.py')
write_file(test_toy_easyblock, toy_easyblock_txt + toy_custom_sanity_check_step)
eb_args = [
'--extended-dry-run',
'--include-easyblocks=%s' % test_toy_easyblock,
]
# by default, sanity check commands & paths specified by easyblock are used
self.mock_stdout(True)
self.test_toy_build(ec_file=test_ec, extra_args=eb_args, verify=False, testing=False, raise_error=True)
stdout = self.get_stdout()
self.mock_stdout(False)
pattern_lines = [
r"Sanity check paths - file.*",
r"\s*\* bin/toy",
r"Sanity check paths - \(non-empty\) directory.*",
r"\s*\(none\)",
r"Sanity check commands",
r"\s*\* toy",
r'',
]
regex = re.compile(r'\n'.join(pattern_lines), re.M)
self.assertTrue(regex.search(stdout), "Pattern '%s' should be found in: %s" % (regex.pattern, stdout))
# we need to manually wipe the entry for the included toy easyblock,
# to avoid trouble with subsequent EasyBuild sessions in this test
del sys.modules['easybuild.easyblocks.toy']
# easyconfig specifies custom sanity_check_paths & sanity_check_commands,
# the ones defined by the easyblock are skipped by default
test_ec_txt = test_ec_txt + '\n'.join([
'',
"sanity_check_paths = {",
" 'files': ['README'],",
" 'dirs': ['bin/']",
"}",
"sanity_check_commands = ['ls %(installdir)s']",
])
write_file(test_ec, test_ec_txt)
self.mock_stdout(True)
self.test_toy_build(ec_file=test_ec, extra_args=eb_args, verify=False, testing=False, raise_error=True)
stdout = self.get_stdout()
self.mock_stdout(False)
pattern_lines = [
r"Sanity check paths - file.*",
r"\s*\* README",
r"Sanity check paths - \(non-empty\) directory.*",
r"\s*\* bin/",
r"Sanity check commands",
r"\s*\* ls .*/software/toy/0.0",
r'',
]
regex = re.compile(r'\n'.join(pattern_lines), re.M)
self.assertTrue(regex.search(stdout), "Pattern '%s' should be found in: %s" % (regex.pattern, stdout))
del sys.modules['easybuild.easyblocks.toy']
# if enhance_sanity_check is enabled, then sanity check paths/commands specified in easyconfigs
# are used in addition to those defined in easyblock
test_ec_txt = test_ec_txt + '\nenhance_sanity_check = True'
write_file(test_ec, test_ec_txt)
self.mock_stdout(True)
self.test_toy_build(ec_file=test_ec, extra_args=eb_args, verify=False, testing=False, raise_error=True)
stdout = self.get_stdout()
self.mock_stdout(False)
# now 'bin/toy' file and 'toy' command should also be part of sanity check
pattern_lines = [
r"Sanity check paths - file.*",
r"\s*\* README",
r"\s*\* bin/toy",
r"Sanity check paths - \(non-empty\) directory.*",
r"\s*\* bin/",
r"Sanity check commands",
r"\s*\* ls .*/software/toy/0.0",
r"\s*\* toy",
r'',
]
regex = re.compile(r'\n'.join(pattern_lines), re.M)
self.assertTrue(regex.search(stdout), "Pattern '%s' should be found in: %s" % (regex.pattern, stdout))
del sys.modules['easybuild.easyblocks.toy']
# sanity_check_paths with only one key is allowed if enhance_sanity_check is enabled;
test_ec_txt = test_ec_txt + "\nsanity_check_paths = {'files': ['README']}"
write_file(test_ec, test_ec_txt)
# we need to do a non-dry run here, to ensure the code we want to test is triggered
# (EasyConfig.dump called by 'reproduce_build' function from 'build_and_install_one')
eb_args = [
'--include-easyblocks=%s' % test_toy_easyblock,
'--trace',
]
self.mock_stdout(True)
self.test_toy_build(ec_file=test_ec, extra_args=eb_args, verify=False, testing=False, raise_error=True)
stdout = self.get_stdout()
self.mock_stdout(False)
pattern_lines = [
r"^== sanity checking\.\.\.",
r" >> file 'bin/toy' found: OK",
]
regex = re.compile(r'\n'.join(pattern_lines), re.M)
self.assertTrue(regex.search(stdout), "Pattern '%s' should be found in: %s" % (regex.pattern, stdout))
# no directories are checked in sanity check now, only files (since dirs is an empty list)
regex = re.compile(r"directory .* found:", re.M)
self.assertFalse(regex.search(stdout), "Pattern '%s' should be not found in: %s" % (regex.pattern, stdout))
del sys.modules['easybuild.easyblocks.toy']
# if enhance_sanity_check is disabled, both files/dirs keys are strictly required in sanity_check_paths
test_ec_txt = test_ec_txt + '\nenhance_sanity_check = False'
write_file(test_ec, test_ec_txt)
error_pattern = r" Missing mandatory key 'dirs' in sanity_check_paths."
self.assertErrorRegex(EasyBuildError, error_pattern, self.test_toy_build, ec_file=test_ec,
extra_args=eb_args, raise_error=True, verbose=False)
del sys.modules['easybuild.easyblocks.toy']
def test_toy_dumped_easyconfig(self):
""" Test dumping of file in eb_filerepo in both .eb and .yeb format """
filename = 'toy-0.0'
test_ecs_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'easyconfigs')
paths = [
os.path.join(test_ecs_dir, 'test_ecs', 't', 'toy', '%s.eb' % filename),
os.path.join(test_ecs_dir, 'yeb', '%s.yeb' % filename),
]
for path in paths:
if path.endswith('.yeb') and 'yaml' not in sys.modules:
print("Skipping .yeb part of test_toy_dumped_easyconfig (no PyYAML available)")
continue
args = [
path,
'--experimental',
'--force',
]
self.eb_main(args, do_build=True)
# test eb build with dumped file
args[0] = os.path.join(get_repositorypath()[0], 'toy', 'toy-0.0%s' % os.path.splitext(path)[-1])
self.eb_main(args, do_build=True)
easybuild.tools.build_log.EXPERIMENTAL = True
ec = EasyConfig(args[0])
buildstats = ec.parser.get_config_dict()['buildstats']
self.assertTrue(all(isinstance(bs, dict) for bs in buildstats))
def test_toy_filter_env_vars(self):
"""Test use of --filter-env-vars on generated module file"""
toy_mod_path = os.path.join(self.test_installpath, 'modules', 'all', 'toy', '0.0')
if get_module_syntax() == 'Lua':
toy_mod_path += '.lua'
regexs = [
re.compile("prepend[-_]path.*LD_LIBRARY_PATH.*lib", re.M),
re.compile("prepend[-_]path.*LIBRARY_PATH.*lib", re.M),
re.compile("prepend[-_]path.*PATH.*bin", re.M),
]
self.test_toy_build()
toy_mod_txt = read_file(toy_mod_path)
for regex in regexs:
self.assertTrue(regex.search(toy_mod_txt), "Pattern '%s' should be found in: %s" % (regex.pattern, toy_mod_txt))
self.test_toy_build(extra_args=['--filter-env-vars=LD_LIBRARY_PATH,PATH'])
toy_mod_txt = read_file(toy_mod_path)
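# LD_LIBRARY_PATH and PATH entries should now be filtered out of the module, while LIBRARY_PATH is retained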
self.assertFalse(regexs[0].search(toy_mod_txt), "Pattern '%s' found in: %s" % (regexs[0].pattern, toy_mod_txt))
self.assertTrue(regexs[1].search(toy_mod_txt), "Pattern '%s' should be found in: %s" % (regexs[1].pattern, toy_mod_txt))
self.assertFalse(regexs[2].search(toy_mod_txt), "Pattern '%s' found in: %s" % (regexs[2].pattern, toy_mod_txt))
def test_toy_iter(self):
"""Test toy build that involves iterating over buildopts."""
topdir = os.path.abspath(os.path.dirname(__file__))
toy_ec = os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0-iter.eb')
expected_buildopts = ['', '-O2; mv %(name)s toy_O2_$EBVERSIONGCC', '-O1; mv %(name)s toy_O1_$EBVERSIONGCC']
for extra_args in [None, ['--minimal-toolchains']]:
# sanity check will make sure all entries in buildopts list were taken into account
self.test_toy_build(ec_file=toy_ec, extra_args=extra_args, versionsuffix='-iter')
# verify whether dumped easyconfig contains original value for buildopts
dumped_toy_ec = os.path.join(self.test_prefix, 'ebfiles_repo', 'toy', os.path.basename(toy_ec))
ec = EasyConfigParser(dumped_toy_ec).get_config_dict()
self.assertEqual(ec['buildopts'], expected_buildopts)
def test_toy_rpath(self):
"""Test toy build using --rpath."""
# find_eb_script function used to find rpath_args.py requires that location where easybuild/scripts
# resides is listed in sys.path via absolute path;
# this is only needed to make this test pass when it's being called from that same location...
top_path = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.insert(0, top_path)
def grab_gcc_rpath_wrapper_args():
"""Helper function to grab arguments from last RPATH wrapper for 'gcc'."""
rpath_wrappers_dir = glob.glob(os.path.join(os.getenv('TMPDIR'), '*', '*', 'rpath_wrappers'))[0]
gcc_rpath_wrapper_txt = read_file(glob.glob(os.path.join(rpath_wrappers_dir, '*', 'gcc'))[0])
# First get the filter argument
rpath_args_regex = re.compile(r"^rpath_args_out=.*rpath_args.py \$CMD '([^ ]*)'.*", re.M)
res_filter = rpath_args_regex.search(gcc_rpath_wrapper_txt)
self.assertTrue(res_filter, "Pattern '%s' should be found in: %s" % (rpath_args_regex.pattern, gcc_rpath_wrapper_txt))
# Now get the include argument
rpath_args_regex = re.compile(r"^rpath_args_out=.*rpath_args.py \$CMD '.*' '([^ ]*)'.*", re.M)
res_include = rpath_args_regex.search(gcc_rpath_wrapper_txt)
self.assertTrue(res_include, "Pattern '%s' should be found in: %s" % (rpath_args_regex.pattern,
gcc_rpath_wrapper_txt))
shutil.rmtree(rpath_wrappers_dir)
return {'filter_paths': res_filter.group(1), 'include_paths': res_include.group(1)}
args = ['--rpath', '--experimental']
self.test_toy_build(extra_args=args, raise_error=True)
# by default, /lib and /usr are included in RPATH filter,
# together with temporary directory and build directory
rpath_filter_paths = grab_gcc_rpath_wrapper_args()['filter_paths'].split(',')
self.assertTrue('/lib.*' in rpath_filter_paths)
self.assertTrue('/usr.*' in rpath_filter_paths)
self.assertTrue(any(p.startswith(os.getenv('TMPDIR')) for p in rpath_filter_paths))
self.assertTrue(any(p.startswith(self.test_buildpath) for p in rpath_filter_paths))
# Check that we can use --rpath-override-dirs
args = ['--rpath', '--experimental', '--rpath-override-dirs=/opt/eessi/2021.03/lib:/opt/eessi/lib']
self.test_toy_build(extra_args=args, raise_error=True)
rpath_include_paths = grab_gcc_rpath_wrapper_args()['include_paths'].split(',')
# Make sure our directories appear in dirs to be included in the rpath (and in the right order)
self.assertEqual(rpath_include_paths[0], '/opt/eessi/2021.03/lib')
self.assertEqual(rpath_include_paths[1], '/opt/eessi/lib')
# Check that when we use --rpath-override-dirs empty values are filtered
args = ['--rpath', '--experimental', '--rpath-override-dirs=/opt/eessi/2021.03/lib::/opt/eessi/lib']
self.test_toy_build(extra_args=args, raise_error=True)
rpath_include_paths = grab_gcc_rpath_wrapper_args()['include_paths'].split(',')
# Make sure our directories appear in dirs to be included in the rpath (and in the right order)
self.assertEqual(rpath_include_paths[0], '/opt/eessi/2021.03/lib')
self.assertEqual(rpath_include_paths[1], '/opt/eessi/lib')
# Check that when we use --rpath-override-dirs we can only provide absolute paths
eb_args = ['--rpath', '--experimental', '--rpath-override-dirs=/opt/eessi/2021.03/lib:eessi/lib']
error_pattern = r"Path used in rpath_override_dirs is not an absolute path: eessi/lib"
self.assertErrorRegex(EasyBuildError, error_pattern, self.test_toy_build, extra_args=eb_args, raise_error=True,
verbose=False)
# also test use of --rpath-filter
args.extend(['--rpath-filter=/test.*,/foo/bar.*', '--disable-cleanup-tmpdir'])
self.test_toy_build(extra_args=args, raise_error=True)
# check whether rpath filter was set correctly
rpath_filter_paths = grab_gcc_rpath_wrapper_args()['filter_paths'].split(',')
self.assertTrue('/test.*' in rpath_filter_paths)
self.assertTrue('/foo/bar.*' in rpath_filter_paths)
self.assertTrue(any(p.startswith(os.getenv('TMPDIR')) for p in rpath_filter_paths))
self.assertTrue(any(p.startswith(self.test_buildpath) for p in rpath_filter_paths))
# test use of rpath toolchain option
test_ecs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
toy_ec_txt = read_file(os.path.join(test_ecs, 't', 'toy', 'toy-0.0.eb'))
toy_ec_txt += "\ntoolchainopts = {'rpath': False}\n"
toy_ec = os.path.join(self.test_prefix, 'toy.eb')
write_file(toy_ec, toy_ec_txt)
self.test_toy_build(ec_file=toy_ec, extra_args=['--rpath', '--experimental'], raise_error=True)
def test_toy_modaltsoftname(self):
"""Build two dependent toys as in test_toy_toy but using modaltsoftname"""
topdir = os.path.dirname(os.path.abspath(__file__))
toy_ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb')
toy_ec_txt = read_file(toy_ec_file)
self.assertFalse(re.search('^modaltsoftname', toy_ec_txt, re.M))
ec1 = os.path.join(self.test_prefix, 'toy-0.0-one.eb')
ec1_txt = '\n'.join([
toy_ec_txt,
"versionsuffix = '-one'",
"modaltsoftname = 'yot'"
])
write_file(ec1, ec1_txt)
ec2 = os.path.join(self.test_prefix, 'toy-0.0-two.eb')
ec2_txt = '\n'.join([
toy_ec_txt,
"versionsuffix = '-two'",
"dependencies = [('toy', '0.0', '-one')]",
"modaltsoftname = 'toytwo'",
])
write_file(ec2, ec2_txt)
extra_args = [
'--module-naming-scheme=HierarchicalMNS',
'--robot-paths=%s' % self.test_prefix,
]
self.test_toy_build(ec_file=self.test_prefix, verify=False, extra_args=extra_args, raise_error=True)
software_path = os.path.join(self.test_installpath, 'software')
modules_path = os.path.join(self.test_installpath, 'modules', 'all', 'Core')
# install dirs for both installations should be there (using original software name)
self.assertTrue(os.path.exists(os.path.join(software_path, 'toy', '0.0-one', 'bin', 'toy')))
self.assertTrue(os.path.exists(os.path.join(software_path, 'toy', '0.0-two', 'bin', 'toy')))
toytwo_name = '0.0-two'
yot_name = '0.0-one'
if get_module_syntax() == 'Lua':
toytwo_name += '.lua'
yot_name += '.lua'
# modules for both installations with alternative name should be there
self.assertTrue(os.path.exists(os.path.join(modules_path, 'toytwo', toytwo_name)))
self.assertTrue(os.path.exists(os.path.join(modules_path, 'yot', yot_name)))
# only subdirectories for software should be created
self.assertEqual(sorted(os.listdir(software_path)), sorted(['toy', '.locks']))
self.assertEqual(sorted(os.listdir(os.path.join(software_path, 'toy'))), ['0.0-one', '0.0-two'])
# only subdirectories for modules with alternative names should be created
self.assertEqual(sorted(os.listdir(modules_path)), ['toytwo', 'yot'])
self.assertEqual(os.listdir(os.path.join(modules_path, 'toytwo')), [toytwo_name])
self.assertEqual(os.listdir(os.path.join(modules_path, 'yot')), [yot_name])
def test_toy_build_trace(self):
"""Test use of --trace"""
topdir = os.path.dirname(os.path.abspath(__file__))
toy_ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb')
test_ec = os.path.join(self.test_prefix, 'test.eb')
write_file(test_ec, read_file(toy_ec_file) + '\nsanity_check_commands = ["toy"]')
self.mock_stderr(True)
self.mock_stdout(True)
self.test_toy_build(ec_file=test_ec, extra_args=['--trace', '--experimental'], verify=False, testing=False)
stderr = self.get_stderr()
stdout = self.get_stdout()
self.mock_stderr(False)
self.mock_stdout(False)
self.assertEqual(stderr, '')
patterns = [
r"^ >> installation prefix: .*/software/toy/0\.0$",
r"^== fetching files\.\.\.\n >> sources:\n >> .*/toy-0\.0\.tar\.gz \[SHA256: 44332000.*\]$",
r"^ >> applying patch toy-0\.0_fix-silly-typo-in-printf-statement\.patch$",
r'\n'.join([
r"^ >> running command:",
r"\t\[started at: .*\]",
r"\t\[working dir: .*\]",
r"\t\[output logged in .*\]",
r"\tgcc toy.c -o toy\n"
r'',
]),
r" >> command completed: exit 0, ran in .*",
r'^' + r'\n'.join([
r"== sanity checking\.\.\.",
r" >> file 'bin/yot' or 'bin/toy' found: OK",
r" >> \(non-empty\) directory 'bin' found: OK",
r" >> loading modules: toy/0.0\.\.\.",
r" >> running command 'toy' \.\.\.",
r" >> result for command 'toy': OK",
]) + r'$',
r"^== creating module\.\.\.\n >> generating module file @ .*/modules/all/toy/0\.0(?:\.lua)?$",
]
for pattern in patterns:
regex = re.compile(pattern, re.M)
self.assertTrue(regex.search(stdout), "Pattern '%s' found in: %s" % (regex.pattern, stdout))
def test_toy_build_hooks(self):
"""Test use of --hooks."""
hooks_file = os.path.join(self.test_prefix, 'my_hooks.py')
hooks_file_txt = textwrap.dedent("""
import os
def start_hook():
print('start hook triggered')
def parse_hook(ec):
print('%s %s' % (ec.name, ec.version))
# print sources value to check that raw untemplated strings are exposed in parse_hook
print(ec['sources'])
# try appending to postinstallcmd to see whether the modification is actually picked up
# (requires templating to be disabled before parse_hook is called)
ec['postinstallcmds'].append('echo toy')
print(ec['postinstallcmds'][-1])
def pre_configure_hook(self):
print('pre-configure: toy.source: %s' % os.path.exists('toy.source'))
def post_configure_hook(self):
print('post-configure: toy.source: %s' % os.path.exists('toy.source'))
def post_install_hook(self):
print('in post-install hook for %s v%s' % (self.name, self.version))
print(', '.join(sorted(os.listdir(self.installdir))))
def module_write_hook(self, module_path, module_txt):
print('in module-write hook for %s' % os.path.basename(module_path))
return module_txt.replace('Toy C program, 100% toy.', 'Not a toy anymore')
def end_hook():
print('end hook triggered, all done!')
""")
write_file(hooks_file, hooks_file_txt)
self.mock_stderr(True)
self.mock_stdout(True)
self.test_toy_build(extra_args=['--hooks=%s' % hooks_file], raise_error=True)
stderr = self.get_stderr()
stdout = self.get_stdout()
self.mock_stderr(False)
self.mock_stdout(False)
test_mod_path = os.path.join(self.test_installpath, 'modules', 'all')
toy_mod_file = os.path.join(test_mod_path, 'toy', '0.0')
if get_module_syntax() == 'Lua':
toy_mod_file += '.lua'
self.assertEqual(stderr, '')
# There are 4 modules written:
# one each during the sanity check for extensions and for the main easyblock, plus the main and devel module
expected_output = textwrap.dedent("""
== Running start hook...
start hook triggered
== Running parse hook for toy-0.0.eb...
toy 0.0
['%(name)s-%(version)s.tar.gz']
echo toy
== Running pre-configure hook...
pre-configure: toy.source: True
== Running post-configure hook...
post-configure: toy.source: False
== Running post-install hook...
in post-install hook for toy v0.0
bin, lib
== Running module_write hook...
in module-write hook for {mod_name}
== Running module_write hook...
in module-write hook for {mod_name}
== Running module_write hook...
in module-write hook for {mod_name}
== Running module_write hook...
in module-write hook for {mod_name}
== Running end hook...
end hook triggered, all done!
""").strip().format(mod_name=os.path.basename(toy_mod_file))
self.assertEqual(stdout.strip(), expected_output)
toy_mod = read_file(toy_mod_file)
self.assertIn('Not a toy anymore', toy_mod)
def test_toy_multi_deps(self):
"""Test installation of toy easyconfig that uses multi_deps."""
test_ecs_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
toy_ec = os.path.join(test_ecs_dir, 't', 'toy', 'toy-0.0.eb')
test_ec_txt = read_file(toy_ec)
test_ec = os.path.join(self.test_prefix, 'test.eb')
# also inject (minimal) list of extensions to test iterative installation of extensions
test_ec_txt += "\nexts_list = [('barbar', '0.0', {'start_dir': 'src'})]"
test_ec_txt += "\nmulti_deps = {'GCC': ['4.6.3', '7.3.0-2.30']}"
write_file(test_ec, test_ec_txt)
test_mod_path = os.path.join(self.test_installpath, 'modules', 'all')
# create empty modules for both GCC versions
# (in Tcl syntax, because we're lazy since that works for all supported module tools)
gcc463_modfile = os.path.join(test_mod_path, 'GCC', '4.6.3')
write_file(gcc463_modfile, ModuleGeneratorTcl.MODULE_SHEBANG)
write_file(os.path.join(test_mod_path, 'GCC', '7.3.0-2.30'), ModuleGeneratorTcl.MODULE_SHEBANG)
self.modtool.use(test_mod_path)
# instruct Lmod to disallow auto-swapping of an already loaded module with the same name as the module being loaded,
# so that loading GCC/7.3.0-2.30 while GCC/4.6.3 is already loaded (the default) fails rather than silently swapping
os.environ['LMOD_DISABLE_SAME_NAME_AUTOSWAP'] = 'yes'
self.test_toy_build(ec_file=test_ec)
toy_mod_file = os.path.join(test_mod_path, 'toy', '0.0')
if get_module_syntax() == 'Lua':
toy_mod_file += '.lua'
toy_mod_txt = read_file(toy_mod_file)
# check whether (guarded) load statement for first version listed in multi_deps is there
if get_module_syntax() == 'Lua':
expected = '\n'.join([
'if not ( isloaded("GCC/4.6.3") ) and not ( isloaded("GCC/7.3.0-2.30") ) then',
' load("GCC/4.6.3")',
'end',
])
else:
expected = '\n'.join([
'if { ![ is-loaded GCC/4.6.3 ] && ![ is-loaded GCC/7.3.0-2.30 ] } {',
' module load GCC/4.6.3',
'}',
])
self.assertTrue(expected in toy_mod_txt, "Pattern '%s' should be found in: %s" % (expected, toy_mod_txt))
# also check relevant parts of "module help" and whatis bits
expected_descr = '\n'.join([
"Compatible modules",
"==================",
"This module is compatible with the following modules, one of each line is required:",
"* GCC/4.6.3 (default), GCC/7.3.0-2.30",
])
error_msg_descr = "Pattern '%s' should be found in: %s" % (expected_descr, toy_mod_txt)
self.assertTrue(expected_descr in toy_mod_txt, error_msg_descr)
if get_module_syntax() == 'Lua':
expected_whatis = "whatis([==[Compatible modules: GCC/4.6.3 (default), GCC/7.3.0-2.30]==])"
else:
expected_whatis = "module-whatis {Compatible modules: GCC/4.6.3 (default), GCC/7.3.0-2.30}"
error_msg_whatis = "Pattern '%s' should be found in: %s" % (expected_whatis, toy_mod_txt)
self.assertTrue(expected_whatis in toy_mod_txt, error_msg_whatis)
def check_toy_load(depends_on=False):
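"""Check which GCC module gets (un)loaded together with toy/0.0, both with 'load' and 'depends_on' statements."""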
# by default, toy/0.0 should load GCC/4.6.3 (first listed GCC version in multi_deps)
self.modtool.load(['toy/0.0'])
loaded_mod_names = [x['mod_name'] for x in self.modtool.list()]
self.assertTrue('toy/0.0' in loaded_mod_names)
self.assertTrue('GCC/4.6.3' in loaded_mod_names)
self.assertFalse('GCC/7.3.0-2.30' in loaded_mod_names)
if depends_on:
# check behaviour when unloading toy (should also unload GCC/4.6.3)
self.modtool.unload(['toy/0.0'])
loaded_mod_names = [x['mod_name'] for x in self.modtool.list()]
self.assertFalse('toy/0.0' in loaded_mod_names)
self.assertFalse('GCC/4.6.3' in loaded_mod_names)
else:
# just undo (don't use 'purge', which may cause problems in test environment), to prepare for next test
self.modtool.unload(['toy/0.0', 'GCC/4.6.3'])
# if GCC/7.3.0-2.30 is loaded first, then GCC/4.6.3 is not loaded by loading toy/0.0
self.modtool.load(['GCC/7.3.0-2.30'])
loaded_mod_names = [x['mod_name'] for x in self.modtool.list()]
self.assertTrue('GCC/7.3.0-2.30' in loaded_mod_names)
self.modtool.load(['toy/0.0'])
loaded_mod_names = [x['mod_name'] for x in self.modtool.list()]
self.assertTrue('toy/0.0' in loaded_mod_names)
self.assertTrue('GCC/7.3.0-2.30' in loaded_mod_names)
self.assertFalse('GCC/4.6.3' in loaded_mod_names)
if depends_on:
# check behaviour when unloading toy (should *not* unload GCC/7.3.0-2.30)
self.modtool.unload(['toy/0.0'])
loaded_mod_names = [x['mod_name'] for x in self.modtool.list()]
self.assertFalse('toy/0.0' in loaded_mod_names)
self.assertTrue('GCC/7.3.0-2.30' in loaded_mod_names)
else:
# just undo
self.modtool.unload(['toy/0.0', 'GCC/7.3.0-2.30'])
# having GCC/4.6.3 loaded already is also fine
self.modtool.load(['GCC/4.6.3'])
loaded_mod_names = [x['mod_name'] for x in self.modtool.list()]
self.assertTrue('GCC/4.6.3' in loaded_mod_names)
self.modtool.load(['toy/0.0'])
loaded_mod_names = [x['mod_name'] for x in self.modtool.list()]
self.assertTrue('toy/0.0' in loaded_mod_names)
self.assertTrue('GCC/4.6.3' in loaded_mod_names)
self.assertFalse('GCC/7.3.0-2.30' in loaded_mod_names)
if depends_on:
# check behaviour when unloading toy (should *not* unload GCC/4.6.3)
self.modtool.unload(['toy/0.0'])
loaded_mod_names = [x['mod_name'] for x in self.modtool.list()]
self.assertFalse('toy/0.0' in loaded_mod_names)
self.assertTrue('GCC/4.6.3' in loaded_mod_names)
else:
# just undo
self.modtool.unload(['toy/0.0', 'GCC/4.6.3'])
check_toy_load()
        # this behaviour can be disabled via "multi_deps_load_default = False"
write_file(test_ec, test_ec_txt + "\nmulti_deps_load_default = False")
remove_file(toy_mod_file)
self.test_toy_build(ec_file=test_ec)
toy_mod_txt = read_file(toy_mod_file)
self.assertFalse(expected in toy_mod_txt, "Pattern '%s' should not be found in: %s" % (expected, toy_mod_txt))
self.modtool.load(['toy/0.0'])
loaded_mod_names = [x['mod_name'] for x in self.modtool.list()]
self.assertTrue('toy/0.0' in loaded_mod_names)
self.assertFalse('GCC/4.6.3' in loaded_mod_names)
self.assertFalse('GCC/7.3.0-2.30' in loaded_mod_names)
# also check relevant parts of "module help" and whatis bits (no '(default)' here!)
expected_descr_no_default = '\n'.join([
"Compatible modules",
"==================",
"This module is compatible with the following modules, one of each line is required:",
"* GCC/4.6.3, GCC/7.3.0-2.30",
])
error_msg_descr = "Pattern '%s' should be found in: %s" % (expected_descr_no_default, toy_mod_txt)
self.assertTrue(expected_descr_no_default in toy_mod_txt, error_msg_descr)
if get_module_syntax() == 'Lua':
expected_whatis_no_default = "whatis([==[Compatible modules: GCC/4.6.3, GCC/7.3.0-2.30]==])"
else:
expected_whatis_no_default = "module-whatis {Compatible modules: GCC/4.6.3, GCC/7.3.0-2.30}"
error_msg_whatis = "Pattern '%s' should be found in: %s" % (expected_whatis_no_default, toy_mod_txt)
self.assertTrue(expected_whatis_no_default in toy_mod_txt, error_msg_whatis)
# restore original environment to continue testing with a clean slate
modify_env(os.environ, self.orig_environ, verbose=False)
self.modtool.use(test_mod_path)
write_file(test_ec, test_ec_txt)
# also check behaviour when using 'depends_on' rather than 'load' statements (requires Lmod 7.6.1 or newer)
if self.modtool.supports_depends_on:
remove_file(toy_mod_file)
self.test_toy_build(ec_file=test_ec, extra_args=['--module-depends-on'])
toy_mod_txt = read_file(toy_mod_file)
# check whether (guarded) load statement for first version listed in multi_deps is there
if get_module_syntax() == 'Lua':
expected = '\n'.join([
'if mode() == "unload" or isloaded("GCC/7.3.0-2.30") then',
' depends_on("GCC")',
'else',
' depends_on("GCC/4.6.3")',
'end',
])
else:
expected = '\n'.join([
'if { [ module-info mode remove ] || [ is-loaded GCC/7.3.0-2.30 ] } {',
' depends-on GCC',
'} else {',
' depends-on GCC/4.6.3',
'}',
])
self.assertTrue(expected in toy_mod_txt, "Pattern '%s' should be found in: %s" % (expected, toy_mod_txt))
error_msg_descr = "Pattern '%s' should be found in: %s" % (expected_descr, toy_mod_txt)
self.assertTrue(expected_descr in toy_mod_txt, error_msg_descr)
error_msg_whatis = "Pattern '%s' should be found in: %s" % (expected_whatis, toy_mod_txt)
self.assertTrue(expected_whatis in toy_mod_txt, error_msg_whatis)
check_toy_load(depends_on=True)
def test_fix_shebang(self):
"""Test use of fix_python_shebang_for & co."""
test_ecs_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
toy_ec_txt = read_file(os.path.join(test_ecs_dir, 't', 'toy', 'toy-0.0.eb'))
test_ec = os.path.join(self.test_prefix, 'test.eb')
test_ec_txt = '\n'.join([
toy_ec_txt,
"postinstallcmds = ["
# copy of bin/toy to use in fix_python_shebang_for and fix_perl_shebang_for
" 'cp -a %(installdir)s/bin/toy %(installdir)s/bin/toy.python',",
" 'cp -a %(installdir)s/bin/toy %(installdir)s/bin/toy.perl',",
# hardcoded path to bin/python
" 'echo \"#!/usr/bin/python\\n# test\" > %(installdir)s/bin/t1.py',",
# hardcoded path to bin/python3.6
" 'echo \"#!/software/Python/3.6.6-foss-2018b/bin/python3.6\\n# test\" > %(installdir)s/bin/t2.py',",
# already OK, should remain the same
" 'echo \"#!/usr/bin/env python\\n# test\" > %(installdir)s/bin/t3.py',",
# space after #! + 'env python3'
" 'echo \"#! /usr/bin/env python3\\n# test\" > %(installdir)s/bin/t4.py',",
# 'env python3.6'
" 'echo \"#!/usr/bin/env python3.6\\n# test\" > %(installdir)s/bin/t5.py',",
# shebang with space, should strip the space
" 'echo \"#! /usr/bin/env python\\n# test\" > %(installdir)s/bin/t6.py',",
# no shebang python
" 'echo \"# test\" > %(installdir)s/bin/t7.py',",
# shebang bash
" 'echo \"#!/usr/bin/env bash\\n# test\" > %(installdir)s/bin/b1.sh',",
# tests for perl shebang
# hardcoded path to bin/perl
" 'echo \"#!/usr/bin/perl\\n# test\" > %(installdir)s/bin/t1.pl',",
# hardcoded path to bin/perl5
" 'echo \"#!/software/Perl/5.28.1-GCCcore-7.3.0/bin/perl5\\n# test\" > %(installdir)s/bin/t2.pl',",
# already OK, should remain the same
" 'echo \"#!/usr/bin/env perl\\n# test\" > %(installdir)s/bin/t3.pl',",
# hardcoded perl with extra arguments
" 'echo \"#!/usr/bin/perl -w\\n# test\" > %(installdir)s/bin/t4.pl',",
# space after #! + 'env perl5'
" 'echo \"#!/usr/bin/env perl5\\n# test\" > %(installdir)s/bin/t5.pl',",
# shebang with space, should strip the space
" 'echo \"#! /usr/bin/env perl\\n# test\" > %(installdir)s/bin/t6.pl',",
# no shebang perl
" 'echo \"# test\" > %(installdir)s/bin/t7.pl',",
# shebang bash
" 'echo \"#!/usr/bin/env bash\\n# test\" > %(installdir)s/bin/b2.sh',",
"]",
"fix_python_shebang_for = ['bin/t1.py', 'bin/*.py', 'nosuchdir/*.py', 'bin/toy.python', 'bin/b1.sh']",
"fix_perl_shebang_for = ['bin/*.pl', 'bin/b2.sh', 'bin/toy.perl']",
])
write_file(test_ec, test_ec_txt)
self.test_toy_build(ec_file=test_ec, raise_error=True)
toy_bindir = os.path.join(self.test_installpath, 'software', 'toy', '0.0', 'bin')
        # bin/toy and its copies (toy.perl, toy.python) should *not* be patched, since they're binary files
toy_txt = read_file(os.path.join(toy_bindir, 'toy'), mode='rb')
for fn in ['toy.perl', 'toy.python']:
fn_txt = read_file(os.path.join(toy_bindir, fn), mode='rb')
# no shebang added
self.assertFalse(fn_txt.startswith(b"#!/"))
# exact same file as original binary (untouched)
self.assertEqual(toy_txt, fn_txt)
# no re.M, this should match at start of file!
py_shebang_regex = re.compile(r'^#!/usr/bin/env python\n# test$')
for pybin in ['t1.py', 't2.py', 't3.py', 't4.py', 't5.py', 't6.py', 't7.py']:
pybin_path = os.path.join(toy_bindir, pybin)
pybin_txt = read_file(pybin_path)
            self.assertTrue(py_shebang_regex.match(pybin_txt),
                            "Pattern '%s' should be found in %s: %s" % (py_shebang_regex.pattern, pybin_path, pybin_txt))
# no re.M, this should match at start of file!
perl_shebang_regex = re.compile(r'^#!/usr/bin/env perl\n# test$')
for perlbin in ['t1.pl', 't2.pl', 't3.pl', 't4.pl', 't5.pl', 't6.pl', 't7.pl']:
perlbin_path = os.path.join(toy_bindir, perlbin)
perlbin_txt = read_file(perlbin_path)
            self.assertTrue(perl_shebang_regex.match(perlbin_txt),
                            "Pattern '%s' should be found in %s: %s" % (perl_shebang_regex.pattern, perlbin_path, perlbin_txt))
# There are 2 bash files which shouldn't be influenced by fix_shebang
bash_shebang_regex = re.compile(r'^#!/usr/bin/env bash\n# test$')
for bashbin in ['b1.sh', 'b2.sh']:
bashbin_path = os.path.join(toy_bindir, bashbin)
bashbin_txt = read_file(bashbin_path)
            self.assertTrue(bash_shebang_regex.match(bashbin_txt),
                            "Pattern '%s' should be found in %s: %s" % (bash_shebang_regex.pattern, bashbin_path, bashbin_txt))
# now test with a custom env command
extra_args = ['--env-for-shebang=/usr/bin/env -S']
self.test_toy_build(ec_file=test_ec, extra_args=extra_args, raise_error=True)
toy_bindir = os.path.join(self.test_installpath, 'software', 'toy', '0.0', 'bin')
        # bin/toy and its copies (toy.perl, toy.python) should *not* be patched, since they're binary files
toy_txt = read_file(os.path.join(toy_bindir, 'toy'), mode='rb')
for fn in ['toy.perl', 'toy.python']:
fn_txt = read_file(os.path.join(toy_bindir, fn), mode='rb')
# no shebang added
self.assertFalse(fn_txt.startswith(b"#!/"))
# exact same file as original binary (untouched)
self.assertEqual(toy_txt, fn_txt)
# no re.M, this should match at start of file!
py_shebang_regex = re.compile(r'^#!/usr/bin/env -S python\n# test$')
for pybin in ['t1.py', 't2.py', 't3.py', 't4.py', 't5.py', 't6.py', 't7.py']:
pybin_path = os.path.join(toy_bindir, pybin)
pybin_txt = read_file(pybin_path)
            self.assertTrue(py_shebang_regex.match(pybin_txt),
                            "Pattern '%s' should be found in %s: %s" % (py_shebang_regex.pattern, pybin_path, pybin_txt))
# no re.M, this should match at start of file!
perl_shebang_regex = re.compile(r'^#!/usr/bin/env -S perl\n# test$')
for perlbin in ['t1.pl', 't2.pl', 't3.pl', 't4.pl', 't5.pl', 't6.pl', 't7.pl']:
perlbin_path = os.path.join(toy_bindir, perlbin)
perlbin_txt = read_file(perlbin_path)
            self.assertTrue(perl_shebang_regex.match(perlbin_txt),
                            "Pattern '%s' should be found in %s: %s" % (perl_shebang_regex.pattern, perlbin_path, perlbin_txt))
# There are 2 bash files which shouldn't be influenced by fix_shebang
bash_shebang_regex = re.compile(r'^#!/usr/bin/env bash\n# test$')
for bashbin in ['b1.sh', 'b2.sh']:
bashbin_path = os.path.join(toy_bindir, bashbin)
bashbin_txt = read_file(bashbin_path)
            self.assertTrue(bash_shebang_regex.match(bashbin_txt),
                            "Pattern '%s' should be found in %s: %s" % (bash_shebang_regex.pattern, bashbin_path, bashbin_txt))
def test_toy_system_toolchain_alias(self):
"""Test use of 'system' toolchain alias."""
toy_ec = os.path.join(os.path.dirname(__file__), 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb')
toy_ec_txt = read_file(toy_ec)
test_ec = os.path.join(self.test_prefix, 'test.eb')
tc_regex = re.compile('^toolchain = .*', re.M)
test_tcs = [
"toolchain = {'name': 'system', 'version': 'system'}",
"toolchain = {'name': 'system', 'version': ''}",
"toolchain = SYSTEM",
]
for tc in test_tcs:
test_ec_txt = tc_regex.sub(tc, toy_ec_txt)
write_file(test_ec, test_ec_txt)
self.test_toy_build(ec_file=test_ec)
def test_toy_ghost_installdir(self):
"""Test whether ghost installation directory is removed under --force."""
toy_installdir = os.path.join(self.test_prefix, 'test123', 'toy', '0.0')
mkdir(toy_installdir, parents=True)
write_file(os.path.join(toy_installdir, 'bin', 'toy'), "#!/bin/bash\necho hello")
toy_modfile = os.path.join(self.test_installpath, 'modules', 'all', 'toy', '0.0')
if get_module_syntax() == 'Lua':
toy_modfile += '.lua'
dummy_toy_mod_txt = 'local root = "%s"\n' % toy_installdir
else:
dummy_toy_mod_txt = '\n'.join([
"#%Module",
"set root %s" % toy_installdir,
'',
])
write_file(toy_modfile, dummy_toy_mod_txt)
stdout, stderr = self.run_test_toy_build_with_output()
# by default, a warning is printed for ghost installation directories (but they're left untouched)
self.assertFalse(stdout)
regex = re.compile("WARNING: Likely ghost installation directory detected: %s" % toy_installdir)
        self.assertTrue(regex.search(stderr), "Pattern '%s' should be found in: %s" % (regex.pattern, stderr))
self.assertTrue(os.path.exists(toy_installdir))
        # cleanup of ghost installation directories can be enabled via --remove-ghost-install-dirs
write_file(toy_modfile, dummy_toy_mod_txt)
stdout, stderr = self.run_test_toy_build_with_output(extra_args=['--remove-ghost-install-dirs'])
self.assertFalse(stderr)
regex = re.compile("^== Ghost installation directory %s removed" % toy_installdir)
        self.assertTrue(regex.search(stdout), "Pattern '%s' should be found in: %s" % (regex.pattern, stdout))
self.assertFalse(os.path.exists(toy_installdir))
def test_toy_build_lock(self):
"""Test toy installation when a lock is already in place."""
locks_dir = os.path.join(self.test_installpath, 'software', '.locks')
toy_installdir = os.path.join(self.test_installpath, 'software', 'toy', '0.0')
toy_lock_fn = toy_installdir.replace(os.path.sep, '_') + '.lock'
toy_lock_path = os.path.join(locks_dir, toy_lock_fn)
mkdir(toy_lock_path, parents=True)
error_pattern = "Lock .*_software_toy_0.0.lock already exists, aborting!"
self.assertErrorRegex(EasyBuildError, error_pattern, self.test_toy_build, raise_error=True, verbose=False)
locks_dir = os.path.join(self.test_prefix, 'locks')
# no lock in place, so installation proceeds as normal
extra_args = ['--locks-dir=%s' % locks_dir]
self.test_toy_build(extra_args=extra_args, verify=True, raise_error=True)
# put lock in place in custom locks dir, try again
toy_lock_path = os.path.join(locks_dir, toy_lock_fn)
mkdir(toy_lock_path, parents=True)
self.assertErrorRegex(EasyBuildError, error_pattern, self.test_toy_build,
extra_args=extra_args, raise_error=True, verbose=False)
# also test use of --ignore-locks
self.test_toy_build(extra_args=extra_args + ['--ignore-locks'], verify=True, raise_error=True)
orig_sigalrm_handler = signal.getsignal(signal.SIGALRM)
        # define a context manager that removes a lock after a while, so we can check the use of the --wait-on-lock* options
class remove_lock_after(object):
def __init__(self, seconds, lock_fp):
self.seconds = seconds
self.lock_fp = lock_fp
def remove_lock(self, *args):
remove_dir(self.lock_fp)
def __enter__(self):
signal.signal(signal.SIGALRM, self.remove_lock)
signal.alarm(self.seconds)
def __exit__(self, type, value, traceback):
# clean up SIGALRM signal handler, and cancel scheduled alarm
signal.signal(signal.SIGALRM, orig_sigalrm_handler)
signal.alarm(0)
# wait for lock to be removed, with 1 second interval of checking;
# check with both --wait-on-lock-interval and deprecated --wait-on-lock options
wait_regex = re.compile("^== lock .*_software_toy_0.0.lock exists, waiting 1 seconds", re.M)
ok_regex = re.compile("^== COMPLETED: Installation ended successfully", re.M)
test_cases = [
['--wait-on-lock=1'],
['--wait-on-lock=1', '--wait-on-lock-interval=60'],
['--wait-on-lock=100', '--wait-on-lock-interval=1'],
['--wait-on-lock-limit=100', '--wait-on-lock=1'],
['--wait-on-lock-limit=100', '--wait-on-lock-interval=1'],
['--wait-on-lock-limit=-1', '--wait-on-lock=1'],
['--wait-on-lock-limit=-1', '--wait-on-lock-interval=1'],
]
for opts in test_cases:
if any('--wait-on-lock=' in x for x in opts):
self.allow_deprecated_behaviour()
else:
self.disallow_deprecated_behaviour()
if not os.path.exists(toy_lock_path):
mkdir(toy_lock_path)
self.assertTrue(os.path.exists(toy_lock_path))
all_args = extra_args + opts
# use context manager to remove lock after 3 seconds
with remove_lock_after(3, toy_lock_path):
self.mock_stderr(True)
self.mock_stdout(True)
self.test_toy_build(extra_args=all_args, verify=False, raise_error=True, testing=False)
stderr, stdout = self.get_stderr(), self.get_stdout()
self.mock_stderr(False)
self.mock_stdout(False)
if any('--wait-on-lock=' in x for x in all_args):
self.assertTrue("Use of --wait-on-lock is deprecated" in stderr)
else:
self.assertEqual(stderr, '')
wait_matches = wait_regex.findall(stdout)
# we can't rely on an exact number of 'waiting' messages, so let's go with a range...
self.assertTrue(len(wait_matches) in range(2, 5))
            self.assertTrue(ok_regex.search(stdout), "Pattern '%s' should be found in: %s" % (ok_regex.pattern, stdout))
# check use of --wait-on-lock-limit: if lock is never removed, we should give up when limit is reached
mkdir(toy_lock_path)
all_args = extra_args + ['--wait-on-lock-limit=3', '--wait-on-lock-interval=1']
self.mock_stderr(True)
self.mock_stdout(True)
error_pattern = r"Maximum wait time for lock /.*toy_0.0.lock to be released reached: [0-9]+ sec >= 3 sec"
self.assertErrorRegex(EasyBuildError, error_pattern, self.test_toy_build, extra_args=all_args,
verify=False, raise_error=True, testing=False)
stderr, stdout = self.get_stderr(), self.get_stdout()
self.mock_stderr(False)
self.mock_stdout(False)
wait_matches = wait_regex.findall(stdout)
self.assertTrue(len(wait_matches) in range(2, 5))
# when there is no lock in place, --wait-on-lock* has no impact
remove_dir(toy_lock_path)
for opt in ['--wait-on-lock=1', '--wait-on-lock-limit=3', '--wait-on-lock-interval=1']:
all_args = extra_args + [opt]
self.assertFalse(os.path.exists(toy_lock_path))
self.mock_stderr(True)
self.mock_stdout(True)
self.test_toy_build(extra_args=all_args, verify=False, raise_error=True, testing=False)
stderr, stdout = self.get_stderr(), self.get_stdout()
self.mock_stderr(False)
self.mock_stdout(False)
self.assertEqual(stderr, '')
            self.assertTrue(ok_regex.search(stdout), "Pattern '%s' should be found in: %s" % (ok_regex.pattern, stdout))
            self.assertFalse(wait_regex.search(stdout), "Pattern '%s' should not be found in: %s" % (wait_regex.pattern, stdout))
# check for clean error on creation of lock
extra_args = ['--locks-dir=/']
error_pattern = r"Failed to create lock /.*_software_toy_0.0.lock:.* "
error_pattern += r"(Read-only file system|Permission denied)"
self.assertErrorRegex(EasyBuildError, error_pattern, self.test_toy_build,
extra_args=extra_args, raise_error=True, verbose=False)
def test_toy_lock_cleanup_signals(self):
"""Test cleanup of locks after EasyBuild session gets a cancellation signal."""
orig_wd = os.getcwd()
locks_dir = os.path.join(self.test_installpath, 'software', '.locks')
self.assertFalse(os.path.exists(locks_dir))
orig_sigalrm_handler = signal.getsignal(signal.SIGALRM)
        # context manager which sends the specified signal to the current process after a delay
class wait_and_signal(object):
def __init__(self, seconds, signum):
self.seconds = seconds
self.signum = signum
def send_signal(self, *args):
os.kill(os.getpid(), self.signum)
def __enter__(self):
signal.signal(signal.SIGALRM, self.send_signal)
signal.alarm(self.seconds)
def __exit__(self, type, value, traceback):
# clean up SIGALRM signal handler, and cancel scheduled alarm
signal.signal(signal.SIGALRM, orig_sigalrm_handler)
signal.alarm(0)
# add extra sleep command to ensure session takes long enough
test_ecs_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
toy_ec_txt = read_file(os.path.join(test_ecs_dir, 't', 'toy', 'toy-0.0.eb'))
test_ec = os.path.join(self.test_prefix, 'test.eb')
write_file(test_ec, toy_ec_txt + '\npostinstallcmds = ["sleep 5"]')
signums = [
(signal.SIGABRT, SystemExit),
(signal.SIGINT, KeyboardInterrupt),
(signal.SIGTERM, SystemExit),
(signal.SIGQUIT, SystemExit),
]
for (signum, exc) in signums:
# avoid recycling stderr of previous test
stderr = ''
with wait_and_signal(1, signum):
# change back to original working directory before each test
change_dir(orig_wd)
self.mock_stderr(True)
self.mock_stdout(True)
self.assertErrorRegex(exc, '.*', self.test_toy_build, ec_file=test_ec, verify=False,
raise_error=True, testing=False, raise_systemexit=True)
stderr = self.get_stderr().strip()
self.mock_stderr(False)
self.mock_stdout(False)
pattern = r"^WARNING: signal received \(%s\), " % int(signum)
pattern += r"cleaning up locks \(.*software_toy_0.0\)\.\.\."
regex = re.compile(pattern)
            self.assertTrue(regex.search(stderr), "Pattern '%s' should be found in: %s" % (regex.pattern, stderr))
def test_toy_build_unicode_description(self):
"""Test installation of easyconfig file that has non-ASCII characters in description."""
# cfr. https://github.com/easybuilders/easybuild-framework/issues/3284
test_ecs_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'easyconfigs', 'test_ecs')
toy_ec = os.path.join(test_ecs_dir, 't', 'toy', 'toy-0.0.eb')
toy_ec_txt = read_file(toy_ec)
# the tilde character included here is a Unicode tilde character, not a regular ASCII tilde (~)
descr = "This description includes a unicode tilde character: ∼, for your entertainment."
self.assertFalse('~' in descr)
regex = re.compile(r'^description\s*=.*', re.M)
test_ec_txt = regex.sub(r'description = "%s"' % descr, toy_ec_txt)
test_ec = os.path.join(self.test_prefix, 'test.eb')
write_file(test_ec, test_ec_txt)
self.test_toy_build(ec_file=test_ec, raise_error=True)
def test_toy_build_lib64_lib_symlink(self):
"""Check whether lib64 symlink to lib subdirectory is created."""
# this is done to ensure that <installdir>/lib64 is considered before /lib64 by GCC linker,
# see https://github.com/easybuilders/easybuild-easyconfigs/issues/5776
# by default, lib64 -> lib symlink is created (--lib64-lib-symlink is enabled by default)
self.test_toy_build()
toy_installdir = os.path.join(self.test_installpath, 'software', 'toy', '0.0')
lib_path = os.path.join(toy_installdir, 'lib')
lib64_path = os.path.join(toy_installdir, 'lib64')
        # lib subdir exists, is not a symlink
self.assertTrue(os.path.exists(lib_path))
self.assertTrue(os.path.isdir(lib_path))
self.assertFalse(os.path.islink(lib_path))
# lib64 subdir is a symlink to lib subdir
self.assertTrue(os.path.exists(lib64_path))
self.assertTrue(os.path.islink(lib64_path))
self.assertTrue(os.path.samefile(lib_path, lib64_path))
# lib64 symlink should point to a relative path
self.assertFalse(os.path.isabs(os.readlink(lib64_path)))
# cleanup and try again with --disable-lib64-lib-symlink
remove_dir(self.test_installpath)
self.test_toy_build(extra_args=['--disable-lib64-lib-symlink'])
self.assertTrue(os.path.exists(lib_path))
self.assertFalse(os.path.exists(lib64_path))
self.assertFalse('lib64' in os.listdir(toy_installdir))
self.assertTrue(os.path.isdir(lib_path))
self.assertFalse(os.path.islink(lib_path))
    def test_toy_build_lib_lib64_symlink(self):
        """Check whether lib symlink to lib64 subdirectory is created."""
test_ecs = os.path.join(os.path.dirname(__file__), 'easyconfigs', 'test_ecs')
toy_ec = os.path.join(test_ecs, 't', 'toy', 'toy-0.0.eb')
test_ec_txt = read_file(toy_ec)
test_ec_txt += "\npostinstallcmds += ['mv %(installdir)s/lib %(installdir)s/lib64']"
test_ec = os.path.join(self.test_prefix, 'test.eb')
write_file(test_ec, test_ec_txt)
# by default, lib -> lib64 symlink is created (--lib-lib64-symlink is enabled by default)
self.test_toy_build(ec_file=test_ec)
toy_installdir = os.path.join(self.test_installpath, 'software', 'toy', '0.0')
lib_path = os.path.join(toy_installdir, 'lib')
lib64_path = os.path.join(toy_installdir, 'lib64')
# lib64 subdir exists, is not a symlink
self.assertTrue(os.path.exists(lib64_path))
self.assertTrue(os.path.isdir(lib64_path))
self.assertFalse(os.path.islink(lib64_path))
# lib subdir is a symlink to lib64 subdir
self.assertTrue(os.path.exists(lib_path))
self.assertTrue(os.path.isdir(lib_path))
self.assertTrue(os.path.islink(lib_path))
self.assertTrue(os.path.samefile(lib_path, lib64_path))
# lib symlink should point to a relative path
self.assertFalse(os.path.isabs(os.readlink(lib_path)))
# cleanup and try again with --disable-lib-lib64-symlink
remove_dir(self.test_installpath)
self.test_toy_build(ec_file=test_ec, extra_args=['--disable-lib-lib64-symlink'])
self.assertTrue(os.path.exists(lib64_path))
self.assertFalse(os.path.exists(lib_path))
self.assertFalse('lib' in os.listdir(toy_installdir))
self.assertTrue(os.path.isdir(lib64_path))
self.assertFalse(os.path.islink(lib64_path))
def test_toy_build_sanity_check_linked_libs(self):
"""Test sanity checks for banned/requires libraries."""
test_ecs = os.path.join(os.path.dirname(__file__), 'easyconfigs', 'test_ecs')
libtoy_ec = os.path.join(test_ecs, 'l', 'libtoy', 'libtoy-0.0.eb')
libtoy_modfile_path = os.path.join(self.test_installpath, 'modules', 'all', 'libtoy', '0.0')
if get_module_syntax() == 'Lua':
libtoy_modfile_path += '.lua'
test_ec = os.path.join(self.test_prefix, 'test.eb')
shlib_ext = get_shared_lib_ext()
libtoy_fn = 'libtoy.%s' % shlib_ext
error_msg = "Check for banned/required shared libraries failed for"
# default check is done via EB_libtoy easyblock, which specifies several banned/required libraries
self.test_toy_build(ec_file=libtoy_ec, raise_error=True, verbose=False, verify=False)
remove_file(libtoy_modfile_path)
# we can make the check fail by defining environment variables picked up by the EB_libtoy easyblock
os.environ['EB_LIBTOY_BANNED_SHARED_LIBS'] = 'libtoy'
self.assertErrorRegex(EasyBuildError, error_msg, self.test_toy_build, force=False,
ec_file=libtoy_ec, extra_args=['--module-only'], raise_error=True, verbose=False)
del os.environ['EB_LIBTOY_BANNED_SHARED_LIBS']
os.environ['EB_LIBTOY_REQUIRED_SHARED_LIBS'] = 'thisisnottheremostlikely'
self.assertErrorRegex(EasyBuildError, error_msg, self.test_toy_build, force=False,
ec_file=libtoy_ec, extra_args=['--module-only'], raise_error=True, verbose=False)
del os.environ['EB_LIBTOY_REQUIRED_SHARED_LIBS']
# make sure default check passes (so we know better what triggered a failing test)
self.test_toy_build(ec_file=libtoy_ec, extra_args=['--module-only'], force=False,
raise_error=True, verbose=False, verify=False)
remove_file(libtoy_modfile_path)
# check specifying banned/required libraries via EasyBuild configuration option
args = ['--banned-linked-shared-libs=%s,foobarbaz' % libtoy_fn, '--module-only']
self.assertErrorRegex(EasyBuildError, error_msg, self.test_toy_build, force=False,
ec_file=libtoy_ec, extra_args=args, raise_error=True, verbose=False)
        args = ['--required-linked-shared-libs=foobarbazisnotthereforsure', '--module-only']
self.assertErrorRegex(EasyBuildError, error_msg, self.test_toy_build, force=False,
ec_file=libtoy_ec, extra_args=args, raise_error=True, verbose=False)
# check specifying banned/required libraries via easyconfig parameter
test_ec_txt = read_file(libtoy_ec)
test_ec_txt += "\nbanned_linked_shared_libs = ['toy']"
write_file(test_ec, test_ec_txt)
self.assertErrorRegex(EasyBuildError, error_msg, self.test_toy_build, force=False,
ec_file=test_ec, extra_args=['--module-only'], raise_error=True, verbose=False)
test_ec_txt = read_file(libtoy_ec)
test_ec_txt += "\nrequired_linked_shared_libs = ['thereisnosuchlibraryyoudummy']"
write_file(test_ec, test_ec_txt)
self.assertErrorRegex(EasyBuildError, error_msg, self.test_toy_build, force=False,
ec_file=test_ec, extra_args=['--module-only'], raise_error=True, verbose=False)
# check behaviour when alternate subdirectories are specified
test_ec_txt = read_file(libtoy_ec)
test_ec_txt += "\nbin_lib_subdirs = ['', 'lib', 'lib64']"
write_file(test_ec, test_ec_txt)
self.test_toy_build(ec_file=test_ec, extra_args=['--module-only'], force=False,
raise_error=True, verbose=False, verify=False)
# one last time: supercombo (with patterns that should pass the check)
test_ec_txt = read_file(libtoy_ec)
test_ec_txt += "\nbanned_linked_shared_libs = ['yeahthisisjustatest', '/usr/lib/libssl.so']"
test_ec_txt += "\nrequired_linked_shared_libs = ['/lib']"
test_ec_txt += "\nbin_lib_subdirs = ['', 'lib', 'lib64']"
write_file(test_ec, test_ec_txt)
args = [
'--banned-linked-shared-libs=the_forbidden_library',
'--required-linked-shared-libs=.*',
'--module-only',
]
self.test_toy_build(ec_file=test_ec, extra_args=args, force=False,
raise_error=True, verbose=False, verify=False)
def suite():
""" return all the tests in this file """
return TestLoaderFiltered().loadTestsFromTestCase(ToyBuildTest, sys.argv[1:])
if __name__ == '__main__':
res = TextTestRunner(verbosity=1).run(suite())
sys.exit(len(res.failures))
| gpl-2.0 | -6,146,299,068,494,226,000 | 48.379783 | 120 | 0.57905 | false |
naototty/vagrant-lxc-ironic | ironic/tests/drivers/test_deploy_utils.py | 1 | 75359 | # Copyright (c) 2012 NTT DOCOMO, INC.
# Copyright 2011 OpenStack Foundation
# Copyright 2011 Ilya Alekseyev
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import gzip
import os
import shutil
import stat
import tempfile
import time
import types
import mock
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_utils import uuidutils
import requests
import testtools
from ironic.common import boot_devices
from ironic.common import disk_partitioner
from ironic.common import exception
from ironic.common import images
from ironic.common import states
from ironic.common import utils as common_utils
from ironic.conductor import task_manager
from ironic.conductor import utils as manager_utils
from ironic.drivers.modules import agent_client
from ironic.drivers.modules import deploy_utils as utils
from ironic.drivers.modules import image_cache
from ironic.tests import base as tests_base
from ironic.tests.conductor import utils as mgr_utils
from ironic.tests.db import base as db_base
from ironic.tests.db import utils as db_utils
from ironic.tests.objects import utils as obj_utils
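# PXE, iPXE and UEFI config fixtures used by SwitchPxeConfigTestCase below;
# switch_pxe_config() is expected to turn each *_DEPLOY input into the
# matching *_BOOT_* variant by switching the default label and filling in
# the {{ ROOT }} or {{ DISK_IDENTIFIER }} placeholder, depending on whether
# a partition or a whole-disk image is being deployed.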
_PXECONF_DEPLOY = b"""
default deploy
label deploy
kernel deploy_kernel
append initrd=deploy_ramdisk
ipappend 3
label boot_partition
kernel kernel
append initrd=ramdisk root={{ ROOT }}
label boot_whole_disk
COM32 chain.c32
append mbr:{{ DISK_IDENTIFIER }}
"""
_PXECONF_BOOT_PARTITION = """
default boot_partition
label deploy
kernel deploy_kernel
append initrd=deploy_ramdisk
ipappend 3
label boot_partition
kernel kernel
append initrd=ramdisk root=UUID=12345678-1234-1234-1234-1234567890abcdef
label boot_whole_disk
COM32 chain.c32
append mbr:{{ DISK_IDENTIFIER }}
"""
_PXECONF_BOOT_WHOLE_DISK = """
default boot_whole_disk
label deploy
kernel deploy_kernel
append initrd=deploy_ramdisk
ipappend 3
label boot_partition
kernel kernel
append initrd=ramdisk root={{ ROOT }}
label boot_whole_disk
COM32 chain.c32
append mbr:0x12345678
"""
_IPXECONF_DEPLOY = b"""
#!ipxe
dhcp
goto deploy
:deploy
kernel deploy_kernel
initrd deploy_ramdisk
boot
:boot_partition
kernel kernel
append initrd=ramdisk root={{ ROOT }}
boot
:boot_whole_disk
kernel chain.c32
append mbr:{{ DISK_IDENTIFIER }}
boot
"""
_IPXECONF_BOOT_PARTITION = """
#!ipxe
dhcp
goto boot_partition
:deploy
kernel deploy_kernel
initrd deploy_ramdisk
boot
:boot_partition
kernel kernel
append initrd=ramdisk root=UUID=12345678-1234-1234-1234-1234567890abcdef
boot
:boot_whole_disk
kernel chain.c32
append mbr:{{ DISK_IDENTIFIER }}
boot
"""
_IPXECONF_BOOT_WHOLE_DISK = """
#!ipxe
dhcp
goto boot_whole_disk
:deploy
kernel deploy_kernel
initrd deploy_ramdisk
boot
:boot_partition
kernel kernel
append initrd=ramdisk root={{ ROOT }}
boot
:boot_whole_disk
kernel chain.c32
append mbr:0x12345678
boot
"""
_UEFI_PXECONF_DEPLOY = b"""
default=deploy
image=deploy_kernel
label=deploy
initrd=deploy_ramdisk
append="ro text"
image=kernel
label=boot_partition
initrd=ramdisk
append="root={{ ROOT }}"
image=chain.c32
label=boot_whole_disk
append="mbr:{{ DISK_IDENTIFIER }}"
"""
_UEFI_PXECONF_BOOT_PARTITION = """
default=boot_partition
image=deploy_kernel
label=deploy
initrd=deploy_ramdisk
append="ro text"
image=kernel
label=boot_partition
initrd=ramdisk
append="root=UUID=12345678-1234-1234-1234-1234567890abcdef"
image=chain.c32
label=boot_whole_disk
append="mbr:{{ DISK_IDENTIFIER }}"
"""
_UEFI_PXECONF_BOOT_WHOLE_DISK = """
default=boot_whole_disk
image=deploy_kernel
label=deploy
initrd=deploy_ramdisk
append="ro text"
image=kernel
label=boot_partition
initrd=ramdisk
append="root={{ ROOT }}"
image=chain.c32
label=boot_whole_disk
append="mbr:0x12345678"
"""
@mock.patch.object(time, 'sleep', lambda seconds: None)
class PhysicalWorkTestCase(tests_base.TestCase):
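    """Tests for the iSCSI-based deploy helpers in deploy_utils, covering
    partition and whole-disk image deploys plus the iSCSI login plumbing.
    """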
def _mock_calls(self, name_list):
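        """Patch each named deploy_utils function and attach all resulting
        mocks to a single parent MagicMock, so that tests can assert the
        exact call order across functions via parent_mock.mock_calls.
        """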
patch_list = [mock.patch.object(utils, name,
spec_set=types.FunctionType)
for name in name_list]
mock_list = [patcher.start() for patcher in patch_list]
for patcher in patch_list:
self.addCleanup(patcher.stop)
parent_mock = mock.MagicMock(spec=[])
for mocker, name in zip(mock_list, name_list):
parent_mock.attach_mock(mocker, name)
return parent_mock
    def _test_deploy_partition_image(self, boot_option=None, boot_mode=None):
        """Loosely check that all functions are called with the right args."""
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
root_mb = 128
swap_mb = 64
ephemeral_mb = 0
ephemeral_format = None
configdrive_mb = 0
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
dev = '/dev/fake'
swap_part = '/dev/fake-part1'
root_part = '/dev/fake-part2'
root_uuid = '12345678-1234-1234-12345678-12345678abcdef'
name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
'logout_iscsi', 'delete_iscsi', 'make_partitions',
'is_block_device', 'populate_image', 'mkfs',
'block_uuid', 'notify', 'destroy_disk_metadata']
parent_mock = self._mock_calls(name_list)
parent_mock.get_dev.return_value = dev
parent_mock.get_image_mb.return_value = 1
parent_mock.is_block_device.return_value = True
parent_mock.block_uuid.return_value = root_uuid
parent_mock.make_partitions.return_value = {'root': root_part,
'swap': swap_part}
make_partitions_expected_args = [dev, root_mb, swap_mb, ephemeral_mb,
configdrive_mb]
make_partitions_expected_kwargs = {'commit': True}
deploy_kwargs = {}
if boot_option:
make_partitions_expected_kwargs['boot_option'] = boot_option
deploy_kwargs['boot_option'] = boot_option
else:
make_partitions_expected_kwargs['boot_option'] = 'netboot'
if boot_mode:
make_partitions_expected_kwargs['boot_mode'] = boot_mode
deploy_kwargs['boot_mode'] = boot_mode
else:
make_partitions_expected_kwargs['boot_mode'] = 'bios'
# If no boot_option, then it should default to netboot.
calls_expected = [mock.call.get_image_mb(image_path),
mock.call.get_dev(address, port, iqn, lun),
mock.call.discovery(address, port),
mock.call.login_iscsi(address, port, iqn),
mock.call.is_block_device(dev),
mock.call.destroy_disk_metadata(dev, node_uuid),
mock.call.make_partitions(
*make_partitions_expected_args,
**make_partitions_expected_kwargs),
mock.call.is_block_device(root_part),
mock.call.is_block_device(swap_part),
mock.call.populate_image(image_path, root_part),
mock.call.mkfs(dev=swap_part, fs='swap',
label='swap1'),
mock.call.block_uuid(root_part),
mock.call.logout_iscsi(address, port, iqn),
mock.call.delete_iscsi(address, port, iqn)]
uuids_dict_returned = utils.deploy_partition_image(
address, port, iqn, lun, image_path, root_mb, swap_mb,
ephemeral_mb, ephemeral_format, node_uuid, **deploy_kwargs)
self.assertEqual(calls_expected, parent_mock.mock_calls)
expected_uuid_dict = {
'root uuid': root_uuid,
'efi system partition uuid': None}
self.assertEqual(expected_uuid_dict, uuids_dict_returned)
def test_deploy_partition_image_without_boot_option(self):
self._test_deploy_partition_image()
def test_deploy_partition_image_netboot(self):
self._test_deploy_partition_image(boot_option="netboot")
def test_deploy_partition_image_localboot(self):
self._test_deploy_partition_image(boot_option="local")
def test_deploy_partition_image_wo_boot_option_and_wo_boot_mode(self):
self._test_deploy_partition_image()
def test_deploy_partition_image_netboot_bios(self):
self._test_deploy_partition_image(boot_option="netboot",
boot_mode="bios")
def test_deploy_partition_image_localboot_bios(self):
self._test_deploy_partition_image(boot_option="local",
boot_mode="bios")
def test_deploy_partition_image_netboot_uefi(self):
self._test_deploy_partition_image(boot_option="netboot",
boot_mode="uefi")
@mock.patch.object(utils, 'get_image_mb', return_value=129, autospec=True)
def test_deploy_partition_image_image_exceeds_root_partition(self,
gim_mock):
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
root_mb = 128
swap_mb = 64
ephemeral_mb = 0
ephemeral_format = None
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
self.assertRaises(exception.InstanceDeployFailure,
utils.deploy_partition_image, address, port, iqn,
lun, image_path, root_mb, swap_mb, ephemeral_mb,
ephemeral_format, node_uuid)
gim_mock.assert_called_once_with(image_path)
# We mock utils.block_uuid separately here because we can't predict
# the order in which it will be called.
@mock.patch.object(utils, 'block_uuid', autospec=True)
    def test_deploy_partition_image_localboot_uefi(self, block_uuid_mock):
        """Loosely check that all functions are called with the right args."""
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
root_mb = 128
swap_mb = 64
ephemeral_mb = 0
ephemeral_format = None
configdrive_mb = 0
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
dev = '/dev/fake'
swap_part = '/dev/fake-part2'
root_part = '/dev/fake-part3'
efi_system_part = '/dev/fake-part1'
root_uuid = '12345678-1234-1234-12345678-12345678abcdef'
efi_system_part_uuid = '9036-482'
name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
'logout_iscsi', 'delete_iscsi', 'make_partitions',
'is_block_device', 'populate_image', 'mkfs',
'notify', 'destroy_disk_metadata']
parent_mock = self._mock_calls(name_list)
parent_mock.get_dev.return_value = dev
parent_mock.get_image_mb.return_value = 1
parent_mock.is_block_device.return_value = True
def block_uuid_side_effect(device):
if device == root_part:
return root_uuid
if device == efi_system_part:
return efi_system_part_uuid
block_uuid_mock.side_effect = block_uuid_side_effect
parent_mock.make_partitions.return_value = {
'root': root_part, 'swap': swap_part,
'efi system partition': efi_system_part}
# If no boot_option, then it should default to netboot.
calls_expected = [mock.call.get_image_mb(image_path),
mock.call.get_dev(address, port, iqn, lun),
mock.call.discovery(address, port),
mock.call.login_iscsi(address, port, iqn),
mock.call.is_block_device(dev),
mock.call.destroy_disk_metadata(dev, node_uuid),
mock.call.make_partitions(dev, root_mb, swap_mb,
ephemeral_mb,
configdrive_mb,
commit=True,
boot_option="local",
boot_mode="uefi"),
mock.call.is_block_device(root_part),
mock.call.is_block_device(swap_part),
mock.call.is_block_device(efi_system_part),
mock.call.mkfs(dev=efi_system_part, fs='vfat',
label='efi-part'),
mock.call.populate_image(image_path, root_part),
mock.call.mkfs(dev=swap_part, fs='swap',
label='swap1'),
mock.call.logout_iscsi(address, port, iqn),
mock.call.delete_iscsi(address, port, iqn)]
uuid_dict_returned = utils.deploy_partition_image(
address, port, iqn, lun, image_path, root_mb, swap_mb,
ephemeral_mb, ephemeral_format, node_uuid, boot_option="local",
boot_mode="uefi")
self.assertEqual(calls_expected, parent_mock.mock_calls)
block_uuid_mock.assert_any_call('/dev/fake-part1')
block_uuid_mock.assert_any_call('/dev/fake-part3')
expected_uuid_dict = {
'root uuid': root_uuid,
'efi system partition uuid': efi_system_part_uuid}
self.assertEqual(expected_uuid_dict, uuid_dict_returned)
    def test_deploy_partition_image_without_swap(self):
        """Loosely check that all functions are called with the right args."""
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
root_mb = 128
swap_mb = 0
ephemeral_mb = 0
ephemeral_format = None
configdrive_mb = 0
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
dev = '/dev/fake'
root_part = '/dev/fake-part1'
root_uuid = '12345678-1234-1234-12345678-12345678abcdef'
name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
'logout_iscsi', 'delete_iscsi', 'make_partitions',
'is_block_device', 'populate_image', 'block_uuid',
'notify', 'destroy_disk_metadata']
parent_mock = self._mock_calls(name_list)
parent_mock.get_dev.return_value = dev
parent_mock.get_image_mb.return_value = 1
parent_mock.is_block_device.return_value = True
parent_mock.block_uuid.return_value = root_uuid
parent_mock.make_partitions.return_value = {'root': root_part}
calls_expected = [mock.call.get_image_mb(image_path),
mock.call.get_dev(address, port, iqn, lun),
mock.call.discovery(address, port),
mock.call.login_iscsi(address, port, iqn),
mock.call.is_block_device(dev),
mock.call.destroy_disk_metadata(dev, node_uuid),
mock.call.make_partitions(dev, root_mb, swap_mb,
ephemeral_mb,
configdrive_mb,
commit=True,
boot_option="netboot",
boot_mode="bios"),
mock.call.is_block_device(root_part),
mock.call.populate_image(image_path, root_part),
mock.call.block_uuid(root_part),
mock.call.logout_iscsi(address, port, iqn),
mock.call.delete_iscsi(address, port, iqn)]
uuid_dict_returned = utils.deploy_partition_image(address, port, iqn,
lun, image_path,
root_mb, swap_mb,
ephemeral_mb,
ephemeral_format,
node_uuid)
self.assertEqual(calls_expected, parent_mock.mock_calls)
self.assertEqual(root_uuid, uuid_dict_returned['root uuid'])
    def test_deploy_partition_image_with_ephemeral(self):
        """Loosely check that all functions are called with the right args."""
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
root_mb = 128
swap_mb = 64
ephemeral_mb = 256
configdrive_mb = 0
ephemeral_format = 'exttest'
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
dev = '/dev/fake'
ephemeral_part = '/dev/fake-part1'
swap_part = '/dev/fake-part2'
root_part = '/dev/fake-part3'
root_uuid = '12345678-1234-1234-12345678-12345678abcdef'
name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
'logout_iscsi', 'delete_iscsi', 'make_partitions',
'is_block_device', 'populate_image', 'mkfs',
'block_uuid', 'notify', 'destroy_disk_metadata']
parent_mock = self._mock_calls(name_list)
parent_mock.get_dev.return_value = dev
parent_mock.get_image_mb.return_value = 1
parent_mock.is_block_device.return_value = True
parent_mock.block_uuid.return_value = root_uuid
parent_mock.make_partitions.return_value = {'swap': swap_part,
'ephemeral': ephemeral_part,
'root': root_part}
calls_expected = [mock.call.get_image_mb(image_path),
mock.call.get_dev(address, port, iqn, lun),
mock.call.discovery(address, port),
mock.call.login_iscsi(address, port, iqn),
mock.call.is_block_device(dev),
mock.call.destroy_disk_metadata(dev, node_uuid),
mock.call.make_partitions(dev, root_mb, swap_mb,
ephemeral_mb,
configdrive_mb,
commit=True,
boot_option="netboot",
boot_mode="bios"),
mock.call.is_block_device(root_part),
mock.call.is_block_device(swap_part),
mock.call.is_block_device(ephemeral_part),
mock.call.populate_image(image_path, root_part),
mock.call.mkfs(dev=swap_part, fs='swap',
label='swap1'),
mock.call.mkfs(dev=ephemeral_part,
fs=ephemeral_format,
label='ephemeral0'),
mock.call.block_uuid(root_part),
mock.call.logout_iscsi(address, port, iqn),
mock.call.delete_iscsi(address, port, iqn)]
uuid_dict_returned = utils.deploy_partition_image(address, port, iqn,
lun, image_path,
root_mb, swap_mb,
ephemeral_mb,
ephemeral_format,
node_uuid)
self.assertEqual(calls_expected, parent_mock.mock_calls)
self.assertEqual(root_uuid, uuid_dict_returned['root uuid'])
    def test_deploy_partition_image_preserve_ephemeral(self):
        """Check that all functions are called with the right args."""
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
root_mb = 128
swap_mb = 64
ephemeral_mb = 256
ephemeral_format = 'exttest'
configdrive_mb = 0
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
dev = '/dev/fake'
ephemeral_part = '/dev/fake-part1'
swap_part = '/dev/fake-part2'
root_part = '/dev/fake-part3'
root_uuid = '12345678-1234-1234-12345678-12345678abcdef'
name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
'logout_iscsi', 'delete_iscsi', 'make_partitions',
'is_block_device', 'populate_image', 'mkfs',
'block_uuid', 'notify', 'get_dev_block_size']
parent_mock = self._mock_calls(name_list)
parent_mock.get_dev.return_value = dev
parent_mock.get_image_mb.return_value = 1
parent_mock.is_block_device.return_value = True
parent_mock.block_uuid.return_value = root_uuid
parent_mock.make_partitions.return_value = {'swap': swap_part,
'ephemeral': ephemeral_part,
'root': root_part}
parent_mock.block_uuid.return_value = root_uuid
calls_expected = [mock.call.get_image_mb(image_path),
mock.call.get_dev(address, port, iqn, lun),
mock.call.discovery(address, port),
mock.call.login_iscsi(address, port, iqn),
mock.call.is_block_device(dev),
mock.call.make_partitions(dev, root_mb, swap_mb,
ephemeral_mb,
configdrive_mb,
commit=False,
boot_option="netboot",
boot_mode="bios"),
mock.call.is_block_device(root_part),
mock.call.is_block_device(swap_part),
mock.call.is_block_device(ephemeral_part),
mock.call.populate_image(image_path, root_part),
mock.call.mkfs(dev=swap_part, fs='swap',
label='swap1'),
mock.call.block_uuid(root_part),
mock.call.logout_iscsi(address, port, iqn),
mock.call.delete_iscsi(address, port, iqn)]
uuid_dict_returned = utils.deploy_partition_image(
address, port, iqn, lun, image_path, root_mb, swap_mb,
ephemeral_mb, ephemeral_format, node_uuid,
preserve_ephemeral=True, boot_option="netboot")
self.assertEqual(calls_expected, parent_mock.mock_calls)
self.assertFalse(parent_mock.get_dev_block_size.called)
self.assertEqual(root_uuid, uuid_dict_returned['root uuid'])
@mock.patch.object(common_utils, 'unlink_without_raise', autospec=True)
    def test_deploy_partition_image_with_configdrive(self, mock_unlink):
        """Loosely check that all functions are called with the right args."""
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
root_mb = 128
swap_mb = 0
ephemeral_mb = 0
configdrive_mb = 10
ephemeral_format = None
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
configdrive_url = 'http://1.2.3.4/cd'
dev = '/dev/fake'
configdrive_part = '/dev/fake-part1'
root_part = '/dev/fake-part2'
root_uuid = '12345678-1234-1234-12345678-12345678abcdef'
name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
'logout_iscsi', 'delete_iscsi', 'make_partitions',
'is_block_device', 'populate_image', 'block_uuid',
'notify', 'destroy_disk_metadata', 'dd',
'_get_configdrive']
parent_mock = self._mock_calls(name_list)
parent_mock.get_dev.return_value = dev
parent_mock.get_image_mb.return_value = 1
parent_mock.is_block_device.return_value = True
parent_mock.block_uuid.return_value = root_uuid
parent_mock.make_partitions.return_value = {'root': root_part,
'configdrive':
configdrive_part}
parent_mock._get_configdrive.return_value = (10, 'configdrive-path')
calls_expected = [mock.call.get_image_mb(image_path),
mock.call.get_dev(address, port, iqn, lun),
mock.call.discovery(address, port),
mock.call.login_iscsi(address, port, iqn),
mock.call.is_block_device(dev),
mock.call.destroy_disk_metadata(dev, node_uuid),
mock.call._get_configdrive(configdrive_url,
node_uuid),
mock.call.make_partitions(dev, root_mb, swap_mb,
ephemeral_mb,
configdrive_mb,
commit=True,
boot_option="netboot",
boot_mode="bios"),
mock.call.is_block_device(root_part),
mock.call.is_block_device(configdrive_part),
mock.call.dd(mock.ANY, configdrive_part),
mock.call.populate_image(image_path, root_part),
mock.call.block_uuid(root_part),
mock.call.logout_iscsi(address, port, iqn),
mock.call.delete_iscsi(address, port, iqn)]
uuid_dict_returned = utils.deploy_partition_image(
address, port, iqn, lun, image_path, root_mb, swap_mb,
ephemeral_mb, ephemeral_format, node_uuid,
configdrive=configdrive_url)
self.assertEqual(calls_expected, parent_mock.mock_calls)
self.assertEqual(root_uuid, uuid_dict_returned['root uuid'])
mock_unlink.assert_called_once_with('configdrive-path')
@mock.patch.object(utils, 'get_disk_identifier', autospec=True)
    def test_deploy_whole_disk_image(self, mock_gdi):
        """Loosely check that all functions are called with the right args."""
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
dev = '/dev/fake'
name_list = ['get_dev', 'discovery', 'login_iscsi', 'logout_iscsi',
'delete_iscsi', 'is_block_device', 'populate_image',
'notify']
parent_mock = self._mock_calls(name_list)
parent_mock.get_dev.return_value = dev
parent_mock.is_block_device.return_value = True
mock_gdi.return_value = '0x12345678'
calls_expected = [mock.call.get_dev(address, port, iqn, lun),
mock.call.discovery(address, port),
mock.call.login_iscsi(address, port, iqn),
mock.call.is_block_device(dev),
mock.call.populate_image(image_path, dev),
mock.call.logout_iscsi(address, port, iqn),
mock.call.delete_iscsi(address, port, iqn)]
uuid_dict_returned = utils.deploy_disk_image(address, port, iqn, lun,
image_path, node_uuid)
self.assertEqual(calls_expected, parent_mock.mock_calls)
self.assertEqual('0x12345678', uuid_dict_returned['disk identifier'])
@mock.patch.object(common_utils, 'execute', autospec=True)
def test_verify_iscsi_connection_raises(self, mock_exec):
iqn = 'iqn.xyz'
mock_exec.return_value = ['iqn.abc', '']
self.assertRaises(exception.InstanceDeployFailure,
utils.verify_iscsi_connection, iqn)
self.assertEqual(3, mock_exec.call_count)
@mock.patch.object(os.path, 'exists', autospec=True)
def test_check_file_system_for_iscsi_device_raises(self, mock_os):
iqn = 'iqn.xyz'
ip = "127.0.0.1"
port = "22"
mock_os.return_value = False
self.assertRaises(exception.InstanceDeployFailure,
utils.check_file_system_for_iscsi_device, ip, port, iqn)
self.assertEqual(3, mock_os.call_count)
@mock.patch.object(os.path, 'exists', autospec=True)
def test_check_file_system_for_iscsi_device(self, mock_os):
iqn = 'iqn.xyz'
ip = "127.0.0.1"
port = "22"
check_dir = "/dev/disk/by-path/ip-%s:%s-iscsi-%s-lun-1" % (ip,
port,
iqn)
mock_os.return_value = True
utils.check_file_system_for_iscsi_device(ip, port, iqn)
mock_os.assert_called_once_with(check_dir)
@mock.patch.object(common_utils, 'execute', autospec=True)
def test_verify_iscsi_connection(self, mock_exec):
iqn = 'iqn.xyz'
mock_exec.return_value = ['iqn.xyz', '']
utils.verify_iscsi_connection(iqn)
mock_exec.assert_called_once_with('iscsiadm',
'-m', 'node',
'-S',
run_as_root=True,
check_exit_code=[0])
@mock.patch.object(common_utils, 'execute', autospec=True)
def test_force_iscsi_lun_update(self, mock_exec):
iqn = 'iqn.xyz'
utils.force_iscsi_lun_update(iqn)
mock_exec.assert_called_once_with('iscsiadm',
'-m', 'node',
'-T', iqn,
'-R',
run_as_root=True,
check_exit_code=[0])
@mock.patch.object(common_utils, 'execute', autospec=True)
@mock.patch.object(utils, 'verify_iscsi_connection', autospec=True)
@mock.patch.object(utils, 'force_iscsi_lun_update', autospec=True)
@mock.patch.object(utils, 'check_file_system_for_iscsi_device',
autospec=True)
def test_login_iscsi_calls_verify_and_update(self,
mock_check_dev,
mock_update,
mock_verify,
mock_exec):
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
mock_exec.return_value = ['iqn.xyz', '']
utils.login_iscsi(address, port, iqn)
mock_exec.assert_called_once_with('iscsiadm',
'-m', 'node',
'-p', '%s:%s' % (address, port),
'-T', iqn,
'--login',
run_as_root=True,
check_exit_code=[0],
attempts=5,
delay_on_retry=True)
mock_verify.assert_called_once_with(iqn)
mock_update.assert_called_once_with(iqn)
mock_check_dev.assert_called_once_with(address, port, iqn)
@mock.patch.object(utils, 'is_block_device', lambda d: True)
def test_always_logout_and_delete_iscsi(self):
"""Check if logout_iscsi() and delete_iscsi() are called.
Make sure that logout_iscsi() and delete_iscsi() are called once
login_iscsi() is invoked.
"""
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
root_mb = 128
swap_mb = 64
ephemeral_mb = 256
ephemeral_format = 'exttest'
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
dev = '/dev/fake'
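        # local exception type, raised from the mocked work_on_disk() to
        # simulate a failure in the middle of the deploy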
class TestException(Exception):
pass
name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
'logout_iscsi', 'delete_iscsi', 'work_on_disk']
patch_list = [mock.patch.object(utils, name,
spec_set=types.FunctionType)
for name in name_list]
mock_list = [patcher.start() for patcher in patch_list]
for patcher in patch_list:
self.addCleanup(patcher.stop)
parent_mock = mock.MagicMock(spec=[])
for mocker, name in zip(mock_list, name_list):
parent_mock.attach_mock(mocker, name)
parent_mock.get_dev.return_value = dev
parent_mock.get_image_mb.return_value = 1
parent_mock.work_on_disk.side_effect = TestException
calls_expected = [mock.call.get_image_mb(image_path),
mock.call.get_dev(address, port, iqn, lun),
mock.call.discovery(address, port),
mock.call.login_iscsi(address, port, iqn),
mock.call.work_on_disk(dev, root_mb, swap_mb,
ephemeral_mb,
ephemeral_format, image_path,
node_uuid, configdrive=None,
preserve_ephemeral=False,
boot_option="netboot",
boot_mode="bios"),
mock.call.logout_iscsi(address, port, iqn),
mock.call.delete_iscsi(address, port, iqn)]
self.assertRaises(TestException, utils.deploy_partition_image,
address, port, iqn, lun, image_path,
root_mb, swap_mb, ephemeral_mb, ephemeral_format,
node_uuid)
self.assertEqual(calls_expected, parent_mock.mock_calls)
class SwitchPxeConfigTestCase(tests_base.TestCase):
def _create_config(self, ipxe=False, boot_mode=None):
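        """Write the deploy-time config fixture matching the requested
        flavour (PXE, iPXE or UEFI) to a temp file and return its path.
        """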
(fd, fname) = tempfile.mkstemp()
if boot_mode == 'uefi':
pxe_cfg = _UEFI_PXECONF_DEPLOY
else:
pxe_cfg = _IPXECONF_DEPLOY if ipxe else _PXECONF_DEPLOY
os.write(fd, pxe_cfg)
os.close(fd)
self.addCleanup(os.unlink, fname)
return fname
def test_switch_pxe_config_partition_image(self):
boot_mode = 'bios'
fname = self._create_config()
utils.switch_pxe_config(fname,
'12345678-1234-1234-1234-1234567890abcdef',
boot_mode,
False)
with open(fname, 'r') as f:
pxeconf = f.read()
self.assertEqual(_PXECONF_BOOT_PARTITION, pxeconf)
def test_switch_pxe_config_whole_disk_image(self):
boot_mode = 'bios'
fname = self._create_config()
utils.switch_pxe_config(fname,
'0x12345678',
boot_mode,
True)
with open(fname, 'r') as f:
pxeconf = f.read()
self.assertEqual(_PXECONF_BOOT_WHOLE_DISK, pxeconf)
def test_switch_ipxe_config_partition_image(self):
boot_mode = 'bios'
cfg.CONF.set_override('ipxe_enabled', True, 'pxe')
fname = self._create_config(ipxe=True)
utils.switch_pxe_config(fname,
'12345678-1234-1234-1234-1234567890abcdef',
boot_mode,
False)
with open(fname, 'r') as f:
pxeconf = f.read()
self.assertEqual(_IPXECONF_BOOT_PARTITION, pxeconf)
def test_switch_ipxe_config_whole_disk_image(self):
boot_mode = 'bios'
cfg.CONF.set_override('ipxe_enabled', True, 'pxe')
fname = self._create_config(ipxe=True)
utils.switch_pxe_config(fname,
'0x12345678',
boot_mode,
True)
with open(fname, 'r') as f:
pxeconf = f.read()
self.assertEqual(_IPXECONF_BOOT_WHOLE_DISK, pxeconf)
def test_switch_uefi_pxe_config_partition_image(self):
boot_mode = 'uefi'
fname = self._create_config(boot_mode=boot_mode)
utils.switch_pxe_config(fname,
'12345678-1234-1234-1234-1234567890abcdef',
boot_mode,
False)
with open(fname, 'r') as f:
pxeconf = f.read()
self.assertEqual(_UEFI_PXECONF_BOOT_PARTITION, pxeconf)
def test_switch_uefi_config_whole_disk_image(self):
boot_mode = 'uefi'
fname = self._create_config(boot_mode=boot_mode)
utils.switch_pxe_config(fname,
'0x12345678',
boot_mode,
True)
with open(fname, 'r') as f:
pxeconf = f.read()
self.assertEqual(_UEFI_PXECONF_BOOT_WHOLE_DISK, pxeconf)
@mock.patch('time.sleep', lambda sec: None)
class OtherFunctionTestCase(db_base.DbTestCase):
def setUp(self):
super(OtherFunctionTestCase, self).setUp()
mgr_utils.mock_the_extension_manager(driver="fake_pxe")
self.node = obj_utils.create_test_node(self.context, driver='fake_pxe')
def test_get_dev(self):
expected = '/dev/disk/by-path/ip-1.2.3.4:5678-iscsi-iqn.fake-lun-9'
actual = utils.get_dev('1.2.3.4', 5678, 'iqn.fake', 9)
self.assertEqual(expected, actual)
@mock.patch.object(os, 'stat', autospec=True)
@mock.patch.object(stat, 'S_ISBLK', autospec=True)
def test_is_block_device_works(self, mock_is_blk, mock_os):
device = '/dev/disk/by-path/ip-1.2.3.4:5678-iscsi-iqn.fake-lun-9'
mock_is_blk.return_value = True
mock_os().st_mode = 10000
self.assertTrue(utils.is_block_device(device))
mock_is_blk.assert_called_once_with(mock_os().st_mode)
@mock.patch.object(os, 'stat', autospec=True)
def test_is_block_device_raises(self, mock_os):
device = '/dev/disk/by-path/ip-1.2.3.4:5678-iscsi-iqn.fake-lun-9'
mock_os.side_effect = OSError
self.assertRaises(exception.InstanceDeployFailure,
utils.is_block_device, device)
mock_os.assert_has_calls([mock.call(device)] * 3)
@mock.patch.object(os.path, 'getsize', autospec=True)
@mock.patch.object(images, 'converted_size', autospec=True)
def test_get_image_mb(self, mock_csize, mock_getsize):
mb = 1024 * 1024
mock_getsize.return_value = 0
mock_csize.return_value = 0
self.assertEqual(0, utils.get_image_mb('x', False))
self.assertEqual(0, utils.get_image_mb('x', True))
mock_getsize.return_value = 1
mock_csize.return_value = 1
self.assertEqual(1, utils.get_image_mb('x', False))
self.assertEqual(1, utils.get_image_mb('x', True))
mock_getsize.return_value = mb
mock_csize.return_value = mb
self.assertEqual(1, utils.get_image_mb('x', False))
self.assertEqual(1, utils.get_image_mb('x', True))
mock_getsize.return_value = mb + 1
mock_csize.return_value = mb + 1
self.assertEqual(2, utils.get_image_mb('x', False))
self.assertEqual(2, utils.get_image_mb('x', True))
def test_parse_root_device_hints(self):
self.node.properties['root_device'] = {'wwn': 123456}
expected = 'wwn=123456'
result = utils.parse_root_device_hints(self.node)
self.assertEqual(expected, result)
def test_parse_root_device_hints_string_space(self):
self.node.properties['root_device'] = {'model': 'fake model'}
expected = 'model=fake%20model'
result = utils.parse_root_device_hints(self.node)
self.assertEqual(expected, result)
def test_parse_root_device_hints_no_hints(self):
self.node.properties = {}
result = utils.parse_root_device_hints(self.node)
self.assertIsNone(result)
def test_parse_root_device_hints_invalid_hints(self):
self.node.properties['root_device'] = {'vehicle': 'Owlship'}
self.assertRaises(exception.InvalidParameterValue,
utils.parse_root_device_hints, self.node)
def test_parse_root_device_hints_invalid_size(self):
self.node.properties['root_device'] = {'size': 'not-int'}
self.assertRaises(exception.InvalidParameterValue,
utils.parse_root_device_hints, self.node)
@mock.patch.object(disk_partitioner.DiskPartitioner, 'commit', lambda _: None)
class WorkOnDiskTestCase(tests_base.TestCase):
def setUp(self):
super(WorkOnDiskTestCase, self).setUp()
self.image_path = '/tmp/xyz/image'
self.root_mb = 128
self.swap_mb = 64
self.ephemeral_mb = 0
self.ephemeral_format = None
self.configdrive_mb = 0
self.dev = '/dev/fake'
self.swap_part = '/dev/fake-part1'
self.root_part = '/dev/fake-part2'
self.mock_ibd_obj = mock.patch.object(
utils, 'is_block_device', autospec=True)
self.mock_ibd = self.mock_ibd_obj.start()
self.addCleanup(self.mock_ibd_obj.stop)
self.mock_mp_obj = mock.patch.object(
utils, 'make_partitions', autospec=True)
self.mock_mp = self.mock_mp_obj.start()
self.addCleanup(self.mock_mp_obj.stop)
self.mock_remlbl_obj = mock.patch.object(
utils, 'destroy_disk_metadata', autospec=True)
self.mock_remlbl = self.mock_remlbl_obj.start()
self.addCleanup(self.mock_remlbl_obj.stop)
self.mock_mp.return_value = {'swap': self.swap_part,
'root': self.root_part}
def test_no_root_partition(self):
self.mock_ibd.return_value = False
self.assertRaises(exception.InstanceDeployFailure,
utils.work_on_disk, self.dev, self.root_mb,
self.swap_mb, self.ephemeral_mb,
self.ephemeral_format, self.image_path, 'fake-uuid')
self.mock_ibd.assert_called_once_with(self.root_part)
self.mock_mp.assert_called_once_with(self.dev, self.root_mb,
self.swap_mb, self.ephemeral_mb,
self.configdrive_mb, commit=True,
boot_option="netboot",
boot_mode="bios")
def test_no_swap_partition(self):
self.mock_ibd.side_effect = iter([True, False])
calls = [mock.call(self.root_part),
mock.call(self.swap_part)]
self.assertRaises(exception.InstanceDeployFailure,
utils.work_on_disk, self.dev, self.root_mb,
self.swap_mb, self.ephemeral_mb,
self.ephemeral_format, self.image_path, 'fake-uuid')
self.assertEqual(self.mock_ibd.call_args_list, calls)
self.mock_mp.assert_called_once_with(self.dev, self.root_mb,
self.swap_mb, self.ephemeral_mb,
self.configdrive_mb, commit=True,
boot_option="netboot",
boot_mode="bios")
def test_no_ephemeral_partition(self):
ephemeral_part = '/dev/fake-part1'
swap_part = '/dev/fake-part2'
root_part = '/dev/fake-part3'
ephemeral_mb = 256
ephemeral_format = 'exttest'
self.mock_mp.return_value = {'ephemeral': ephemeral_part,
'swap': swap_part,
'root': root_part}
self.mock_ibd.side_effect = iter([True, True, False])
calls = [mock.call(root_part),
mock.call(swap_part),
mock.call(ephemeral_part)]
self.assertRaises(exception.InstanceDeployFailure,
utils.work_on_disk, self.dev, self.root_mb,
self.swap_mb, ephemeral_mb, ephemeral_format,
self.image_path, 'fake-uuid')
self.assertEqual(self.mock_ibd.call_args_list, calls)
self.mock_mp.assert_called_once_with(self.dev, self.root_mb,
self.swap_mb, ephemeral_mb,
self.configdrive_mb, commit=True,
boot_option="netboot",
boot_mode="bios")
@mock.patch.object(common_utils, 'unlink_without_raise', autospec=True)
@mock.patch.object(utils, '_get_configdrive', autospec=True)
def test_no_configdrive_partition(self, mock_configdrive, mock_unlink):
mock_configdrive.return_value = (10, 'fake-path')
swap_part = '/dev/fake-part1'
configdrive_part = '/dev/fake-part2'
root_part = '/dev/fake-part3'
configdrive_url = 'http://1.2.3.4/cd'
configdrive_mb = 10
self.mock_mp.return_value = {'swap': swap_part,
'configdrive': configdrive_part,
'root': root_part}
self.mock_ibd.side_effect = iter([True, True, False])
calls = [mock.call(root_part),
mock.call(swap_part),
mock.call(configdrive_part)]
self.assertRaises(exception.InstanceDeployFailure,
utils.work_on_disk, self.dev, self.root_mb,
self.swap_mb, self.ephemeral_mb,
self.ephemeral_format, self.image_path, 'fake-uuid',
preserve_ephemeral=False,
configdrive=configdrive_url,
boot_option="netboot")
self.assertEqual(self.mock_ibd.call_args_list, calls)
self.mock_mp.assert_called_once_with(self.dev, self.root_mb,
self.swap_mb, self.ephemeral_mb,
configdrive_mb, commit=True,
boot_option="netboot",
boot_mode="bios")
mock_unlink.assert_called_once_with('fake-path')
@mock.patch.object(common_utils, 'execute', autospec=True)
class MakePartitionsTestCase(tests_base.TestCase):
def setUp(self):
super(MakePartitionsTestCase, self).setUp()
self.dev = 'fake-dev'
self.root_mb = 1024
self.swap_mb = 512
self.ephemeral_mb = 0
self.configdrive_mb = 0
self.parted_static_cmd = ['parted', '-a', 'optimal', '-s', self.dev,
'--', 'unit', 'MiB', 'mklabel', 'msdos']
def _test_make_partitions(self, mock_exc, boot_option):
mock_exc.return_value = (None, None)
utils.make_partitions(self.dev, self.root_mb, self.swap_mb,
self.ephemeral_mb, self.configdrive_mb,
boot_option=boot_option)
expected_mkpart = ['mkpart', 'primary', 'linux-swap', '1', '513',
'mkpart', 'primary', '', '513', '1537']
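        # With swap_mb=512 and root_mb=1024, partitioning starts at 1 MiB for
        # alignment, so swap spans [1, 513) MiB and root spans [513, 1537) MiB.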
if boot_option == "local":
expected_mkpart.extend(['set', '2', 'boot', 'on'])
parted_cmd = self.parted_static_cmd + expected_mkpart
parted_call = mock.call(*parted_cmd, run_as_root=True,
check_exit_code=[0])
fuser_cmd = ['fuser', 'fake-dev']
fuser_call = mock.call(*fuser_cmd, run_as_root=True,
check_exit_code=[0, 1])
mock_exc.assert_has_calls([parted_call, fuser_call])
def test_make_partitions(self, mock_exc):
self._test_make_partitions(mock_exc, boot_option="netboot")
def test_make_partitions_local_boot(self, mock_exc):
self._test_make_partitions(mock_exc, boot_option="local")
def test_make_partitions_with_ephemeral(self, mock_exc):
self.ephemeral_mb = 2048
expected_mkpart = ['mkpart', 'primary', '', '1', '2049',
'mkpart', 'primary', 'linux-swap', '2049', '2561',
'mkpart', 'primary', '', '2561', '3585']
cmd = self.parted_static_cmd + expected_mkpart
mock_exc.return_value = (None, None)
utils.make_partitions(self.dev, self.root_mb, self.swap_mb,
self.ephemeral_mb, self.configdrive_mb)
parted_call = mock.call(*cmd, run_as_root=True, check_exit_code=[0])
mock_exc.assert_has_calls([parted_call])
@mock.patch.object(utils, 'get_dev_block_size', autospec=True)
@mock.patch.object(common_utils, 'execute', autospec=True)
class DestroyMetaDataTestCase(tests_base.TestCase):
def setUp(self):
super(DestroyMetaDataTestCase, self).setUp()
self.dev = 'fake-dev'
self.node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
def test_destroy_disk_metadata(self, mock_exec, mock_gz):
mock_gz.return_value = 64
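        # The device reports 64 sectors of 512 bytes, so the second dd call
        # seeks to 64 - 36 = 28 in order to zero the tail of the device too.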
expected_calls = [mock.call('dd', 'if=/dev/zero', 'of=fake-dev',
'bs=512', 'count=36', run_as_root=True,
check_exit_code=[0]),
mock.call('dd', 'if=/dev/zero', 'of=fake-dev',
'bs=512', 'count=36', 'seek=28',
run_as_root=True,
check_exit_code=[0])]
utils.destroy_disk_metadata(self.dev, self.node_uuid)
mock_exec.assert_has_calls(expected_calls)
self.assertTrue(mock_gz.called)
def test_destroy_disk_metadata_get_dev_size_fail(self, mock_exec, mock_gz):
mock_gz.side_effect = processutils.ProcessExecutionError
expected_call = [mock.call('dd', 'if=/dev/zero', 'of=fake-dev',
'bs=512', 'count=36', run_as_root=True,
check_exit_code=[0])]
self.assertRaises(processutils.ProcessExecutionError,
utils.destroy_disk_metadata,
self.dev,
self.node_uuid)
mock_exec.assert_has_calls(expected_call)
def test_destroy_disk_metadata_dd_fail(self, mock_exec, mock_gz):
mock_exec.side_effect = processutils.ProcessExecutionError
expected_call = [mock.call('dd', 'if=/dev/zero', 'of=fake-dev',
'bs=512', 'count=36', run_as_root=True,
check_exit_code=[0])]
self.assertRaises(processutils.ProcessExecutionError,
utils.destroy_disk_metadata,
self.dev,
self.node_uuid)
mock_exec.assert_has_calls(expected_call)
self.assertFalse(mock_gz.called)
@mock.patch.object(common_utils, 'execute', autospec=True)
class GetDeviceBlockSizeTestCase(tests_base.TestCase):
def setUp(self):
super(GetDeviceBlockSizeTestCase, self).setUp()
self.dev = 'fake-dev'
self.node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
def test_get_dev_block_size(self, mock_exec):
mock_exec.return_value = ("64", "")
expected_call = [mock.call('blockdev', '--getsz', self.dev,
run_as_root=True, check_exit_code=[0])]
utils.get_dev_block_size(self.dev)
mock_exec.assert_has_calls(expected_call)
@mock.patch.object(utils, 'dd', autospec=True)
@mock.patch.object(images, 'qemu_img_info', autospec=True)
@mock.patch.object(images, 'convert_image', autospec=True)
class PopulateImageTestCase(tests_base.TestCase):
def setUp(self):
super(PopulateImageTestCase, self).setUp()
def test_populate_raw_image(self, mock_cg, mock_qinfo, mock_dd):
type(mock_qinfo.return_value).file_format = mock.PropertyMock(
return_value='raw')
utils.populate_image('src', 'dst')
mock_dd.assert_called_once_with('src', 'dst')
self.assertFalse(mock_cg.called)
def test_populate_qcow2_image(self, mock_cg, mock_qinfo, mock_dd):
type(mock_qinfo.return_value).file_format = mock.PropertyMock(
return_value='qcow2')
utils.populate_image('src', 'dst')
mock_cg.assert_called_once_with('src', 'dst', 'raw', True)
self.assertFalse(mock_dd.called)
@mock.patch.object(utils, 'is_block_device', lambda d: True)
@mock.patch.object(utils, 'block_uuid', lambda p: 'uuid')
@mock.patch.object(utils, 'dd', lambda *_: None)
@mock.patch.object(images, 'convert_image', lambda *_: None)
@mock.patch.object(common_utils, 'mkfs', lambda *_: None)
# NOTE(dtantsur): destroy_disk_metadata resets file size, disabling it
@mock.patch.object(utils, 'destroy_disk_metadata', lambda *_: None)
class RealFilePartitioningTestCase(tests_base.TestCase):
"""This test applies some real-world partitioning scenario to a file.
    This test covers the whole partitioning flow, mocking everything that is
    not possible on a file. That helps ensure that we do all the partitioning
    math properly, and it also provides integration testing of DiskPartitioner.
"""
def setUp(self):
super(RealFilePartitioningTestCase, self).setUp()
# NOTE(dtantsur): no parted utility on gate-ironic-python26
try:
common_utils.execute('parted', '--version')
except OSError as exc:
self.skipTest('parted utility was not found: %s' % exc)
self.file = tempfile.NamedTemporaryFile(delete=False)
# NOTE(ifarkas): the file needs to be closed, so fuser won't report
# any usage
self.file.close()
# NOTE(dtantsur): 20 MiB file with zeros
common_utils.execute('dd', 'if=/dev/zero', 'of=%s' % self.file.name,
'bs=1', 'count=0', 'seek=20MiB')
@staticmethod
def _run_without_root(func, *args, **kwargs):
"""Make sure root is not required when using utils.execute."""
real_execute = common_utils.execute
def fake_execute(*cmd, **kwargs):
kwargs['run_as_root'] = False
return real_execute(*cmd, **kwargs)
with mock.patch.object(common_utils, 'execute', fake_execute):
return func(*args, **kwargs)
def test_different_sizes(self):
# NOTE(dtantsur): Keep this list in order with expected partitioning
fields = ['ephemeral_mb', 'swap_mb', 'root_mb']
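        # Each variant is (ephemeral_mb, swap_mb, root_mb); every total fits
        # within the 20 MiB test file created in setUp().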
variants = ((0, 0, 12), (4, 2, 8), (0, 4, 10), (5, 0, 10))
for variant in variants:
kwargs = dict(zip(fields, variant))
self._run_without_root(utils.work_on_disk, self.file.name,
ephemeral_format='ext4', node_uuid='',
image_path='path', **kwargs)
part_table = self._run_without_root(
disk_partitioner.list_partitions, self.file.name)
for part, expected_size in zip(part_table, filter(None, variant)):
self.assertEqual(expected_size, part['size'],
"comparison failed for %s" % list(variant))
def test_whole_disk(self):
# 6 MiB ephemeral + 3 MiB swap + 9 MiB root + 1 MiB for MBR
# + 1 MiB MAGIC == 20 MiB whole disk
# TODO(dtantsur): figure out why we need 'magic' 1 more MiB
        # and why it differs between Ubuntu and Fedora (see below)
self._run_without_root(utils.work_on_disk, self.file.name,
root_mb=9, ephemeral_mb=6, swap_mb=3,
ephemeral_format='ext4', node_uuid='',
image_path='path')
part_table = self._run_without_root(
disk_partitioner.list_partitions, self.file.name)
sizes = [part['size'] for part in part_table]
# NOTE(dtantsur): parted in Ubuntu 12.04 will occupy the last MiB,
# parted in Fedora 20 won't - thus two possible variants for last part
self.assertEqual([6, 3], sizes[:2],
"unexpected partitioning %s" % part_table)
self.assertIn(sizes[2], (9, 10))
@mock.patch.object(image_cache, 'clean_up_caches', autospec=True)
def test_fetch_images(self, mock_clean_up_caches):
mock_cache = mock.MagicMock(
spec_set=['fetch_image', 'master_dir'], master_dir='master_dir')
utils.fetch_images(None, mock_cache, [('uuid', 'path')])
mock_clean_up_caches.assert_called_once_with(None, 'master_dir',
[('uuid', 'path')])
mock_cache.fetch_image.assert_called_once_with('uuid', 'path',
ctx=None,
force_raw=True)
@mock.patch.object(image_cache, 'clean_up_caches', autospec=True)
def test_fetch_images_fail(self, mock_clean_up_caches):
exc = exception.InsufficientDiskSpace(path='a',
required=2,
actual=1)
mock_cache = mock.MagicMock(
spec_set=['master_dir'], master_dir='master_dir')
mock_clean_up_caches.side_effect = iter([exc])
self.assertRaises(exception.InstanceDeployFailure,
utils.fetch_images,
None,
mock_cache,
[('uuid', 'path')])
mock_clean_up_caches.assert_called_once_with(None, 'master_dir',
[('uuid', 'path')])
@mock.patch.object(shutil, 'copyfileobj', autospec=True)
@mock.patch.object(requests, 'get', autospec=True)
class GetConfigdriveTestCase(tests_base.TestCase):
@mock.patch.object(gzip, 'GzipFile', autospec=True)
def test_get_configdrive(self, mock_gzip, mock_requests, mock_copy):
mock_requests.return_value = mock.MagicMock(
spec_set=['content'], content='Zm9vYmFy')
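        # 'Zm9vYmFy' is the base64 encoding of 'foobar'.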
utils._get_configdrive('http://1.2.3.4/cd', 'fake-node-uuid')
mock_requests.assert_called_once_with('http://1.2.3.4/cd')
mock_gzip.assert_called_once_with('configdrive', 'rb',
fileobj=mock.ANY)
mock_copy.assert_called_once_with(mock.ANY, mock.ANY)
@mock.patch.object(gzip, 'GzipFile', autospec=True)
def test_get_configdrive_base64_string(self, mock_gzip, mock_requests,
mock_copy):
utils._get_configdrive('Zm9vYmFy', 'fake-node-uuid')
self.assertFalse(mock_requests.called)
mock_gzip.assert_called_once_with('configdrive', 'rb',
fileobj=mock.ANY)
mock_copy.assert_called_once_with(mock.ANY, mock.ANY)
def test_get_configdrive_bad_url(self, mock_requests, mock_copy):
mock_requests.side_effect = requests.exceptions.RequestException
self.assertRaises(exception.InstanceDeployFailure,
utils._get_configdrive, 'http://1.2.3.4/cd',
'fake-node-uuid')
self.assertFalse(mock_copy.called)
@mock.patch.object(base64, 'b64decode', autospec=True)
def test_get_configdrive_base64_error(self, mock_b64, mock_requests,
mock_copy):
mock_b64.side_effect = TypeError
self.assertRaises(exception.InstanceDeployFailure,
utils._get_configdrive,
'malformed', 'fake-node-uuid')
mock_b64.assert_called_once_with('malformed')
self.assertFalse(mock_copy.called)
@mock.patch.object(gzip, 'GzipFile', autospec=True)
def test_get_configdrive_gzip_error(self, mock_gzip, mock_requests,
mock_copy):
mock_requests.return_value = mock.MagicMock(
spec_set=['content'], content='Zm9vYmFy')
mock_copy.side_effect = IOError
self.assertRaises(exception.InstanceDeployFailure,
utils._get_configdrive, 'http://1.2.3.4/cd',
'fake-node-uuid')
mock_requests.assert_called_once_with('http://1.2.3.4/cd')
mock_gzip.assert_called_once_with('configdrive', 'rb',
fileobj=mock.ANY)
mock_copy.assert_called_once_with(mock.ANY, mock.ANY)
class VirtualMediaDeployUtilsTestCase(db_base.DbTestCase):
def setUp(self):
super(VirtualMediaDeployUtilsTestCase, self).setUp()
mgr_utils.mock_the_extension_manager(driver="iscsi_ilo")
info_dict = db_utils.get_test_ilo_info()
self.node = obj_utils.create_test_node(self.context,
driver='iscsi_ilo', driver_info=info_dict)
def test_get_single_nic_with_vif_port_id(self):
obj_utils.create_test_port(self.context, node_id=self.node.id,
address='aa:bb:cc', uuid=uuidutils.generate_uuid(),
extra={'vif_port_id': 'test-vif-A'}, driver='iscsi_ilo')
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
address = utils.get_single_nic_with_vif_port_id(task)
self.assertEqual('aa:bb:cc', address)
class ParseInstanceInfoCapabilitiesTestCase(tests_base.TestCase):
def setUp(self):
super(ParseInstanceInfoCapabilitiesTestCase, self).setUp()
self.node = obj_utils.get_test_node(self.context, driver='fake')
def test_parse_instance_info_capabilities_string(self):
self.node.instance_info = {'capabilities': '{"cat": "meow"}'}
expected_result = {"cat": "meow"}
result = utils.parse_instance_info_capabilities(self.node)
self.assertEqual(expected_result, result)
def test_parse_instance_info_capabilities(self):
self.node.instance_info = {'capabilities': {"dog": "wuff"}}
expected_result = {"dog": "wuff"}
result = utils.parse_instance_info_capabilities(self.node)
self.assertEqual(expected_result, result)
def test_parse_instance_info_invalid_type(self):
self.node.instance_info = {'capabilities': 'not-a-dict'}
self.assertRaises(exception.InvalidParameterValue,
utils.parse_instance_info_capabilities, self.node)
def test_is_secure_boot_requested_true(self):
self.node.instance_info = {'capabilities': {"secure_boot": "tRue"}}
self.assertTrue(utils.is_secure_boot_requested(self.node))
def test_is_secure_boot_requested_false(self):
self.node.instance_info = {'capabilities': {"secure_boot": "false"}}
self.assertFalse(utils.is_secure_boot_requested(self.node))
def test_is_secure_boot_requested_invalid(self):
self.node.instance_info = {'capabilities': {"secure_boot": "invalid"}}
self.assertFalse(utils.is_secure_boot_requested(self.node))
def test_get_boot_mode_for_deploy_using_capabilities(self):
properties = {'capabilities': 'boot_mode:uefi,cap2:value2'}
self.node.properties = properties
result = utils.get_boot_mode_for_deploy(self.node)
self.assertEqual('uefi', result)
def test_get_boot_mode_for_deploy_using_instance_info_cap(self):
instance_info = {'capabilities': {'secure_boot': 'True'}}
self.node.instance_info = instance_info
result = utils.get_boot_mode_for_deploy(self.node)
self.assertEqual('uefi', result)
def test_get_boot_mode_for_deploy_using_instance_info(self):
instance_info = {'deploy_boot_mode': 'bios'}
self.node.instance_info = instance_info
result = utils.get_boot_mode_for_deploy(self.node)
self.assertEqual('bios', result)
class TrySetBootDeviceTestCase(db_base.DbTestCase):
def setUp(self):
super(TrySetBootDeviceTestCase, self).setUp()
mgr_utils.mock_the_extension_manager(driver="fake")
self.node = obj_utils.create_test_node(self.context, driver="fake")
@mock.patch.object(manager_utils, 'node_set_boot_device', autospec=True)
def test_try_set_boot_device_okay(self, node_set_boot_device_mock):
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
utils.try_set_boot_device(task, boot_devices.DISK,
persistent=True)
node_set_boot_device_mock.assert_called_once_with(
task, boot_devices.DISK, persistent=True)
@mock.patch.object(utils, 'LOG', autospec=True)
@mock.patch.object(manager_utils, 'node_set_boot_device', autospec=True)
def test_try_set_boot_device_ipmifailure_uefi(self,
node_set_boot_device_mock, log_mock):
self.node.properties = {'capabilities': 'boot_mode:uefi'}
self.node.save()
node_set_boot_device_mock.side_effect = exception.IPMIFailure(cmd='a')
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
utils.try_set_boot_device(task, boot_devices.DISK,
persistent=True)
node_set_boot_device_mock.assert_called_once_with(
task, boot_devices.DISK, persistent=True)
log_mock.warning.assert_called_once_with(mock.ANY)
@mock.patch.object(manager_utils, 'node_set_boot_device', autospec=True)
def test_try_set_boot_device_ipmifailure_bios(
self, node_set_boot_device_mock):
node_set_boot_device_mock.side_effect = exception.IPMIFailure(cmd='a')
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
self.assertRaises(exception.IPMIFailure,
utils.try_set_boot_device,
task, boot_devices.DISK, persistent=True)
node_set_boot_device_mock.assert_called_once_with(
task, boot_devices.DISK, persistent=True)
@mock.patch.object(manager_utils, 'node_set_boot_device', autospec=True)
def test_try_set_boot_device_some_other_exception(
self, node_set_boot_device_mock):
exc = exception.IloOperationError(operation="qwe", error="error")
node_set_boot_device_mock.side_effect = exc
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
self.assertRaises(exception.IloOperationError,
utils.try_set_boot_device,
task, boot_devices.DISK, persistent=True)
node_set_boot_device_mock.assert_called_once_with(
task, boot_devices.DISK, persistent=True)
class AgentCleaningTestCase(db_base.DbTestCase):
def setUp(self):
super(AgentCleaningTestCase, self).setUp()
mgr_utils.mock_the_extension_manager(driver='fake_agent')
n = {'driver': 'fake_agent',
'driver_internal_info': {'agent_url': 'http://127.0.0.1:9999'}}
self.node = obj_utils.create_test_node(self.context, **n)
self.ports = [obj_utils.create_test_port(self.context,
node_id=self.node.id)]
self.clean_steps = {
'hardware_manager_version': '1',
'clean_steps': {
'GenericHardwareManager': [
{'interface': 'deploy',
'step': 'erase_devices',
'priority': 20},
],
'SpecificHardwareManager': [
{'interface': 'deploy',
'step': 'update_firmware',
'priority': 30},
{'interface': 'raid',
'step': 'create_raid',
'priority': 10},
]
}
}
@mock.patch('ironic.objects.Port.list_by_node_id',
spec_set=types.FunctionType)
@mock.patch.object(agent_client.AgentClient, 'get_clean_steps',
autospec=True)
def test_get_clean_steps(self, client_mock, list_ports_mock):
client_mock.return_value = {
'command_result': self.clean_steps}
list_ports_mock.return_value = self.ports
with task_manager.acquire(
self.context, self.node['uuid'], shared=False) as task:
response = utils.agent_get_clean_steps(task)
client_mock.assert_called_once_with(mock.ANY, task.node,
self.ports)
self.assertEqual('1', task.node.driver_internal_info[
'hardware_manager_version'])
# Since steps are returned in dicts, they have non-deterministic
# ordering
self.assertEqual(2, len(response))
self.assertIn(self.clean_steps['clean_steps'][
'GenericHardwareManager'][0], response)
self.assertIn(self.clean_steps['clean_steps'][
'SpecificHardwareManager'][0], response)
@mock.patch('ironic.objects.Port.list_by_node_id',
spec_set=types.FunctionType)
@mock.patch.object(agent_client.AgentClient, 'get_clean_steps',
autospec=True)
def test_get_clean_steps_missing_steps(self, client_mock,
list_ports_mock):
del self.clean_steps['clean_steps']
client_mock.return_value = {
'command_result': self.clean_steps}
list_ports_mock.return_value = self.ports
with task_manager.acquire(
self.context, self.node['uuid'], shared=False) as task:
self.assertRaises(exception.NodeCleaningFailure,
utils.agent_get_clean_steps,
task)
client_mock.assert_called_once_with(mock.ANY, task.node,
self.ports)
@mock.patch('ironic.objects.Port.list_by_node_id',
spec_set=types.FunctionType)
@mock.patch.object(agent_client.AgentClient, 'execute_clean_step',
autospec=True)
def test_execute_clean_step(self, client_mock, list_ports_mock):
client_mock.return_value = {
'command_status': 'SUCCEEDED'}
list_ports_mock.return_value = self.ports
with task_manager.acquire(
self.context, self.node['uuid'], shared=False) as task:
response = utils.agent_execute_clean_step(
task,
self.clean_steps['clean_steps']['GenericHardwareManager'][0])
self.assertEqual(states.CLEANING, response)
@mock.patch('ironic.objects.Port.list_by_node_id',
spec_set=types.FunctionType)
@mock.patch.object(agent_client.AgentClient, 'execute_clean_step',
autospec=True)
def test_execute_clean_step_running(self, client_mock, list_ports_mock):
client_mock.return_value = {
'command_status': 'RUNNING'}
list_ports_mock.return_value = self.ports
with task_manager.acquire(
self.context, self.node['uuid'], shared=False) as task:
response = utils.agent_execute_clean_step(
task,
self.clean_steps['clean_steps']['GenericHardwareManager'][0])
self.assertEqual(states.CLEANING, response)
@mock.patch('ironic.objects.Port.list_by_node_id',
spec_set=types.FunctionType)
@mock.patch.object(agent_client.AgentClient, 'execute_clean_step',
autospec=True)
def test_execute_clean_step_version_mismatch(self, client_mock,
list_ports_mock):
client_mock.return_value = {
'command_status': 'RUNNING'}
list_ports_mock.return_value = self.ports
with task_manager.acquire(
self.context, self.node['uuid'], shared=False) as task:
response = utils.agent_execute_clean_step(
task,
self.clean_steps['clean_steps']['GenericHardwareManager'][0])
self.assertEqual(states.CLEANING, response)
@mock.patch.object(utils, 'is_block_device', autospec=True)
@mock.patch.object(utils, 'login_iscsi', lambda *_: None)
@mock.patch.object(utils, 'discovery', lambda *_: None)
@mock.patch.object(utils, 'logout_iscsi', lambda *_: None)
@mock.patch.object(utils, 'delete_iscsi', lambda *_: None)
@mock.patch.object(utils, 'get_dev', lambda *_: '/dev/fake')
class ISCSISetupAndHandleErrorsTestCase(tests_base.TestCase):
def test_no_parent_device(self, mock_ibd):
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
mock_ibd.return_value = False
expected_dev = '/dev/fake'
with testtools.ExpectedException(exception.InstanceDeployFailure):
with utils._iscsi_setup_and_handle_errors(
address, port, iqn, lun) as dev:
self.assertEqual(expected_dev, dev)
mock_ibd.assert_called_once_with(expected_dev)
def test_parent_device_yield(self, mock_ibd):
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
expected_dev = '/dev/fake'
mock_ibd.return_value = True
with utils._iscsi_setup_and_handle_errors(address, port,
iqn, lun) as dev:
self.assertEqual(expected_dev, dev)
mock_ibd.assert_called_once_with(expected_dev)
| apache-2.0 | -3,904,300,071,440,802,000 | 42.30977 | 79 | 0.553046 | false |
mesheven/pyOCD | pyocd/target/target_MKL43Z256xxx4.py | 1 | 6800 | """
mbed CMSIS-DAP debugger
Copyright (c) 2006-2013,2018 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from .family.target_kinetis import Kinetis
from .family.flash_kinetis import Flash_Kinetis
from ..core.memory_map import (FlashRegion, RamRegion, MemoryMap)
from ..debug.svd import SVDFile
import logging
FLASH_ALGO = { 'load_address' : 0x20000000,
'instructions' : [
0xE00ABE00, 0x062D780D, 0x24084068, 0xD3000040, 0x1E644058, 0x1C49D1FA, 0x2A001E52, 0x4770D1F2,
0x09032200, 0xd373428b, 0x428b0a03, 0x0b03d358, 0xd33c428b, 0x428b0c03, 0xe012d321, 0x430b4603,
0x2200d47f, 0x428b0843, 0x0903d374, 0xd35f428b, 0x428b0a03, 0x0b03d344, 0xd328428b, 0x428b0c03,
0x22ffd30d, 0xba120209, 0x428b0c03, 0x1212d302, 0xd0650209, 0x428b0b03, 0xe000d319, 0x0bc30a09,
0xd301428b, 0x1ac003cb, 0x0b834152, 0xd301428b, 0x1ac0038b, 0x0b434152, 0xd301428b, 0x1ac0034b,
0x0b034152, 0xd301428b, 0x1ac0030b, 0x0ac34152, 0xd301428b, 0x1ac002cb, 0x0a834152, 0xd301428b,
0x1ac0028b, 0x0a434152, 0xd301428b, 0x1ac0024b, 0x0a034152, 0xd301428b, 0x1ac0020b, 0xd2cd4152,
0x428b09c3, 0x01cbd301, 0x41521ac0, 0x428b0983, 0x018bd301, 0x41521ac0, 0x428b0943, 0x014bd301,
0x41521ac0, 0x428b0903, 0x010bd301, 0x41521ac0, 0x428b08c3, 0x00cbd301, 0x41521ac0, 0x428b0883,
0x008bd301, 0x41521ac0, 0x428b0843, 0x004bd301, 0x41521ac0, 0xd2001a41, 0x41524601, 0x47704610,
0x0fcae05d, 0x4249d000, 0xd3001003, 0x40534240, 0x469c2200, 0x428b0903, 0x0a03d32d, 0xd312428b,
0x018922fc, 0x0a03ba12, 0xd30c428b, 0x11920189, 0xd308428b, 0x11920189, 0xd304428b, 0xd03a0189,
0xe0001192, 0x09c30989, 0xd301428b, 0x1ac001cb, 0x09834152, 0xd301428b, 0x1ac0018b, 0x09434152,
0xd301428b, 0x1ac0014b, 0x09034152, 0xd301428b, 0x1ac0010b, 0x08c34152, 0xd301428b, 0x1ac000cb,
0x08834152, 0xd301428b, 0x1ac0008b, 0xd2d94152, 0x428b0843, 0x004bd301, 0x41521ac0, 0xd2001a41,
0x46634601, 0x105b4152, 0xd3014610, 0x2b004240, 0x4249d500, 0x46634770, 0xd300105b, 0xb5014240,
0x46c02000, 0xbd0246c0, 0xb510480a, 0x44484908, 0xf8ecf000, 0xd1042800, 0x21004806, 0xf0004448,
0x4a05f9b1, 0x230168d1, 0x4319029b, 0xbd1060d1, 0x6b65666b, 0x00000004, 0xf0003000, 0x4c0cb570,
0x444c4605, 0x4b0b4601, 0x68e24620, 0xf894f000, 0xd1052800, 0x46292300, 0x68e24620, 0xf956f000,
0x68ca4905, 0x029b2301, 0x60ca431a, 0x0000bd70, 0x00000004, 0x6b65666b, 0xf0003000, 0x4905b510,
0x60082000, 0x44484804, 0xf8e8f000, 0xd0002800, 0xbd102001, 0x40048100, 0x00000004, 0x460cb570,
0x4606460b, 0x480d4601, 0x4615b084, 0xf0004448, 0x2800f8f5, 0x9001d10a, 0x21019002, 0x91004807,
0x4622462b, 0x44484631, 0xf96af000, 0x68ca4904, 0x029b2301, 0x60ca431a, 0xbd70b004, 0x00000004,
0xf0003000, 0x47702000, 0xd0032800, 0xd801290f, 0xd0012a04, 0x47702004, 0x47702000, 0xd1012800,
0x47702004, 0x1e5bb410, 0x421c460c, 0x421ad101, 0xbc10d002, 0x47702065, 0x428b6803, 0x6840d804,
0x18181889, 0xd2024288, 0x2066bc10, 0xbc104770, 0x47702000, 0x42884903, 0x206bd001, 0x20004770,
0x00004770, 0x6b65666b, 0x2170480a, 0x21807001, 0x78017001, 0xd5fc0609, 0x06817800, 0x2067d501,
0x06c14770, 0x2068d501, 0x07c04770, 0x2069d0fc, 0x00004770, 0x40020000, 0x4605b5f8, 0x460c4616,
0xf7ff4618, 0x2800ffd7, 0x2304d12b, 0x46214632, 0xf7ff4628, 0x0007ffb3, 0x19a6d123, 0x68e91e76,
0x91004630, 0xfe3cf7ff, 0xd0032900, 0x1c409e00, 0x1e764346, 0xd81342b4, 0x4478480a, 0x60046800,
0x20094909, 0xf7ff71c8, 0x4607ffbf, 0x280069a8, 0x4780d000, 0xd1032f00, 0x190468e8, 0xd9eb42b4,
0xbdf84638, 0x0000026a, 0x40020000, 0x4604b510, 0xf7ff4608, 0x2800ff9f, 0x2c00d106, 0x4904d005,
0x71c82044, 0xffa0f7ff, 0x2004bd10, 0x0000bd10, 0x40020000, 0xd00c2800, 0xd00a2a00, 0xd21a2908,
0x447b000b, 0x18db791b, 0x0705449f, 0x0d0b0907, 0x2004110f, 0x68c04770, 0x6840e00a, 0x6880e008,
0x6800e006, 0x2000e004, 0x6900e002, 0x6940e000, 0x20006010, 0x206a4770, 0x00004770, 0xd0142800,
0x68c9490c, 0x0e094a0c, 0x447a0049, 0x03095a51, 0x2200d00d, 0x60416002, 0x60812102, 0x61426102,
0x61820249, 0x461060c1, 0x20044770, 0x20644770, 0x00004770, 0x40048040, 0x0000019a, 0xd1012a00,
0x47702004, 0x461cb5ff, 0x4615b081, 0x2304460e, 0x98014622, 0xff22f7ff, 0xd1190007, 0xd0162c00,
0x4478480c, 0x600e6801, 0x6800cd02, 0x490a6041, 0x71c82006, 0xff38f7ff, 0x98014607, 0x28006980,
0x4780d000, 0xd1022f00, 0x1f241d36, 0x4638d1e8, 0xbdf0b005, 0x00000162, 0x40020000, 0xd0022800,
0x20006181, 0x20044770, 0x00004770, 0xb081b5ff, 0x460e4614, 0x23044605, 0xfef0f7ff, 0xd12a2800,
0x686868a9, 0xfd7cf7ff, 0x42719000, 0x40014240, 0x42b7424f, 0x9800d101, 0x2c00183f, 0x1bbdd01a,
0xd90042a5, 0x490d4625, 0x447908a8, 0x600e6809, 0x2201490b, 0x0a0271ca, 0x728872ca, 0x72489804,
0xfef2f7ff, 0xd1062800, 0x1b649800, 0x183f1976, 0xd1e42c00, 0xb0052000, 0x0000bdf0, 0x000000da,
0x40020000, 0xd1012800, 0x47702004, 0x4803b510, 0x71c22240, 0xf7ff7181, 0xbd10fed7, 0x40020000,
0xd1012b00, 0x47702004, 0x461cb5f8, 0x460e4615, 0x9f082304, 0xfea2f7ff, 0xd1192800, 0xd0172d00,
0x447a4a0f, 0x60066810, 0x2102480e, 0x990671c1, 0x681172c1, 0x60886820, 0xfeb6f7ff, 0xd0082800,
0x29009907, 0x600ed000, 0xd0012f00, 0x60392100, 0x1f2dbdf8, 0x1d361d24, 0xd1e12d00, 0x0000bdf8,
0x00000062, 0x40020000, 0x00040002, 0x00080000, 0x00100000, 0x00200000, 0x00400000, 0x00000000,
0x00000000, 0x00400000, 0x40020004, 0x00000000,
],
'pc_init' : 0x2000027D,
'pc_unInit': 0x200002E5,
'pc_program_page': 0x2000029D,
'pc_erase_sector': 0x2000023D,
'pc_eraseAll' : 0x20000209,
'static_base' : 0x20000000 + 0x00000020 + 0x0000060c,
'begin_stack' : 0x20000000 + 0x00000800,
'begin_data' : 0x20000000 + 0x00000A00,
'page_buffers' : [0x20000a00, 0x20001200], # Enable double buffering
'min_program_length' : 4,
'analyzer_supported' : True,
'analyzer_address' : 0x20002000
};
class KL43Z4(Kinetis):
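    # 0x40000 bytes = 256 KiB of program flash in 1 KiB (0x400) sectors, plus
    # 0x8000 bytes = 32 KiB of SRAM, matching the MKL43Z256 part targeted here.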
memoryMap = MemoryMap(
FlashRegion( start=0, length=0x40000, blocksize=0x400, is_boot_memory=True,
algo=FLASH_ALGO, flash_class=Flash_Kinetis),
RamRegion( start=0x1fffe000, length=0x8000)
)
def __init__(self, transport):
super(KL43Z4, self).__init__(transport, self.memoryMap)
self._svd_location = SVDFile(vendor="Freescale", filename="MKL43Z4.svd")
| apache-2.0 | 9,023,858,836,470,273,000 | 64.384615 | 101 | 0.769559 | false |
samdsmx/omegaup | stuff/browser_analytics.py | 2 | 7171 | #!/usr/bin/python3
"""Analyze browser usage from Google Analytics.
In order to use this tool, export a .csv report of browsers (Audience >
Technology > Browser & OS), with Secondary dimension of Browser Version.
The mappings of some browser versions to their equivalent Chromium version may
need to be maintained every now and then.
"""
import argparse
import collections
import csv
import dataclasses
from typing import Callable, DefaultDict, List, Sequence, TextIO, Tuple
@dataclasses.dataclass
class Browser:
"""A Browser version"""
name: str = ''
version: str = ''
users: int = 0
users_share: float = 0
def _parse_report(report: TextIO,
column: str) -> Tuple[Browser, List[Browser]]:
# pylint: disable=too-many-branches,too-many-statements
csv_lines: List[str] = []
# Strip the header. It consists of a series of lines that start with #
# followed by an empty line.
for line in report:
if line.strip():
continue
break
# Parse the contents.
for line in report:
line = line.strip()
if not line:
break
csv_lines.append(line)
browser_mapping: DefaultDict[Tuple[str, str],
Browser] = collections.defaultdict(Browser)
reader = csv.DictReader(csv_lines)
totals = Browser(name='Total', users_share=1.)
for row in reader:
version = row['Browser Version'].split('.')[0]
if not version.isnumeric():
version = ''
name = row['Browser']
if name == 'Edge' and version >= '79':
# Edge started using Chromium since version 79.
name = 'Chrome'
elif name == 'Android Webview' and version >= '36':
# Android started using Chromium since Lollipop / version 36.
name = 'Chrome'
elif name == 'UC Browser':
chromium_version_mapping = {
'12': '57',
}
if version in chromium_version_mapping:
name = 'Chrome'
version = chromium_version_mapping[version]
elif name == 'Samsung Internet':
chromium_version_mapping = {
'4': '44',
'5': '51',
'6': '56',
'7': '59',
'8': '63',
'9': '67',
'10': '71',
'11': '75',
'12': '79',
}
if version in chromium_version_mapping:
name = 'Chrome'
version = chromium_version_mapping[version]
elif name == 'Opera':
chromium_version_mapping = {
'47': '48',
'50': '63',
'51': '64',
'52': '65',
'53': '66',
'54': '67',
'55': '68',
'56': '69',
'57': '70',
'58': '71',
'59': '72',
'60': '73',
'61': '74',
'62': '75',
'63': '76',
'64': '77',
'65': '78',
'66': '79',
'67': '80',
'68': '80',
'69': '83',
}
if version in chromium_version_mapping:
name = 'Chrome'
version = chromium_version_mapping[version]
elif name == 'YaBrowser':
chromium_version_mapping = {
'20': '83',
}
if version in chromium_version_mapping:
name = 'Chrome'
version = chromium_version_mapping[version]
elif name == 'Safari':
# Some versions of Safari report the WebKit version, not the Safari
# one.
if version == '602':
version = '10'
if version == '604':
version = '11'
if version == '605':
version = '11'
key = (name, version)
if key == ('', ''):
# This is the totals row
continue
value = int(row[column].replace(',', ''))
browser_mapping[key].users += value
totals.users += value
for (name, version), browser in browser_mapping.items():
browser.name = name
browser.version = version
browser.users_share = browser.users / totals.users
return totals, list(browser_mapping.values())
def _is_filtered(browser: Browser, ignore: Sequence[str]) -> bool:
for descriptor in ignore:
op_mapping: Sequence[Tuple[str, Callable[[int, int], bool]]] = (
('<=', lambda a, b: a <= b),
('=', lambda a, b: a == b),
('<', lambda a, b: a < b),
)
for op, fn in op_mapping:
if op not in descriptor:
continue
            name, version = descriptor.split(op, 1)
            if (browser.name == name and browser.version
                    and fn(int(browser.version), int(version))):
                return True
            break
if browser.name == descriptor:
return True
return False
def _main() -> None:
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--ignore',
default=[
'Android Browser',
'Android Runtime',
'Android Webview<36',
'Chrome<51',
'Firefox<68',
'Hexometer',
'Internet Explorer',
'Opera Mini',
'Safari<12',
'Samsung Internet<4',
'[FBAN',
],
type=str,
nargs='*',
help='Ignore browser')
parser.add_argument('--column', default='Users')
parser.add_argument('--sort-by-share', action='store_true')
parser.add_argument('report',
type=argparse.FileType('r'),
metavar='REPORT.CSV',
help='An exported .csv from Google Analytics')
args = parser.parse_args()
totals, browsers = _parse_report(args.report, args.column)
if args.sort_by_share:
browsers.sort(key=lambda b: b.users, reverse=True)
else:
browsers.sort(key=lambda b: (b.name, b.version))
cumulative = 0.
print(f'{"Browser name":20} {"Version":7} '
f'{"Users":>6} {"Share%":>7} {"Cmltiv%":>7} ')
print('=' * 51)
for browser in browsers:
if _is_filtered(browser, args.ignore):
continue
cumulative += browser.users
print(f'{browser.name:20} {browser.version:>7} '
f'{browser.users:6} '
f'{browser.users_share*100:6.2f}% '
f'{cumulative/totals.users*100:6.2f}%')
print('=' * 51)
print(f'{totals.name:20} {totals.version:>7} '
f'{totals.users:6} '
f'{totals.users_share*100:6.2f}% '
f'{cumulative/totals.users*100:6.2f}%')
if __name__ == '__main__':
_main()
| bsd-3-clause | -5,068,709,898,575,506,000 | 32.509346 | 79 | 0.473714 | false |
MariusCC/packstack | packstack/plugins/openstack_client_400.py | 1 | 2747 | """
Installs and configures an OpenStack Client
"""
import logging
from packstack.installer import validators
from packstack.installer import basedefs, output_messages
from packstack.installer import utils
from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile
# Controller object will be initialized from main flow
controller = None
# Plugin name
PLUGIN_NAME = "OS-CLIENT"
PLUGIN_NAME_COLORED = utils.color_text(PLUGIN_NAME, 'blue')
logging.debug("plugin %s loaded", __name__)
def initConfig(controllerObject):
global controller
controller = controllerObject
logging.debug("Adding OpenStack Client configuration")
paramsList = [
{"CMD_OPTION" : "osclient-host",
"USAGE" : "The IP address of the server on which to install the OpenStack client packages. An admin \"rc\" file will also be installed",
"PROMPT" : "Enter the IP address of the client server",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_ssh],
"DEFAULT_VALUE" : utils.get_localhost_ip(),
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_OSCLIENT_HOST",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
]
groupDict = { "GROUP_NAME" : "NOVACLIENT",
"DESCRIPTION" : "NOVACLIENT Config parameters",
"PRE_CONDITION" : "CONFIG_CLIENT_INSTALL",
"PRE_CONDITION_MATCH" : "y",
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True}
controller.addGroup(groupDict, paramsList)
def initSequences(controller):
if controller.CONF['CONFIG_CLIENT_INSTALL'] != 'y':
return
osclientsteps = [
{'title': 'Adding OpenStack Client manifest entries', 'functions':[createmanifest]}
]
controller.addSequence("Installing OpenStack Client", [], [], osclientsteps)
def createmanifest():
client_host = controller.CONF['CONFIG_OSCLIENT_HOST'].strip()
manifestfile = "%s_osclient.pp" % client_host
manifestdata = getManifestTemplate("openstack_client.pp")
appendManifestFile(manifestfile, manifestdata)
server = utils.ScriptRunner(client_host)
server.append('echo $HOME')
rc, root_home = server.execute()
msg = ("To use the command line tools you need to source the file "
"%s/keystonerc_admin created on %s")
controller.MESSAGES.append(msg % (root_home.strip(), client_host))
| apache-2.0 | -7,500,177,406,174,176,000 | 37.152778 | 165 | 0.603568 | false |
pFernbach/hpp-rbprm-corba | script/scenarios/sandbox/dynamic/talos_table.py | 1 | 5194 | from hpp.corbaserver.rbprm.rbprmbuilder import Builder
from hpp.corbaserver.rbprm.rbprmfullbody import FullBody
from hpp.gepetto import Viewer
import time
from hpp.corbaserver import ProblemSolver
from hpp.corbaserver.rbprm.rbprmstate import State,StateHelper
import time
rLegId = 'talos_rleg_rom'
rLeg = 'leg_right_1_joint'
rFoot = 'leg_right_6_joint'
lLegId = 'talos_lleg_rom'
lLeg = 'leg_left_1_joint'
lFoot = 'leg_left_6_joint'
rArmId = 'talos_rarm_rom'
rArm = 'arm_right_1_joint'
rHand = 'arm_right_7_joint'
lArmId = 'talos_larm_rom'
lArm = 'arm_left_1_joint'
lHand = 'arm_left_7_joint'
packageName = "talos_data"
meshPackageName = "talos_data"
rootJointType = "freeflyer"
urdfName = "talos"
urdfSuffix = "_reduced"
srdfSuffix = ""
fullBody = FullBody ()
fullBody.loadFullBodyModel(urdfName, rootJointType, meshPackageName, packageName, urdfSuffix, srdfSuffix)
fullBody.setJointBounds ("root_joint", [-5,5, -1.5, 1.5, 0.95, 1.05])
ps = ProblemSolver( fullBody )
from hpp.gepetto import ViewerFactory
vf = ViewerFactory (ps)
vf.loadObstacleModel ("hpp-rbprm-corba", "table_140_70_73", "planning")
q_ref = [
0.0, 0.0, 1.0232773, 0.0 , 0.0, 0.0, 1, #Free flyer
0.0, 0.0, -0.411354, 0.859395, -0.448041, -0.001708, #Left Leg
0.0, 0.0, -0.411354, 0.859395, -0.448041, -0.001708, #Right Leg
0.0 , 0.006761, #Chest
0.25847 , 0.173046, -0.0002, -0.525366, 0.0, -0.0, 0.1,-0.005, #Left Arm
-0.25847 , -0.173046, 0.0002 , -0.525366, 0.0, 0.0, 0.1,-0.005,#Right Arm
0., 0.]; # head
q_init = q_ref[::]
fullBody.setReferenceConfig(q_ref)
tStart = time.time()
# generate databases :
nbSamples = 1000
rLegOffset = [0,-0.00018,-0.107]
rLegOffset[2] += 0.006
rLegNormal = [0,0,1]
rLegx = 0.1; rLegy = 0.06
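# addLimb arguments, in order: limb id, first joint, effector joint, effector
# contact offset, contact normal, contact-patch extents x/y, sample count,
# sampling heuristic and resolution (as used throughout this script).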
fullBody.addLimb(rLegId,rLeg,rFoot,rLegOffset,rLegNormal, rLegx, rLegy, nbSamples, "static", 0.01)
fullBody.runLimbSampleAnalysis(rLegId, "ReferenceConfiguration", True)
#fullBody.saveLimbDatabase(rLegId, "./db/talos_rLeg_walk.db")
lLegOffset = [0,-0.00018,-0.107]
lLegOffset[2] += 0.006
lLegNormal = [0,0,1]
lLegx = 0.1; lLegy = 0.06
fullBody.addLimb(lLegId,lLeg,lFoot,lLegOffset,rLegNormal, lLegx, lLegy, nbSamples, "static", 0.01)
fullBody.runLimbSampleAnalysis(lLegId, "ReferenceConfiguration", True)
#fullBody.saveLimbDatabase(rLegId, "./db/talos_lLeg_walk.db")
#rArmOffset = [0.055,-0.04,-0.13]
rArmOffset = [-0.01,0.,-0.154]
rArmNormal = [0,0,1]
rArmx = 0.005; rArmy = 0.005
fullBody.addLimb(rArmId,rArm,rHand,rArmOffset,rArmNormal, rArmx, rArmy, nbSamples, "EFORT", 0.01)
fullBody.runLimbSampleAnalysis(rArmId, "ReferenceConfiguration", True)
"""
lArmOffset = [0.055,0.04,-0.13]
lArmNormal = [0,0,1]
lArmx = 0.02; lArmy = 0.02
fullBody.addLimb(larmId,larm,lHand,lArmOffset,lArmNormal, lArmx, lArmy, nbSamples, "EFORT", 0.01)
fullBody.runLimbSampleAnalysis(larmId, "ReferenceConfiguration", True)
"""
tGenerate = time.time() - tStart
print "generate databases in : "+str(tGenerate)+" s"
v = vf.createViewer(displayCoM=True)
v(q_init)
v.addLandmark(v.sceneName,0.5)
v.addLandmark('talos/arm_right_7_link',0.1)
q_init[0:2] = [-0.5,0.8]
q_init[32] -=1.5 # right elbow
v(q_init)
# sphere = target
from display_tools import *
createSphere('target',v,size=0.05,color=v.color.red)
v.client.gui.setVisibility('target','ON')
moveSphere('target',v,[0,0,0.5])
# create contact :
fullBody.setStartState(q_init,[lLegId,rLegId])
q_ref[0:3] = q_init[0:3]
sref = State(fullBody,q=q_ref,limbsIncontact=[lLegId,rLegId])
s0 = State(fullBody,q=q_init,limbsIncontact=[lLegId,rLegId])
createSphere('s',v)
p0 = [-0.25,0.5,0.75]
#p1 = [-0,0.45,0.8]
p = [0.1,0.5,0.75]
moveSphere('s',v,p)
s0_bis,success = StateHelper.addNewContact(sref,rArmId,p0,[0,0,1])
#s0_bis2,success = StateHelper.addNewContact(s0_bis,rArmId,p1,[0,0,1])
s1,success = StateHelper.addNewContact(s0_bis,rArmId,p,[0,0,1])
assert(success)
v(s1.q())
#project com
v(q_init)
com_i = fullBody.getCenterOfMass()
com_i[2] -= 0.03
com_i[0] += 0.06
createSphere("com",v)
moveSphere("com",v,com_i)
s1.projectToCOM(com_i)
v(s1.q())
s1_feet = State(fullBody,q=s1.q(),limbsIncontact=[lLegId,rLegId])
s2,success = StateHelper.addNewContact(s0_bis,rArmId,p,[0,0,1])
com=s2.getCenterOfMass()
#com[0] += 0.03
com[1] -= 0.06
com[2] -= 0.02
moveSphere("com",v,com)
s2.projectToCOM(com)
v(s2.q())
q3=q_init[::]
q3[20]=1.2
s3_0 = State(fullBody,q=q3,limbsIncontact=[lLegId,rLegId])
s3,success = StateHelper.addNewContact(s3_0,rArmId,p,[0,0,1])
assert(success)
com=s3.getCenterOfMass()
#com[0] += 0.03
com[1] -= 0.1
com[2] -= 0.02
moveSphere("com",v,com)
s3.projectToCOM(com)
v(s3.q())
"""
jointsName = [rFoot,lFoot,rHand]
contactPos = []
contactNormal = []
pn = s1.getCenterOfContactForLimb(rLegId)
contactPos += [pn[0]]
contactNormal += [pn[1]]
pn = s1.getCenterOfContactForLimb(lLegId)
contactPos += [pn[0]]
contactNormal += [pn[1]]
pn = s1.getCenterOfContactForLimb(rArmId)
contactPos += [pn[0]]
contactNormal += [pn[1]]
ps.client.problem.createStaticStabilityConstraint ('staticStability',
jointsName, contactPos, contactNormal,'root_joint')
"""
q2 = q_ref[::]
| lgpl-3.0 | -497,338,689,015,721,860 | 27.075676 | 105 | 0.684636 | false |
renatopp/psi-robotics | psi/engine/render_batch.py | 1 | 4484 | # =============================================================================
# Federal University of Rio Grande do Sul (UFRGS)
# Connectionist Artificial Intelligence Laboratory (LIAC)
# Renato de Pontes Pereira - [email protected]
# =============================================================================
# Copyright (c) 2011 Renato de Pontes Pereira, renato.ppontes at gmail dot com
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# =============================================================================
import psi
import numpy as np
from OpenGL.GL import *
from OpenGL.GLU import *
from OpenGL.GLUT import *
from OpenGL.arrays.vbo import VBO
__all__ = ['RenderBatch', 'RenderBatchOpt']
class RenderBatch(object):
def __init__(self, draw_type=GL_QUADS):
self.count = 0
self.color_data = []
self.position_data = []
self.color_buffer = VBO(np.array([]))
self.position_buffer = VBO(np.array([]))
self.draw_type = draw_type
def draw2d(self, points, color=(0, 0, 0, 1), rotation=0, center=(0, 0)):
n = len(points)
self.count += n
if not isinstance(color[0], (tuple, list)):
color = [color]*n
if rotation:
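            # Rotate about `center`: shift to the origin, apply the matrix from
            # psi.calc.rotation_matrix (presumably the standard 2x2 CCW rotation
            # [[cos a, -sin a], [sin a, cos a]]), then shift back.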
transform = psi.calc.rotation_matrix(rotation)
temp = np.array(points) - center
temp = transform.dot(temp.T).T + center
points = temp.tolist()
self.color_data.extend(color)
self.position_data.extend(points)
def clear(self):
self.position_data = []
self.color_data = []
self.count = 0
def render(self):
self.color_buffer.set_array(np.array(self.color_data, dtype='float32'))
self.position_buffer.set_array(np.array(self.position_data, dtype='float32'))
self.color_buffer.bind()
glColorPointer(4, GL_FLOAT, 0, self.color_buffer)
self.position_buffer.bind()
glVertexPointer(2, GL_FLOAT, 0, self.position_buffer)
glEnableClientState(GL_VERTEX_ARRAY)
glEnableClientState(GL_COLOR_ARRAY)
glDrawArrays(self.draw_type, 0, self.count)
glDisableClientState(GL_COLOR_ARRAY)
glDisableClientState(GL_VERTEX_ARRAY)
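# Minimal usage sketch for RenderBatch (assumes an active OpenGL context):
#
#     batch = RenderBatch(GL_QUADS)
#     batch.draw2d([(0, 0), (1, 0), (1, 1), (0, 1)], color=(1, 0, 0, 1))
#     batch.render()  # inside the draw loop
#     batch.clear()   # reset accumulated geometry for the next frame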
class RenderBatchOpt(object):
def __init__(self, draw_type=GL_QUADS):
self.count = 0
self.color_buffer = VBO(np.array([]))
self.vertex_buffer = VBO(np.array([]))
self.draw_type = draw_type
def draw2d(self, points, color=(0, 0, 0, 1), rotation=0, center=(0, 0)):
n = points.shape[0]
self.count += n
if rotation:
transform = psi.calc.rotation_matrix(rotation)
temp = points - center
temp = transform.dot(temp.T).T + center
            points = temp  # keep as an ndarray for the VBO
self.color_buffer.set_array(color)
self.vertex_buffer.set_array(points)
def clear(self):
self.color_buffer.set_array(np.array([]))
self.vertex_buffer.set_array(np.array([]))
self.count = 0
def render(self):
self.color_buffer.bind()
glColorPointer(4, GL_FLOAT, 0, self.color_buffer)
self.vertex_buffer.bind()
glVertexPointer(2, GL_FLOAT, 0, self.vertex_buffer)
glEnableClientState(GL_VERTEX_ARRAY)
glEnableClientState(GL_COLOR_ARRAY)
glDrawArrays(self.draw_type, 0, self.count)
glDisableClientState(GL_COLOR_ARRAY)
glDisableClientState(GL_VERTEX_ARRAY) | mit | -5,479,215,739,809,386,000 | 34.595238 | 85 | 0.622212 | false |
hfeeki/transifex | transifex/txcommon/listeners.py | 1 | 1502 | from django.conf import settings
from django.contrib.auth.models import Group, SiteProfileNotAvailable
from django.core.exceptions import ImproperlyConfigured
from django.db import models, transaction
from transifex.txcommon.log import logger
if not settings.AUTH_PROFILE_MODULE:
raise SiteProfileNotAvailable
try:
app_label, model_name = settings.AUTH_PROFILE_MODULE.split('.')
Profile = models.get_model(app_label, model_name)
except (ImportError, ImproperlyConfigured):
raise SiteProfileNotAvailable
if not Profile:
raise SiteProfileNotAvailable
@transaction.commit_manually
def add_user_to_registered_group(sender, **kwargs):
"""
Add any user created on the system to the `registered` group.
    This handler must be connected to the post_save signal of the User class.
    It also creates a public profile for the user if one does not exist.
"""
if 'created' in kwargs and kwargs['created'] is True:
user = kwargs['instance']
# Add user to registered group
group, created = Group.objects.get_or_create(name='registered')
user.groups.add(group)
sid = transaction.savepoint()
# Create Public Profile
try:
profile, created = Profile.objects.get_or_create(user=user)
profile.save()
transaction.savepoint_commit(sid)
        except Exception:
logger.debug("User profile not created.")
transaction.savepoint_rollback(sid)
transaction.commit()
| gpl-2.0 | 5,225,806,281,291,356,000 | 33.136364 | 80 | 0.703728 | false |
mkreider/cocotb2 | cocotb/__init__.py | 2 | 6072 | ''' Copyright (c) 2013 Potential Ventures Ltd
Copyright (c) 2013 SolarFlare Communications Inc
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of Potential Ventures Ltd,
SolarFlare Communications Inc nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL POTENTIAL VENTURES LTD BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. '''
"""
Cocotb is a coroutine-based cosimulation framework for writing testbenches in Python.
See http://cocotb.readthedocs.org for full documentation
"""
import os
import sys
import logging
import threading
import random
import time
import cocotb.handle
from cocotb.scheduler import Scheduler
from cocotb.log import SimLogFormatter, SimBaseLog, SimLog
from cocotb.regression import RegressionManager
# Things we want in the cocotb namespace
from cocotb.decorators import test, coroutine, function, external
# Singleton scheduler instance
# NB this cheekily ensures a singleton since we're replacing the reference
# so that cocotb.scheduler gives you the singleton instance and not the
# scheduler package
# GPI logging instance
# For autodocumentation don't need the extension modules
if "SPHINX_BUILD" not in os.environ:
logging.basicConfig()
logging.setLoggerClass(SimBaseLog)
log = SimLog('cocotb')
level = os.getenv("COCOTB_LOG_LEVEL", "INFO")
try:
_default_log = getattr(logging, level)
    except AttributeError:
        log.error("Unable to set logging level to %s" % level)
_default_log = logging.INFO
log.setLevel(_default_log)
loggpi = SimLog('cocotb.gpi')
scheduler = Scheduler()
regression = None
plusargs = {}
# To save typing provide an alias to scheduler.add
fork = scheduler.add
# FIXME is this really required?
_rlock = threading.RLock()
def mem_debug(port):
import cocotb.memdebug
cocotb.memdebug.start(port)
def _initialise_testbench(root_name):
"""
This function is called after the simulator has elaborated all
entities and is ready to run the test.
    The test to run must be defined by the MODULE and TESTCASE
    environment variables.
"""
_rlock.acquire()
memcheck_port = os.getenv('MEMCHECK')
if memcheck_port is not None:
mem_debug(int(memcheck_port))
exec_path = os.getenv('SIM_ROOT')
if exec_path is None:
exec_path = 'Unknown'
version = os.getenv('VERSION')
if version is None:
log.info("Unable to determine Cocotb version from %s" % exec_path)
else:
log.info("Running tests with Cocotb v%s from %s" %
(version, exec_path))
# Create the base handle type
process_plusargs()
# Seed the Python random number generator to make this repeatable
seed = os.getenv('RANDOM_SEED')
if seed is None:
if 'ntb_random_seed' in plusargs:
seed = eval(plusargs['ntb_random_seed'])
elif 'seed' in plusargs:
seed = eval(plusargs['seed'])
else:
seed = int(time.time())
log.info("Seeding Python random module with %d" % (seed))
else:
seed = int(seed)
log.info("Seeding Python random module with supplied seed %d" % (seed))
random.seed(seed)
module_str = os.getenv('MODULE')
test_str = os.getenv('TESTCASE')
if not module_str:
raise ImportError("Environment variables defining the module(s) to \
execute not defined. MODULE=\"%s\"\"" % (module_str))
modules = module_str.split(',')
global regression
regression = RegressionManager(root_name, modules, tests=test_str)
regression.initialise()
regression.execute()
_rlock.release()
return True
def _sim_event(level, message):
"""Function that can be called externally to signal an event"""
SIM_INFO = 0
SIM_TEST_FAIL = 1
SIM_FAIL = 2
from cocotb.result import TestFailure, SimFailure
if level is SIM_TEST_FAIL:
scheduler.log.error("Failing test at simulator request")
scheduler.finish_test(TestFailure("Failure from external source: %s" %
message))
elif level is SIM_FAIL:
# We simply return here as the simulator will exit
# so no cleanup is needed
msg = ("Failing test at simulator request before test run completion: "
"%s" % message)
scheduler.log.error(msg)
scheduler.finish_scheduler(SimFailure(msg))
else:
scheduler.log.error("Unsupported sim event")
return True
def process_plusargs():
global plusargs
plusargs = {}
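    # e.g. "+seed=42" becomes plusargs["seed"] = "42", while a bare
    # "+verbose" becomes plusargs["verbose"] = True.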
for option in cocotb.argv:
if option.startswith('+'):
if option.find('=') != -1:
(name, value) = option[1:].split('=')
plusargs[name] = value
else:
plusargs[option[1:]] = True
| bsd-3-clause | 391,953,007,439,274,500 | 31.126984 | 80 | 0.687747 | false |
Esri/solutions-raster-functions | scripts/WindChillNonUV.py | 1 | 6467 | # ----------------------------------------------------------------------------------
# Copyright 2015 Esri
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------------
# Name: Windchill_non_uv
# Description: Raster function that calculates wind chill using a single variable for windspeed.
# Date Edited: 24/03/2015
#-----------------------------------------------------------------------------------
import numpy as np
class Windchill_non_uv():
def __init__(self):
self.name = "Wind Chill Function"
self.description = "This function computes wind chill on the Fahrenheit scale given wind speed and air temperature."
self.tempunits = "celsius"
self.windunits = "mps"
def getParameterInfo(self):
return [
{
'name': 'temperature', # Needs to be edited by user to match name of varaiable in their dataset
'dataType': 'raster',
'value': None,
'required': True,
'displayName': "Temperature Raster",
'description': "A single-band raster where pixel values represent ambient air temperature in Fahrenheit."
},
{
'name': 'units',
'dataType': 'string',
'value': 'Kelvin', # Needs to be edited by the user to match what their units are for the temperature variable.
'required': True,
'domain': ('Celsius', 'Fahrenheit', 'Kelvin'),
'displayName': "Temperature Measured In",
'description': "The unit of measurement associated with the temperature raster."
},
{
'name': 'units2',
'dataType': 'string',
'value': 'mps', # Needs to be edited by the user to match what their units are for the wind speed variable.
'required': True,
'domain': ('mps', 'mph', 'kmph', 'knots'),
'displayName': "Temperature Measured In",
'description': "The unit of measurement associated with the temperature raster."
},
{
'name': 'ws', # Needs to be edited by user to match name of varaiable in their dataset
'dataType': 'raster',
'value': None,
'required': True,
'displayName': "Wind-speed Raster",
'description': "A single-band raster where pixel values represent wind speed measured in miles per hour."
},
]
def getConfiguration(self, **scalars):
return {
'inheritProperties': 4 | 8, # inherit all but the pixel type and NoData from the input raster
'invalidateProperties': 2 | 4 | 8, # invalidate statistics & histogram on the parent dataset because we modify pixel values.
'inputMask': False # Don't need input raster mask in .updatePixels(). Simply use the inherited NoData.
}
def updateRasterInfo(self, **kwargs):
kwargs['output_info']['bandCount'] = 1 # output is a single band raster
        kwargs['output_info']['statistics'] = ({'minimum': -90, 'maximum': 40}, )  # expected range of output wind-chill values
kwargs['output_info']['histogram'] = () # we know nothing about the histogram of the outgoing raster.
kwargs['output_info']['pixelType'] = 'f4'
# Getting and then setting the Temprature Units for use later
if kwargs.get('units').lower() == 'celsius':
self.tempunits = 'celsius'
        elif kwargs.get('units').lower() == 'fahrenheit':
            self.tempunits = 'fahrenheit'
elif kwargs.get('units').lower() == 'kelvin':
self.tempunits = 'kelvin'
# Getting and then setting the Windspeed Units for use later
if kwargs.get('units2').lower() == 'mps':
self.windunits = 'mps'
elif kwargs.get('units2').lower() == 'mph':
self.windunits = 'mph'
elif kwargs.get('units2').lower() == 'kmph':
self.windunits = 'kmph'
elif kwargs.get('units2').lower() == 'knots':
self.windunits = 'knots'
#self.doConversion = bool(kwargs.get('units', 'Fahrenheit').lower() == 'Celsius')
return kwargs
def updatePixels(self, tlc, size, props, **pixelBlocks):
ws = np.array(pixelBlocks['ws_pixels'], dtype='f4')
t = np.array(pixelBlocks['temperature_pixels'], dtype='f4')
# Using the temperature variable generated earlier to know if a calculation is needed to turn the temp into degrees F
if self.tempunits.lower() == "celsius":
t = (9.0/5.0 * t) + 32.0
elif self.tempunits.lower() == "kelvin":
t = ((((t)-273.15)*1.8000) +32.00)
else:
t = t
# Using the windspeed variable generated earlier to know if a calculation is needed to turn the windspeed into mph
if self.windunits.lower() == "mps":
ws = ws * 2.2369362920544
elif self.windunits.lower() == "kmph":
ws = ws * 0.621371
elif self.windunits() == "knots"
ws = ws * 1.15078
else:
ws = ws
ws16 = np.power(ws, 0.16)
outBlock = 35.74 + (0.6215 * t) - (35.75 * ws16) + (0.4275 * t * ws16)
pixelBlocks['output_pixels'] = outBlock.astype(props['pixelType'])
return pixelBlocks
def updateKeyMetadata(self, names, bandIndex, **keyMetadata):
if bandIndex == -1:
keyMetadata['datatype'] = 'Scientific'
keyMetadata['datatype'] = 'Windchill'
elif bandIndex == 0:
keyMetadata['wavelengthmin'] = None # reset inapplicable band-specific key metadata
keyMetadata['wavelengthmax'] = None
keyMetadata['bandname'] = 'Windchill'
return keyMetadata
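
# The helper below is an illustrative, standalone sketch of the same NWS
# wind-chill formula applied in updatePixels() above. It is not part of the
# raster-function interface and assumes inputs already converted to degrees
# Fahrenheit and miles per hour.
def _wind_chill_fahrenheit_sketch(t, ws):
    """Return NWS wind chill for temperature t (F) and wind speed ws (mph)."""
    ws16 = np.power(ws, 0.16)
    return 35.74 + (0.6215 * t) - (35.75 * ws16) + (0.4275 * t * ws16)

# Example: _wind_chill_fahrenheit_sketch(np.array([5.0]), np.array([20.0]))
# yields roughly -15 F, matching the published NWS wind-chill table.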
| apache-2.0 | -6,303,209,935,558,709,000 | 43.6 | 141 | 0.562394 | false |
osroom/osroom | apps/core/flask/reqparse.py | 1 | 4656 | #!/usr/bin/env python
# -*-coding:utf-8-*-
# @Time : 2017/11/1 ~ 2019/9/1
# @Author : Allen Woo
from flask_babel import gettext
import regex as re
class ArgVerify:
def required(self, **kwargs):
for reqarg in kwargs.get("reqargs"):
if not reqarg[1]:
data = {'msg': gettext('The "{}" cannot be empty').format(
reqarg[0]), 'msg_type': "w", "custom_status": 422}
return False, data
return True, None
def min_len(self, **kwargs):
vr = kwargs.get("vr")
for reqarg in kwargs.get("reqargs"):
if len(reqarg[1]) < vr:
data = {'msg': gettext('The minimum length of "{}" is {} characters').format(
reqarg[0], vr), 'msg_type': "w", "custom_status": 422}
return False, data
return True, None
def max_len(self, **kwargs):
vr = kwargs.get("vr")
for reqarg in kwargs.get("reqargs"):
if len(reqarg[1]) > vr:
data = {'msg': gettext('The maximum length of "{}" is {} characters').format(
reqarg[0], vr), 'msg_type': "w", "custom_status": 422}
return False, data
return True, None
def need_type(self, **kwargs):
vr = kwargs.get("vr")
for reqarg in kwargs.get("reqargs"):
if not isinstance(reqarg[1], vr):
data = {'msg': gettext('"{}" needs to be of type {}').format(
reqarg[0], vr.__name__), 'msg_type': "w", "custom_status": 422}
return False, data
return True, None
def only(self, **kwargs):
vr = kwargs.get("vr")
for reqarg in kwargs.get("reqargs"):
if not reqarg[1] in kwargs.get("vr"):
data = {
'msg': gettext('The value of parameter "{}" can only be one of "{}"').format(
reqarg[0],
",".join(vr)),
'msg_type': "w",
"custom_status": 422}
return False, data
return True, None
def can_not(self, **kwargs):
vr = kwargs.get("vr")
for reqarg in kwargs.get("reqargs"):
if reqarg[1] in vr:
data = {'msg': gettext('The value of parameter "{}" can not be "{}"').format(
reqarg[0], ",".join(vr)), 'msg_type': "w", "custom_status": 422}
return False, data
return True, None
def allowed_type(self, **kwargs):
vr = kwargs.get("vr")
for reqarg in kwargs.get("reqargs"):
if type(reqarg[1]) not in vr:
data = {
'msg': gettext('Parameter {} can only be of the following type: "{}"').format(
reqarg[0],
",".join(vr)),
'msg_type': 'error',
"custom_status": 422}
return False, data
return True, None
def regex_rule(self, **kwargs):
vr = kwargs.get("vr")
if vr["is_match"]:
for reqarg in kwargs.get("reqargs"):
if not re.search(vr["rule"], reqarg[1]):
return False, {
'msg': gettext('The value of parameter "{}" is illegal').format(
reqarg[0]), 'msg_type': "w", "custom_status": 422}
else:
for reqarg in kwargs.get("reqargs"):
if re.search(vr["rule"], reqarg[1]):
return False, {
'msg': gettext('The value of parameter "{}" is illegal').format(
reqarg[0]), 'msg_type': "w", "custom_status": 422}
return True, None
arg_ver = ArgVerify()
def arg_verify(reqargs=[], **kwargs):
"""
    :param reqargs: a list of tuples, e.g. [(arg_key, arg_value)]
    :param required: bool, True means the value must not be empty
    :param min_len: int, minimum length
    :param max_len: int, maximum length
    :param need_type: required type, e.g. int, dict, list, tuple
    :param only: list, the value must be one of the elements of "only"
    :param can_not: list, the value must not be any of the elements of "can_not"
    :param allowed_type: list, the value's type must be one of the types in "allowed_type"
    :param regex_rule: e.g. {"rule": r".*", "is_match": True}
        is_match: True means the value must match the rule,
        False means it must not match the rule
    :param kwargs:
    :return: verification status, verification message
"""
for k, v in kwargs.items():
s, r = getattr(arg_ver, k)(reqargs=reqargs, vr=v)
if not s:
return s, r
return True, None
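
# Illustrative usage sketch (hypothetical argument values, not part of this
# module):
#
#     ok, err = arg_verify(
#         reqargs=[("username", "ab")],
#         required=True,
#         min_len=3,
#     )
#     # ok is False; err carries the gettext message saying "username"
#     # must be at least 3 characters long, plus custom_status 422.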
| bsd-2-clause | 7,536,587,753,218,895,000 | 34.322835 | 98 | 0.488185 | false |
bruckhaus/challenges | python_challenges/set_game.py | 1 | 3414 | import pprint as pp
import random
class SetGame:
interactive_mode = False
NUM_CARDS_IN_DECK = 81
NUM_CARDS_IN_HAND = 12
NUM_ATTRIBUTES = 4
COUNTS = [1, 2, 3]
FILLS = ['empty', 'striped', 'full']
COLORS = ['red', 'green', 'blue']
SHAPES = ['diamond', 'squiggly', 'oval']
deck = []
hand = []
triplet = None
def __init__(self):
self.triplet = [0, 0, 0]
def play(self):
self.make_deck()
self.deal_hand()
self.check_hand()
def make_deck(self):
self.deck = []
for count in self.COUNTS:
for fill in self.FILLS:
for color in self.COLORS:
for shape in self.SHAPES:
card = [count, fill, color, shape]
self.deck.append(card)
if self.interactive_mode:
print "\nDeck:"
pp.pprint(self.deck)
return self.deck
def deal_hand(self):
for i in range(self.NUM_CARDS_IN_HAND):
r = random.randint(0, self.NUM_CARDS_IN_DECK - 1 - i)
card = self.deck[r]
self.hand.append(card)
self.deck.remove(card)
if self.interactive_mode:
print "\nHand:"
pp.pprint(self.hand)
return self.hand
def check_hand(self):
matches = []
if self.interactive_mode:
print "\nMatches:"
self.next_valid_triplet()
while self.triplet:
if self.check_match():
matches.append(self.triplet[:])
if self.interactive_mode:
self.show_triplet()
self.next_valid_triplet()
return matches
def check_match(self):
for p in range(self.NUM_ATTRIBUTES):
if not (self.all_same(p) or self.all_different(p)):
return False
return True
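    # Illustrative example of the rule implemented above (not executed):
    # the cards
    #     [1, 'empty', 'red', 'oval']
    #     [2, 'empty', 'green', 'oval']
    #     [3, 'empty', 'blue', 'oval']
    # form a set: counts all different, fills all same, colors all
    # different, shapes all same. An attribute that is neither all-same
    # nor all-different breaks the match.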
def all_same(self, p):
t = self.triplet
t1 = self.hand[t[0]]
t2 = self.hand[t[1]]
t3 = self.hand[t[2]]
return t1[p] == t2[p] and t2[p] == t3[p]
def all_different(self, p):
t = self.triplet
t1 = self.hand[t[0]]
t2 = self.hand[t[1]]
t3 = self.hand[t[2]]
return t1[p] != t2[p] and t2[p] != t3[p] and t1[p] != t3[p]
def show_triplet(self):
print " ", self.triplet
print " ", self.hand[self.triplet[0]]
print " ", self.hand[self.triplet[1]]
print " ", self.hand[self.triplet[2]]
def next_valid_triplet(self):
while True:
self.next_triplet()
if (not self.triplet) or self.is_triplet_valid():
break
def next_triplet(self):
for p in reversed(range(3)):
if self.triplet[p] < self.NUM_CARDS_IN_HAND - 1:
self.triplet[p] += 1
return
else:
self.triplet[p] = 0
self.triplet = None
def is_triplet_valid(self):
t = self.triplet
# can't choose same card twice:
if t[0] == t[1] or t[1] == t[2]:
return False
# order of cards is not significant: allow only smallest sort order of each combination and reject others:
if t[0] > t[1] or t[1] > t[2]:
return False
return True
if __name__ == '__main__':
game = SetGame()
game.interactive_mode = True
game.play() | mit | 5,268,973,161,389,575,000 | 27.940678 | 114 | 0.503222 | false |
odoousers2014/odoo_addons-2 | clv_place/__openerp__.py | 1 | 2321 | # -*- encoding: utf-8 -*-
################################################################################
# #
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
################################################################################
{
'name': 'Place',
'version': '1.0',
'author': 'Carlos Eduardo Vercelino - CLVsol',
'category': 'Generic Modules/Others',
'license': 'AGPL-3',
'website': 'http://clvsol.com',
'description': '''
Place
=====
''',
'depends': [
'clv_base',
'clv_tag',
'clv_annotation',
],
'data': [
'security/clv_place_security.xml',
'security/ir.model.access.csv',
'clv_place_view.xml',
'category/clv_place_category_view.xml',
'clv_tag/clv_tag_view.xml',
'clv_annotation/clv_annotation_view.xml',
'seq/clv_place_seq_view.xml',
'seq/clv_place_sequence.xml',
'seq/clv_place_category_sequence.xml',
'wkf/clv_place_workflow.xml',
'wkf/clv_place_wkf_view.xml',
'history/clv_place_history_view.xml',
],
'test': [],
'installable': True,
'active': False,
}
| agpl-3.0 | -8,132,136,832,875,332,000 | 42.792453 | 80 | 0.453253 | false |
beni55/flocker | flocker/node/_deploy.py | 1 | 18591 | # Copyright Hybrid Logic Ltd. See LICENSE file for details.
# -*- test-case-name: flocker.node.test.test_deploy -*-
"""
Deploy applications on nodes.
"""
from zope.interface import Interface, implementer
from characteristic import attributes
from twisted.internet.defer import gatherResults, fail, DeferredList, succeed
from twisted.python.filepath import FilePath
from .gear import GearClient, PortMap, GearEnvironment
from ._model import (
Application, VolumeChanges, AttachedVolume, VolumeHandoff,
)
from ..route import make_host_network, Proxy
from ..volume._ipc import RemoteVolumeManager
from ..common._ipc import ProcessNode
# Path to SSH private key available on nodes and used to communicate
# across nodes.
# XXX duplicate of same information in flocker.cli:
# https://github.com/ClusterHQ/flocker/issues/390
SSH_PRIVATE_KEY_PATH = FilePath(b"/etc/flocker/id_rsa_flocker")
@attributes(["running", "not_running"])
class NodeState(object):
"""
The current state of a node.
:ivar running: A ``list`` of ``Application`` instances on this node
that are currently running or starting up.
:ivar not_running: A ``list`` of ``Application`` instances on this
node that are currently shutting down or stopped.
"""
class IStateChange(Interface):
"""
An operation that changes the state of the local node.
"""
def run(deployer):
"""
Run the change.
:param Deployer deployer: The ``Deployer`` to use.
:return: ``Deferred`` firing when the change is done.
"""
def __eq__(other):
"""
Return whether this change is equivalent to another.
"""
def __ne__(other):
"""
Return whether this change is not equivalent to another.
"""
@implementer(IStateChange)
@attributes(["changes"])
class Sequentially(object):
"""
Run a series of changes in sequence, one after the other.
Failures in earlier changes stop later changes.
"""
def run(self, deployer):
d = succeed(None)
for change in self.changes:
d.addCallback(lambda _, change=change: change.run(deployer))
return d
@implementer(IStateChange)
@attributes(["changes"])
class InParallel(object):
"""
Run a series of changes in parallel.
Failures in one change do not prevent other changes from continuing.
"""
def run(self, deployer):
return gatherResults((change.run(deployer) for change in self.changes),
consumeErrors=True)
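
# Illustrative composition sketch (hypothetical applications, not part of
# the deployer API): stop two containers in parallel, then start a new one.
#
#     change = Sequentially(changes=[
#         InParallel(changes=[StopApplication(application=old_app)
#                             for old_app in (app_a, app_b)]),
#         StartApplication(application=new_app),
#     ])
#     d = change.run(deployer)  # fires once the whole sequence is done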
@implementer(IStateChange)
@attributes(["application"])
class StartApplication(object):
"""
Launch the supplied application as a gear unit.
:ivar Application application: The ``Application`` to create and
start.
"""
def run(self, deployer):
application = self.application
if application.volume is not None:
volume = deployer.volume_service.get(application.volume.name)
d = volume.expose_to_docker(application.volume.mountpoint)
else:
d = succeed(None)
if application.ports is not None:
port_maps = map(lambda p: PortMap(internal_port=p.internal_port,
external_port=p.external_port),
application.ports)
else:
port_maps = []
if application.environment is not None:
environment = GearEnvironment(
id=application.name,
variables=application.environment)
else:
environment = None
d.addCallback(lambda _: deployer.gear_client.add(
application.name,
application.image.full_name,
ports=port_maps,
environment=environment
))
return d
@implementer(IStateChange)
@attributes(["application"])
class StopApplication(object):
"""
Stop and disable the given application.
:ivar Application application: The ``Application`` to stop.
"""
def run(self, deployer):
application = self.application
unit_name = application.name
result = deployer.gear_client.remove(unit_name)
def unit_removed(_):
if application.volume is not None:
volume = deployer.volume_service.get(application.volume.name)
return volume.remove_from_docker()
result.addCallback(unit_removed)
return result
@implementer(IStateChange)
@attributes(["volume"])
class CreateVolume(object):
"""
Create a new locally-owned volume.
:ivar AttachedVolume volume: Volume to create.
"""
def run(self, deployer):
return deployer.volume_service.create(self.volume.name)
@implementer(IStateChange)
@attributes(["volume"])
class WaitForVolume(object):
"""
Wait for a volume to exist and be owned locally.
:ivar AttachedVolume volume: Volume to wait for.
"""
def run(self, deployer):
return deployer.volume_service.wait_for_volume(self.volume.name)
@implementer(IStateChange)
@attributes(["volume", "hostname"])
class HandoffVolume(object):
"""
A volume handoff that needs to be performed from this node to another
node.
See :cls:`flocker.volume.VolumeService.handoff` for more details.
:ivar AttachedVolume volume: The volume to hand off.
:ivar bytes hostname: The hostname of the node to which the volume is
meant to be handed off.
"""
def run(self, deployer):
service = deployer.volume_service
destination = ProcessNode.using_ssh(
self.hostname, 22, b"root",
SSH_PRIVATE_KEY_PATH)
return service.handoff(service.get(self.volume.name),
RemoteVolumeManager(destination))
@implementer(IStateChange)
@attributes(["ports"])
class SetProxies(object):
"""
Set the ports which will be forwarded to other nodes.
:ivar ports: A collection of ``Port`` objects.
"""
def run(self, deployer):
results = []
# XXX: Errors in these operations should be logged. See
# https://github.com/ClusterHQ/flocker/issues/296
# XXX: The proxy manipulation operations are blocking. Convert to a
# non-blocking API. See https://github.com/ClusterHQ/flocker/issues/320
for proxy in deployer.network.enumerate_proxies():
try:
deployer.network.delete_proxy(proxy)
            except Exception:
results.append(fail())
for proxy in self.ports:
try:
deployer.network.create_proxy_to(proxy.ip, proxy.port)
            except Exception:
results.append(fail())
return DeferredList(results, fireOnOneErrback=True, consumeErrors=True)
class Deployer(object):
"""
Start and stop applications.
:ivar VolumeService volume_service: The volume manager for this node.
:ivar IGearClient gear_client: The gear client API to use in
deployment operations. Default ``GearClient``.
:ivar INetwork network: The network routing API to use in
deployment operations. Default is iptables-based implementation.
"""
def __init__(self, volume_service, gear_client=None, network=None):
if gear_client is None:
gear_client = GearClient(hostname=u'127.0.0.1')
self.gear_client = gear_client
if network is None:
network = make_host_network()
self.network = network
self.volume_service = volume_service
def discover_node_configuration(self):
"""
List all the ``Application``\ s running on this node.
:returns: A ``Deferred`` which fires with a ``NodeState``
instance.
"""
volumes = self.volume_service.enumerate()
volumes.addCallback(lambda volumes: set(
volume.name for volume in volumes
if volume.uuid == self.volume_service.uuid))
d = gatherResults([self.gear_client.list(), volumes])
def applications_from_units(result):
units, available_volumes = result
running = []
not_running = []
for unit in units:
# XXX: The container_image will be available on the
# Unit when
# https://github.com/ClusterHQ/flocker/issues/207 is
# resolved.
if unit.name in available_volumes:
# XXX Mountpoint is not available, see
# https://github.com/ClusterHQ/flocker/issues/289
volume = AttachedVolume(name=unit.name, mountpoint=None)
else:
volume = None
application = Application(name=unit.name,
volume=volume)
if unit.activation_state in (u"active", u"activating"):
running.append(application)
else:
not_running.append(application)
return NodeState(running=running, not_running=not_running)
d.addCallback(applications_from_units)
return d
def calculate_necessary_state_changes(self, desired_state,
current_cluster_state, hostname):
"""
Work out which changes need to happen to the local state to match
the given desired state.
Currently this involves the following phases:
1. Change proxies to point to new addresses (should really be
last, see https://github.com/ClusterHQ/flocker/issues/380)
2. Stop all relevant containers.
3. Handoff volumes.
4. Wait for volumes.
5. Create volumes.
6. Start and restart any relevant containers.
:param Deployment desired_state: The intended configuration of all
nodes.
:param Deployment current_cluster_state: The current configuration
of all nodes. While technically this also includes the current
node's state, this information may be out of date so we check
again to ensure we have absolute latest information.
:param unicode hostname: The hostname of the node that this is running
on.
:return: A ``Deferred`` which fires with a ``IStateChange``
provider.
"""
phases = []
desired_proxies = set()
desired_node_applications = []
for node in desired_state.nodes:
if node.hostname == hostname:
desired_node_applications = node.applications
else:
for application in node.applications:
for port in application.ports:
# XXX: also need to do DNS resolution. See
# https://github.com/ClusterHQ/flocker/issues/322
desired_proxies.add(Proxy(ip=node.hostname,
port=port.external_port))
if desired_proxies != set(self.network.enumerate_proxies()):
phases.append(SetProxies(ports=desired_proxies))
d = self.discover_node_configuration()
def find_differences(current_node_state):
current_node_applications = current_node_state.running
all_applications = (current_node_state.running +
current_node_state.not_running)
# Compare the applications being changed by name only. Other
# configuration changes aren't important at this point.
current_state = {app.name for app in current_node_applications}
desired_local_state = {app.name for app in
desired_node_applications}
not_running = {app.name for app in current_node_state.not_running}
# Don't start applications that exist on this node but aren't
# running; instead they should be restarted:
start_names = desired_local_state.difference(
current_state | not_running)
stop_names = {app.name for app in all_applications}.difference(
desired_local_state)
start_containers = [
StartApplication(application=app)
for app in desired_node_applications
if app.name in start_names
]
stop_containers = [
StopApplication(application=app) for app in all_applications
if app.name in stop_names
]
restart_containers = [
Sequentially(changes=[StopApplication(application=app),
StartApplication(application=app)])
for app in desired_node_applications
if app.name in not_running
]
# Find any applications with volumes that are moving to or from
# this node - or that are being newly created by this new
# configuration.
volumes = find_volume_changes(hostname, current_cluster_state,
desired_state)
if stop_containers:
phases.append(InParallel(changes=stop_containers))
if volumes.going:
phases.append(InParallel(changes=[
HandoffVolume(volume=handoff.volume,
hostname=handoff.hostname)
for handoff in volumes.going]))
if volumes.coming:
phases.append(InParallel(changes=[
WaitForVolume(volume=volume)
for volume in volumes.coming]))
if volumes.creating:
phases.append(InParallel(changes=[
CreateVolume(volume=volume)
for volume in volumes.creating]))
start_restart = start_containers + restart_containers
if start_restart:
phases.append(InParallel(changes=start_restart))
d.addCallback(find_differences)
d.addCallback(lambda _: Sequentially(changes=phases))
return d
def change_node_state(self, desired_state,
current_cluster_state,
hostname):
"""
Change the local state to match the given desired state.
:param Deployment desired_state: The intended configuration of all
nodes.
:param Deployment current_cluster_state: The current configuration
of all nodes.
:param unicode hostname: The hostname of the node that this is running
on.
:return: ``Deferred`` that fires when the necessary changes are done.
"""
d = self.calculate_necessary_state_changes(
desired_state=desired_state,
current_cluster_state=current_cluster_state,
hostname=hostname)
d.addCallback(lambda change: change.run(self))
return d
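    # Illustrative call sketch (hypothetical objects, not part of the API):
    #
    #     d = deployer.change_node_state(
    #         desired_state=desired_deployment,
    #         current_cluster_state=current_deployment,
    #         hostname=u"node1.example.com",
    #     )
    #     # The Deferred fires once proxies, volumes and containers have
    #     # been brought in line with the desired configuration.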
def find_volume_changes(hostname, current_state, desired_state):
"""
Find what actions need to be taken to deal with changes in volume
location between current state and desired state of the cluster.
XXX The logic here assumes the mountpoints have not changed,
and will act unexpectedly if that is the case. See
https://github.com/ClusterHQ/flocker/issues/351 for more details.
XXX The logic here assumes volumes are never added or removed to
existing applications, merely moved across nodes. As a result test
coverage for those situations is not implemented. See
https://github.com/ClusterHQ/flocker/issues/352 for more details.
XXX Comparison is done via volume name, rather than AttachedVolume
objects, until https://github.com/ClusterHQ/flocker/issues/289 is fixed.
:param unicode hostname: The name of the node for which to find changes.
:param Deployment current_state: The old state of the cluster on which the
changes are based.
:param Deployment desired_state: The new state of the cluster towards which
the changes are working.
"""
desired_volumes = {node.hostname: set(application.volume for application
in node.applications
if application.volume)
for node in desired_state.nodes}
current_volumes = {node.hostname: set(application.volume for application
in node.applications
if application.volume)
for node in current_state.nodes}
local_desired_volumes = desired_volumes.get(hostname, set())
local_desired_volume_names = set(volume.name for volume in
local_desired_volumes)
local_current_volume_names = set(volume.name for volume in
current_volumes.get(hostname, set()))
remote_current_volume_names = set()
for volume_hostname, current in current_volumes.items():
if volume_hostname != hostname:
remote_current_volume_names |= set(
volume.name for volume in current)
# Look at each application volume that is going to be running
# elsewhere and is currently running here, and add a VolumeHandoff for
# it to `going`.
going = set()
for volume_hostname, desired in desired_volumes.items():
if volume_hostname != hostname:
for volume in desired:
if volume.name in local_current_volume_names:
going.add(VolumeHandoff(volume=volume,
hostname=volume_hostname))
# Look at each application volume that is going to be started on this
# node. If it was running somewhere else, we want that Volume to be
# in `coming`.
coming_names = local_desired_volume_names.intersection(
remote_current_volume_names)
coming = set(volume for volume in local_desired_volumes
if volume.name in coming_names)
# For each application volume that is going to be started on this node
# that was not running anywhere previously, make sure that Volume is
# in `creating`.
creating_names = local_desired_volume_names.difference(
local_current_volume_names | remote_current_volume_names)
creating = set(volume for volume in local_desired_volumes
if volume.name in creating_names)
return VolumeChanges(going=going, coming=coming, creating=creating)
| apache-2.0 | 2,322,558,771,706,020,000 | 36.709939 | 79 | 0.612232 | false |
superbob/YunoSeeMe | test_geometry.py | 1 | 1158 | """
Tests for the geometry module
"""
import geometry
EPSILON = 0.001
EPSILON_L = 0.0000001
EPSILON_H = 0.1
def test_half_central_angle():
expected = 0.0016830423969495
actual = geometry.half_central_angle(0.76029552909832, 0.0252164472196439, 0.76220881138424, 0.0213910869250003)
assert abs(expected - actual) <= EPSILON_L
def test_central_angle():
expected = 0.003366084793899
actual = geometry.central_angle(0.76029552909832, 0.0252164472196439, 0.76220881138424, 0.0213910869250003)
assert abs(expected - actual) <= EPSILON_L
def test_quadratic_mean():
expected = 6367453.627
actual = geometry.quadratic_mean(geometry.EQUATORIAL_RADIUS, geometry.POLAR_RADIUS)
assert abs(expected - actual) <= EPSILON_H
def test_distance_between_wgs84_coordinates():
expected = 21433.388831
actual = geometry.distance_between_wgs84_coordinates(43.561725, 1.444796, 43.671348, 1.225619)
assert abs(expected - actual) <= EPSILON
def test_overhead_height():
expected = 2.731679321737121
actual = geometry.overhead_height(0.00092629, geometry.EARTH_RADIUS)
assert abs(expected - actual) <= EPSILON
| bsd-2-clause | 2,468,521,084,359,345,000 | 28.692308 | 116 | 0.729706 | false |
Informatic/python-ddcci | qddccigui.py | 1 | 3058 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
PyQt4 DDC/CI GUI, python-ddcci example
"""
import sys
import ddcci
import os
from PyQt4 import QtGui, QtCore
from PyKDE4.kdeui import KStatusNotifierItem
script_path = os.path.dirname(os.path.realpath(os.path.abspath(__file__)))
assets_path = os.path.join(script_path, 'assets')
def asset(name):
return os.path.join(assets_path, name)
class QDDCCIGui(QtGui.QWidget):
controls = [{
'tag': 'brightness',
'name': 'Brightness',
'id': 0x10,
}, {
'tag': 'contrast',
        'name': 'Contrast',
'id': 0x12,
}]
scroll_control = controls[1]
def __init__(self, busid):
super(QDDCCIGui, self).__init__()
self.device = ddcci.DDCCIDevice(busid)
self.init_ui()
def init_ui(self):
grid = QtGui.QGridLayout()
grid.setSpacing(2)
for i, control in enumerate(self.controls):
icon = QtGui.QLabel(self)
icon.setPixmap(QtGui.QPixmap(asset('%s.png' % control['tag'])))
icon.setToolTip(control['name'])
grid.addWidget(icon, i+1, 0)
label = QtGui.QLabel(self)
label.setMinimumWidth(32)
label.setAlignment(QtCore.Qt.AlignVCenter | QtCore.Qt.AlignRight)
grid.addWidget(label, i+1, 1)
sld = QtGui.QSlider(QtCore.Qt.Horizontal, self)
sld.label = label
sld.control = control
value, max_value = self.device.read(control['id'], True)
sld.setMinimum(0)
sld.setMaximum(max_value)
sld.setValue(value)
self.update_label(sld)
sld.setMinimumWidth(150)
sld.setFocusPolicy(QtCore.Qt.NoFocus)
sld.valueChanged[int].connect(self.change_value)
control['slider'] = sld # FIXME circular reference
grid.addWidget(sld, i+1, 2)
self.setLayout(grid)
self.setGeometry(300, 300, 280, 70)
self.setWindowTitle('Qt DDC/CI Gui')
self.show()
if self.scroll_control:
self.tray_icon = KStatusNotifierItem("qddccigui", self)
self.tray_icon.setIconByPixmap(QtGui.QIcon(QtGui.QPixmap(
asset('%s.png' % self.scroll_control['tag']))))
self.tray_icon.scrollRequested[int, QtCore.Qt.Orientation].\
connect(self.scroll_requested)
def change_value(self, value, update=True):
self.update_label(self.sender())
if update:
self.device.write(self.sender().control['id'], value)
def scroll_requested(self, delta, orientation):
new_value = self.scroll_control['slider'].value() + delta/24
self.scroll_control['slider'].setValue(new_value)
def update_label(self, sld):
sld.label.setText('%d%%' % sld.value())
def main():
app = QtGui.QApplication(sys.argv)
argv = app.arguments()
ex = QDDCCIGui(int(argv[1]) if len(argv) > 1 else 8)
sys.exit(app.exec_())
if __name__ == '__main__':
main()
| mit | 294,655,486,794,354,240 | 27.055046 | 77 | 0.584696 | false |
yaubi/InsightBlackjack | blackjack/tests/test_player.py | 1 | 2794 | """Unit-tests for blackjack/player.py module."""
import unittest
import unittest.mock
import blackjack.card
import blackjack.player
class TestPlayer(unittest.TestCase):
def setUp(self):
self.deck = blackjack.card.Deck()
self.player = blackjack.player.Player('John', 5)
self.player.hand = blackjack.card.Hand()
def test_str(self):
self.assertEqual(str(self.player), self.player.name)
def test_bet(self):
chip_count_before = self.player.chip_count
self.player.bet(1)
self.assertEqual(self.player.hand.wager, 1)
self.assertEqual(self.player.chip_count, chip_count_before - 1)
def test_bet_too_much(self):
with self.assertRaises(blackjack.player.NoEnoughChip):
self.player.bet(10)
def test_earn(self):
chip_count_before = self.player.chip_count
self.player.earn(1)
self.assertEqual(self.player.chip_count, chip_count_before + 1)
def test_drop_hand(self):
self.player.drop_hand()
self.assertEqual(self.player.hand, None)
def test_repr_with_no_card(self):
self.player.hand = None
txt = repr(self.player)
self.assertIn(self.player.name, txt)
self.assertIn('0', txt)
self.assertIn(str(self.player.chip_count), txt)
def test_repr_with_cards(self):
self.player.hand.add_card(self.deck[0])
txt = repr(self.player)
self.assertIn(self.player.name, txt)
self.assertIn('1', txt)
self.assertIn(str(self.player.chip_count), txt)
class TestDealer(unittest.TestCase):
def setUp(self):
self.deck = blackjack.card.Deck()
self.dealer = blackjack.player.Dealer('Artur')
def test_str(self):
self.assertIn(self.dealer.name, str(self.dealer))
def test_str_without_name(self):
dealer = blackjack.player.Dealer()
self.assertEqual(str(dealer), '')
def test_drop_hand(self):
self.dealer.hand = blackjack.card.Hand()
self.dealer.drop_hand()
self.assertEqual(self.dealer.hand, None)
def test_repr_with_no_card(self):
self.assertIn('0', repr(self.dealer))
def test_repr_with_cards(self):
self.dealer.hand = blackjack.card.Hand()
self.dealer.hand.add_card(self.deck[0])
self.assertIn('1', repr(self.dealer))
class TestTable(unittest.TestCase):
def setUp(self):
deck = blackjack.card.Deck()
shoe = blackjack.card.Shoe(deck)
dealer = blackjack.player.Dealer()
players = [blackjack.player.Player('John', 5)]
self.table = blackjack.player.Table(shoe, dealer, players)
def test_play(self):
game = unittest.mock.Mock()
self.table.play(game)
game.run.assert_called_with(self.table)
| mit | 893,114,321,195,372,500 | 28.723404 | 71 | 0.638153 | false |
wfxiang08/Nuitka | nuitka/codegen/LoaderCodes.py | 1 | 2878 | # Copyright 2015, Kay Hayen, mailto:[email protected]
#
# Part of "Nuitka", an optimizing Python compiler that is compatible and
# integrates with CPython, but also works on its own.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Code to generate and interact with module loaders.
This is for generating the look-up table for the modules included in a binary
or distribution folder.
"""
from .Indentation import indented
from .templates.CodeTemplatesLoader import (
template_metapath_loader_body,
template_metapath_loader_compiled_module_entry,
template_metapath_loader_compiled_package_entry,
template_metapath_loader_shlib_module_entry
)
def getModuleMetapathLoaderEntryCode(module_name, module_identifier,
is_shlib, is_package):
if is_shlib:
assert module_name != "__main__"
assert not is_package
return template_metapath_loader_shlib_module_entry % {
"module_name" : module_name
}
elif is_package:
return template_metapath_loader_compiled_package_entry % {
"module_name" : module_name,
"module_identifier" : module_identifier,
}
else:
return template_metapath_loader_compiled_module_entry % {
"module_name" : module_name,
"module_identifier" : module_identifier,
}
def getMetapathLoaderBodyCode(other_modules):
metapath_loader_inittab = []
metapath_module_decls = []
for other_module in other_modules:
metapath_loader_inittab.append(
getModuleMetapathLoaderEntryCode(
module_name = other_module.getFullName(),
module_identifier = other_module.getCodeName(),
is_shlib = other_module.isPythonShlibModule(),
is_package = other_module.isPythonPackage()
)
)
if not other_module.isPythonShlibModule():
metapath_module_decls.append(
"MOD_INIT_DECL( %s );" % other_module.getCodeName()
)
return template_metapath_loader_body % {
"use_loader" : 1 if other_modules else 0,
"metapath_module_decls" : indented(metapath_module_decls, 0),
"metapath_loader_inittab" : indented(metapath_loader_inittab)
}
| apache-2.0 | -997,178,309,059,129,200 | 35.43038 | 78 | 0.64663 | false |
jcastillocano/python-route53 | route53/connection.py | 1 | 17310 | from lxml import etree
from route53 import xml_parsers, xml_generators
from route53.exceptions import Route53Error
from route53.transport import RequestsTransport
from route53.xml_parsers.common_change_info import parse_change_info
class Route53Connection(object):
"""
Instances of this class are instantiated by the top-level
:py:func:`route53.connect` function, and serve as a high level gateway
to the Route 53 API. The majority of your interaction with these
instances will probably be creating, deleting, and retrieving
:py:class:`HostedZone <route53.hosted_zone.HostedZone>` instances.
.. warning:: Do not instantiate instances of this class yourself.
"""
endpoint_version = '2012-02-29'
"""The date-based API version. Mostly visible for your reference."""
    def __init__(self, aws_access_key_id, aws_secret_access_key,
                 endpoint_version='2012-02-29', **kwargs):
        """
        :param str aws_access_key_id: An account's access key ID.
        :param str aws_secret_access_key: An account's secret access key.
        :keyword str endpoint_version: The date-based API version to use.
        """
self.endpoint_version = endpoint_version
self._endpoint = 'https://route53.amazonaws.com/%s/' % self.endpoint_version
self._xml_namespace = 'https://route53.amazonaws.com/doc/%s/' % self.endpoint_version
self._aws_access_key_id = aws_access_key_id
self._aws_secret_access_key = aws_secret_access_key
if 'transport_class' not in kwargs or kwargs['transport_class'] is None:
self._transport = RequestsTransport(self)
else:
self._transport = kwargs['transport_class'](self)
def _send_request(self, path, data, method):
"""
Uses the HTTP transport to query the Route53 API. Runs the response
through lxml's parser, before we hand it off for further picking
apart by our call-specific parsers.
:param str path: The RESTful path to tack on to the :py:attr:`endpoint`.
:param data: The params to send along with the request.
:type data: Either a dict or bytes, depending on the request type.
:param str method: One of 'GET', 'POST', or 'DELETE'.
:rtype: lxml.etree._Element
:returns: An lxml Element root.
"""
response_body = self._transport.send_request(path, data, method)
root = etree.fromstring(response_body)
return root
def _do_autopaginating_api_call(self, path, params, method, parser_func,
next_marker_xpath, next_marker_param_name,
next_type_xpath=None, parser_kwargs=None):
"""
Given an API method, the arguments passed to it, and a function to
hand parsing off to, loop through the record sets in the API call
until all records have been yielded.
        :param str path: The RESTful path to tack on to the endpoint.
        :param str method: The HTTP method to use ('GET', 'POST' or 'DELETE').
:param dict params: The kwargs from the top-level API method.
:param callable parser_func: A callable that is used for parsing the
output from the API call.
        :param str next_marker_xpath: The XPath to the marker tag that
will determine whether we continue paginating.
:param str next_marker_param_name: The parameter name to manipulate
in the request data to bring up the next page on the next
request loop.
:keyword str next_type_xpath: For the
py:meth:`list_resource_record_sets_by_zone_id` method, there's
an additional paginator token. Specifying this XPath looks for it.
:keyword dict parser_kwargs: Optional dict of additional kwargs to pass
on to the parser function.
:rtype: generator
:returns: Returns a generator that may be returned by the top-level
API method.
"""
if not parser_kwargs:
parser_kwargs = {}
# We loop indefinitely since we have no idea how many "pages" of
# results we're going to have to go through.
while True:
# An lxml Element node.
root = self._send_request(path, params, method)
# Individually yield HostedZone instances after parsing/instantiating.
for record in parser_func(root, connection=self, **parser_kwargs):
yield record
# This will determine at what offset we start the next query.
next_marker = root.find(next_marker_xpath)
if next_marker is None:
# If the NextMarker tag is absent, we know we've hit the
# last page.
break
# if NextMarker is present, we'll adjust our API request params
# and query again for the next page.
params[next_marker_param_name] = next_marker.text
if next_type_xpath:
# This is a _list_resource_record_sets_by_zone_id call. Look
# for the given tag via XPath and adjust our type arg for
# the next request. Without specifying this, we loop
# infinitely.
next_type = root.find(next_type_xpath)
params['type'] = next_type.text
def list_hosted_zones(self, page_chunks=100):
"""
List all hosted zones associated with this connection's account. Since
this method returns a generator, you can pull as many or as few
entries as you'd like, without having to query and receive every
hosted zone you may have.
:keyword int page_chunks: This API call is "paginated" behind-the-scenes
in order to break up large result sets. This number determines
the maximum number of
:py:class:`HostedZone <route53.hosted_zone.HostedZone>`
instances to retrieve per request. The default is fine for almost
everyone.
:rtype: generator
:returns: A generator of :py:class:`HostedZone <route53.hosted_zone.HostedZone>`
instances.
"""
return self._do_autopaginating_api_call(
path='hostedzone',
params={'maxitems': page_chunks},
method='GET',
parser_func=xml_parsers.list_hosted_zones_parser,
next_marker_xpath="./{*}NextMarker",
next_marker_param_name="marker",
)
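    # Illustrative usage sketch (assumes the top-level route53.connect()
    # helper mentioned in the class docstring; credentials are hypothetical):
    #
    #     conn = route53.connect(
    #         aws_access_key_id="AKIA...",
    #         aws_secret_access_key="...",
    #     )
    #     for zone in conn.list_hosted_zones():
    #         print(zone.name)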
def create_hosted_zone(self, name, caller_reference=None, comment=None):
"""
Creates and returns a new hosted zone. Once a hosted zone is created,
its details can't be changed.
:param str name: The name of the hosted zone to create.
:keyword str caller_reference: A unique string that identifies the
request and that allows failed create_hosted_zone requests to be
retried without the risk of executing the operation twice. If no
value is given, we'll generate a Type 4 UUID for you.
:keyword str comment: An optional comment to attach to the zone.
:rtype: tuple
:returns: A tuple in the form of ``(hosted_zone, change_info)``.
The ``hosted_zone`` variable contains a
:py:class:`HostedZone <route53.hosted_zone.HostedZone>`
instance matching the newly created zone, and ``change_info``
is a dict with some details about the API request.
"""
body = xml_generators.create_hosted_zone_writer(
connection=self,
name=name,
caller_reference=caller_reference,
comment=comment
)
root = self._send_request(
path='hostedzone',
data=body,
method='POST',
)
return xml_parsers.created_hosted_zone_parser(
root=root,
connection=self
)
def get_hosted_zone_by_id(self, id):
"""
Retrieves a hosted zone, by hosted zone ID (not name).
:param str id: The hosted zone's ID (a short hash string).
:rtype: :py:class:`HostedZone <route53.hosted_zone.HostedZone>`
:returns: An :py:class:`HostedZone <route53.hosted_zone.HostedZone>`
instance representing the requested hosted zone.
"""
root = self._send_request(
path='hostedzone/%s' % id,
data={},
method='GET',
)
return xml_parsers.get_hosted_zone_by_id_parser(
root=root,
connection=self,
)
def delete_hosted_zone_by_id(self, id):
"""
Deletes a hosted zone, by hosted zone ID (not name).
.. tip:: For most cases, we recommend deleting hosted zones via a
:py:class:`HostedZone <route53.hosted_zone.HostedZone>`
instance's
:py:meth:`HostedZone.delete <route53.hosted_zone.HostedZone.delete>`
method, but this saves an HTTP request if you already know the zone's ID.
.. note:: Unlike
:py:meth:`HostedZone.delete <route53.hosted_zone.HostedZone.delete>`,
this method has no optional ``force`` kwarg.
:param str id: The hosted zone's ID (a short hash string).
:rtype: dict
:returns: A dict of change info, which contains some details about
the request.
"""
root = self._send_request(
path='hostedzone/%s' % id,
data={},
method='DELETE',
)
return xml_parsers.delete_hosted_zone_by_id_parser(
root=root,
connection=self,
)
def _list_resource_record_sets_by_zone_id(self, id, rrset_type=None,
identifier=None, name=None,
page_chunks=100):
"""
Lists a hosted zone's resource record sets by Zone ID, if you
already know it.
.. tip:: For most cases, we recommend going through a
:py:class:`HostedZone <route53.hosted_zone.HostedZone>`
instance's
:py:meth:`HostedZone.record_sets <route53.hosted_zone.HostedZone.record_sets>`
property, but this saves an HTTP request if you already know the
zone's ID.
:param str id: The ID of the zone whose record sets we're listing.
:keyword str rrset_type: The type of resource record set to begin the
record listing from.
:keyword str identifier: Weighted and latency resource record sets
only: If results were truncated for a given DNS name and type,
the value of SetIdentifier for the next resource record set
that has the current DNS name and type.
        :keyword str name: The record name to begin the listing from.
:keyword int page_chunks: This API call is paginated behind-the-scenes
by this many ResourceRecordSet instances. The default should be
fine for just about everybody, aside from those with tons of RRS.
:rtype: generator
:returns: A generator of ResourceRecordSet instances.
"""
params = {
'name': name,
'type': rrset_type,
'identifier': identifier,
'maxitems': page_chunks,
}
return self._do_autopaginating_api_call(
path='hostedzone/%s/rrset' % id,
params=params,
method='GET',
parser_func=xml_parsers.list_resource_record_sets_by_zone_id_parser,
parser_kwargs={'zone_id': id},
next_marker_xpath="./{*}NextRecordName",
next_marker_param_name="name",
next_type_xpath="./{*}NextRecordType"
)
def _change_resource_record_sets(self, change_set, comment=None):
"""
Given a ChangeSet, POST it to the Route53 API.
.. note:: You probably shouldn't be using this method directly,
as there are convenience methods on the ResourceRecordSet
sub-classes.
:param change_set.ChangeSet change_set: The ChangeSet object to create
the XML doc from.
:keyword str comment: An optional comment to go along with the request.
:rtype: dict
:returns: A dict of change info, which contains some details about
the request.
"""
body = xml_generators.change_resource_record_set_writer(
connection=self,
change_set=change_set,
comment=comment
)
root = self._send_request(
path='hostedzone/%s/rrset' % change_set.hosted_zone_id,
data=body,
method='POST',
)
e_change_info = root.find('./{*}ChangeInfo')
if e_change_info is None:
error = root.find('./{*}Error').find('./{*}Message').text
raise Route53Error(error)
return parse_change_info(e_change_info)
def list_health_checks(self, page_chunks=100):
"""
        List all health checks associated with this connection's account. Since
        this method returns a generator, you can pull as many or as few
        entries as you'd like, without having to query and receive every
        health check you may have.

        :keyword int page_chunks: This API call is "paginated" behind-the-scenes
            in order to break up large result sets. This number determines
            the maximum number of health checks to retrieve per request. The
            default is fine for almost everyone.

        :rtype: generator
        :returns: A generator of health checks.
"""
return self._do_autopaginating_api_call(
path='healthcheck',
params={'maxitems': page_chunks},
method='GET',
parser_func=xml_parsers.list_health_checks_parser,
next_marker_xpath="./{*}NextMarker",
next_marker_param_name="marker",
)
def create_health_check(self, ipaddress, port, type, resource_path, fqdn, search_string, caller_reference=None):
"""
        Creates and returns a new health check.

        :param str ipaddress: The IP address of the endpoint to check.
        :param int port: The port on the endpoint to check.
        :param str type: The type of health check to perform.
        :param str resource_path: The path to request when performing the check.
        :param str fqdn: The fully qualified domain name of the endpoint.
        :param str search_string: The string to search for in the response body.
        :keyword str caller_reference: A unique string that identifies the
            request and that allows failed create_health_check requests to be
            retried without the risk of executing the operation twice. If no
            value is given, we'll generate a Type 4 UUID for you.

        :rtype: tuple
        :returns: A tuple in the form of ``(health_check, change_info)``,
            where ``change_info`` is a dict with some details about the
            API request.
"""
body = xml_generators.create_health_check_writer(
connection=self,
caller_reference=caller_reference,
ipaddress=ipaddress,
port=port,
type=type,
resource_path=resource_path,
fqdn=fqdn,
search_string=search_string
)
root = self._send_request(
path='healthcheck',
data=body,
method='POST',
)
return xml_parsers.created_health_check_parser(
root=root,
connection=self
)
def get_health_check_by_id(self, id):
"""
        Retrieves a health check, by health check ID.

        :param str id: The health check's ID (a short hash string).
        :returns: A representation of the requested health check.
"""
root = self._send_request(
path='healthcheck/%s' % id,
data={},
method='GET',
)
return xml_parsers.get_health_check_by_id_parser(
root=root,
connection=self,
)
def delete_health_check_by_id(self, id):
"""
        Deletes a health check, by health check ID.

        :param str id: The health check's ID (a short hash string).
:rtype: dict
:returns: A dict of change info, which contains some details about
the request.
"""
root = self._send_request(
path='healthcheck/%s' % id,
data={},
method='DELETE',
)
return xml_parsers.delete_health_check_by_id_parser(
root=root,
connection=self,
)
| mit | 6,657,116,449,463,134,000 | 38.884793 | 116 | 0.602137 | false |
scipag/btle-sniffer | src/btlesniffer/hci_constants.py | 1 | 50832 | # -*- coding: utf-8 -*-
"""
Provides constants common in the Bluetooth HCI protocol.
"""
import enum
HCI_MAX_EVENT_SIZE = 260
class Status(enum.IntEnum):
"""
Collection of HCI return states.
"""
Success = 0x00
UnknownHciCommand = 0x01
UnknownConnectionIdentifier = 0x02
HardwareFailure = 0x03
PageTimeout = 0x04
AuthenticationFailure = 0x05
PinOrKeyMissing = 0x06
MemoryCapacityExceeded = 0x07
ConnectionTimeout = 0x08
ConnectionLimitExceeded = 0x09
SynchronousConnectionLimitExceeded = 0x0a
ACLConnectionAlreadyExists = 0x0b
CommandDisallowed = 0x0c
ConnectionRejectedLimitedResources = 0x0d
ConnectionRejectedSecurityReasons = 0x0e
ConnectionRejectedUnnacceptableBDAddr = 0x0f
ConnectionAcceptTimeoutExceeded = 0x10
UnsupportedFeatureOrParameterValue = 0x11
InvalidHciCommandParameters = 0x12
RemoteUserTerminatedConnection = 0x13
RemoteDeviceTerminatedConnectionLowResources = 0x14
RemoteDeviceTerminatedConnectionPowerOff = 0x15
ConnectionTerminatedLocalHost = 0x16
RepeatedAttempts = 0x17
PairingNotAllowed = 0x18
UnknownLmpPdu = 0x19
UnsupportedRemoteFeature = 0x1a
ScoOffsetRejected = 0x1b
ScoIntervalRejected = 0x1c
ScoAirModeRejected = 0x1d
InvalidLmpParameters = 0x1e
UnspecifiedError = 0x1f
UnsupportedLmpParameterValue = 0x20
RoleChangeNotAllowed = 0x21
LmpResponseTimeout = 0x22
LmpErrorTransactionCollision = 0x23
LmpPduNotAllowed = 0x24
EncryptionModeNotAcceptable = 0x25
LinkKeyCannotChange = 0x26
RequestedQosNotSupported = 0x27
InstantPassed = 0x28
PairingWithUnitKeyNotSupported = 0x29
DifferentTransactionCollision = 0x2a
QosUnnacceptableParameter = 0x2c
QosRejected = 0x2d
ChannelClassificationNotSupported = 0x2e
InsufficientSecurity = 0x2f
ParameterOutOfMandatoryRange = 0x30
RoleSwitchPending = 0x32
RoleSwitchFailed = 0x35
ExtendedInquiryResponseTooLarge = 0x36
SecureSimplePairingNotSupportedByHost = 0x37
HostBusyPairing = 0x38
ConnectionRejectedNoSuitableChannel = 0x39
ControllerBusy = 0x3a
UnacceptableConnectionParameters = 0x3b
DirectedAdvertisingTimeout = 0x3c
ConnectionTerminatedMicFailure = 0x3d
ConnectionEstablishFailure = 0x3e
MacConnectionFailed = 0x3f
CoarseClockAdjustmentRejected = 0x40
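

# Example (illustrative): HCI status octets map directly onto this enum,
# e.g. Status(0x0c) is Status.CommandDisallowed, and Status(0x0c).name
# yields "CommandDisallowed".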
class PacketType(enum.IntEnum):
"""
Known HCI packet types.
"""
Invalid = 0x00
Command = 0x01
Async = 0x02
Sync = 0x03
Event = 0x04
class Event(enum.IntEnum):
"""
Common HCI event types.
"""
CommandComplete = 0x0e
CommandStatus = 0x0f
HardwareError = 0x10
DataBufferOverflow = 0x1a
Le = 0x3e
VendorSpecific = 0xff
class LeEvent(enum.IntEnum):
"""
Common HCI LE event types.
"""
LeAdvertisingReport = 0x02
class GapProfile(enum.IntEnum):
"""
GAP communication roles/profiles.
"""
Broadcaster = 0x01
Observer = 0x02
Peripheral = 0x04
Central = 0x08
class DiscoveryType(enum.IntEnum):
"""
LeAdvertisingReport message type.
"""
ConnectableUndirectedAdvertising = 0x00
ConnectableDirectedAdvertising = 0x01
ScannableUndirectedAdvertising = 0x02
NonConnectableUndirectedAdvertising = 0x03
ScanResponse = 0x04
class AddressType(enum.IntEnum):
"""
Device address type.
"""
PublicDeviceAddress = 0x00
RandomDeviceAddress = 0x01
PublicIdentityAddress = 0x02
RandomIdentityAddress = 0x03
UnknownAddressType = 0x04
class ScanType(enum.IntEnum):
"""
LE scan type.
"""
PassiveScan = 0x00
ActiveScan = 0x01
class FilterPolicy(enum.IntEnum):
"""
LE scan filter policy.
"""
UndirectedAdsOnly = 0x00
WhitelistedOnly = 0x01
ResolvableDirected = 0x02
WhitelistedAndResolvableDirected = 0x03
class AdType(enum.IntEnum):
"""
Advertisement data type.
"""
Flags = 0x01
IncompleteListOf16BitServiceClassUUIDs = 0x02
CompleteListOf16BitServiceClassUUIDs = 0x03
IncompleteListOf32BitServiceClassUUIDs = 0x04
CompleteListOf32BitServiceClassUUIDs = 0x05
IncompleteListOf128BitServiceClassUUIDs = 0x06
CompleteListOf128BitServiceClassUUIDs = 0x07
ShortenedLocalName = 0x08
CompleteLocalName = 0x09
TxPowerLevel = 0x0a
ClassOfDevice = 0x0d
SimplePairingHashC192 = 0x0e
SimplePairingRandomizerR192 = 0x0f
SecurityManagerTKValue = 0x10
SecurityManagerOutOfBandFlags = 0x11
SlaveConnectionIntervalRange = 0x12
ListOf16BitServiceSolicitationUUIDs = 0x14
ListOf32BitServiceSolicitationUUIDs = 0x1f
ListOf128BitServiceSolicitationUUIDs = 0x15
ServiceData16BitUUID = 0x16
ServiceData32BitUUID = 0x20
ServiceData128BitUUID = 0x21
LeSecureConnectionsConfirmationValue = 0x22
LeSecureConnectionsRandomValue = 0x23
URI = 0x24
IndoorPositioning = 0x25
TransportDiscoveryData = 0x26
PublicTargetAddress = 0x17
RandomTargetAddress = 0x18
Appearance = 0x19
AdvertisingInterval = 0x1a
LeBluetoothDeviceAddress = 0x1b
LeRole = 0x1c
SimplePairingHashC256 = 0x1d
SimplePairingRandomizerR256 = 0x1e
InformationData = 0x3d
ManufacturerSpecificData = 0xff
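

# The helper below is an illustrative sketch, not part of the original
# module: BLE advertising payloads are sequences of length-prefixed
# structures, each consisting of one length octet, one AD type octet (see
# AdType above) and ``length - 1`` octets of data. Unknown AD type values
# would raise ValueError here.
def _parse_advertising_data_sketch(payload):
    """Yield (AdType, bytes) pairs from a raw advertising data payload."""
    offset = 0
    while offset < len(payload):
        length = payload[offset]
        if length == 0:
            break
        ad_type = AdType(payload[offset + 1])
        yield ad_type, payload[offset + 2:offset + 1 + length]
        offset += 1 + length


# Example: b"\x02\x01\x06" contains a single structure: Flags (0x01) with
# the one data octet 0x06.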
class CompanyId(enum.IntEnum):
"""
Known company identifiers.
"""
EricssonTechnologyLicensing = 0x0000
NokiaMobilePhones = 0x0001
IntelCorp = 0x0002
IBMCorp = 0x0003
ToshibaCorp = 0x0004
ThreeCom = 0x0005
Microsoft = 0x0006
Lucent = 0x0007
Motorola = 0x0008
InfineonTechnologiesAG = 0x0009
CambridgeSiliconRadio = 0x000a
SiliconWave = 0x000b
DigianswerAS = 0x000c
TexasInstrumentsInc = 0x000d
CevaInc = 0x000e
BroadcomCorporation = 0x000f
MitelSemiconductor = 0x0010
WidcommInc = 0x0011
ZeevoInc = 0x0012
AtmelCorporation = 0x0013
MitsubishiElectricCorporation = 0x0014
RTXTelecomAS = 0x0015
KCTechnologyInc = 0x0016
NewLogic = 0x0017
TransilicaInc = 0x0018
RohdeSchwarzGmbHCoKG = 0x0019
TTPComLimited = 0x001a
SigniaTechnologiesInc = 0x001b
ConexantSystemsInc = 0x001c
Qualcomm = 0x001d
Inventel = 0x001e
AVMBerlin = 0x001f
BandSpeedInc = 0x0020
MansellaLtd = 0x0021
NECCorporation = 0x0022
WavePlusTechnologyCoLtd = 0x0023
Alcatel = 0x0024
NXPSemiconductors = 0x0025
CTechnologies = 0x0026
OpenInterface = 0x0027
RFMicroDevices = 0x0028
HitachiLtd = 0x0029
SymbolTechnologiesInc = 0x002a
Tenovis = 0x002b
MacronixInternationalCoLtd = 0x002c
GCTSemiconductor = 0x002d
NorwoodSystems = 0x002e
MewTelTechnologyInc = 0x002f
STMicroelectronics = 0x0030
Synopsis = 0x0031
RedMLtd = 0x0032
CommilLtd = 0x0033
ComputerAccessTechnologyCorporation = 0x0034
EclipseSL = 0x0035
RenesasElectronicsCorporation = 0x0036
MobilianCorporation = 0x0037
Terax = 0x0038
IntegratedSystemSolutionCorp = 0x0039
MatsushitaElectricIndustrialCoLtd = 0x003a
GennumCorporation = 0x003b
BlackBerryLimited = 0x003c
IPextremeInc = 0x003d
SystemsandChipsInc = 0x003e
BluetoothSIGInc = 0x003f
SeikoEpsonCorporation = 0x0040
IntegratedSiliconSolutionTaiwanInc = 0x0041
CONWISETechnologyCorporationLtd = 0x0042
PARROTSA = 0x0043
SocketMobile = 0x0044
AtherosCommunicationsInc = 0x0045
MediaTekInc = 0x0046
Bluegiga = 0x0047
MarvellTechnologyGroupLtd = 0x0048
ThreeDSPCorporation = 0x0049
AccelSemiconductorLtd = 0x004a
ContinentalAutomotiveSystems = 0x004b
AppleInc = 0x004c
StaccatoCommunicationsInc = 0x004d
AvagoTechnologies = 0x004e
APTLicensingLtd = 0x004f
SiRFTechnology = 0x0050
TzeroTechnologiesInc = 0x0051
JMCorporation = 0x0052
Free2moveAB = 0x0053
ThreeDiJoyCorporation = 0x0054
PlantronicsInc = 0x0055
SonyEricssonMobileCommunications = 0x0056
HarmanInternationalIndustriesInc = 0x0057
VizioInc = 0x0058
NordicSemiconductorASA = 0x0059
EMMicroelectronicMarinSA = 0x005a
RalinkTechnologyCorporation = 0x005b
BelkinInternationalInc = 0x005c
RealtekSemiconductorCorporation = 0x005d
StonestreetOneLLC = 0x005e
WicentricInc = 0x005f
RivieraWavesSAS = 0x0060
RDAMicroelectronics = 0x0061
GibsonGuitars = 0x0062
MiCommandInc = 0x0063
BandXIInternationalLLC = 0x0064
HewlettPackardCompany = 0x0065
NineSolutionsOy = 0x0066
GNNetcomAS = 0x0067
GeneralMotors = 0x0068
ADEngineeringInc = 0x0069
MindTreeLtd = 0x006a
PolarElectroOY = 0x006b
BeautifulEnterpriseCoLtd = 0x006c
BriarTekInc = 0x006d
SummitDataCommunicationsInc = 0x006e
SoundID = 0x006f
MonsterLLC = 0x0070
connectBlueAB = 0x0071
ShangHaiSuperSmartElectronicsCoLtd = 0x0072
GroupSenseLtd = 0x0073
ZommLLC = 0x0074
SamsungElectronicsCoLtd = 0x0075
CreativeTechnologyLtd = 0x0076
LairdTechnologies = 0x0077
NikeInc = 0x0078
lesswireAG = 0x0079
MStarSemiconductorInc = 0x007a
HanlynnTechnologies = 0x007b
ARCambridge = 0x007c
SeersTechnologyCoLtd = 0x007d
SportsTrackingTechnologiesLtd = 0x007e
AutonetMobile = 0x007f
DeLormePublishingCompanyInc = 0x0080
WuXiVimicro = 0x0081
SennheiserCommunicationsAS = 0x0082
TimeKeepingSystemsInc = 0x0083
LudusHelsinkiLtd = 0x0084
BlueRadiosInc = 0x0085
equinoxAG = 0x0086
GarminInternationalInc = 0x0087
Ecotest = 0x0088
GNReSoundAS = 0x0089
Jawbone = 0x008a
TopcornPositioningSystemsLLC = 0x008b
GimbalInc = 0x008c
ZscanSoftware = 0x008d
QuinticCorp = 0x008e
StollmanEVGmbH = 0x008f
FunaiElectricCoLtd = 0x0090
AdvancedPANMOBILSystemsGmbHCoKG = 0x0091
ThinkOpticsInc = 0x0092
UniversalElectronicsInc = 0x0093
AirohaTechnologyCorp = 0x0094
NECLightingLtd = 0x0095
ODMTechnologyInc = 0x0096
ConnecteDeviceLtd = 0x0097
zer01tvGmbH = 0x0098
iTechDynamicGlobalDistributionLtd = 0x0099
Alpwise = 0x009a
JiangsuToppowerAutomotiveElectronicsCoLtd = 0x009b
ColorfyInc = 0x009c
GeoforceInc = 0x009d
BoseCorporation = 0x009e
SuuntoOy = 0x009f
KensingtonComputerProductsGroup = 0x00a0
SRMedizinelektronik = 0x00a1
VertuCorporationLimited = 0x00a2
MetaWatchLtd = 0x00a3
LINAKAS = 0x00a4
OTLDynamicsLLC = 0x00a5
PandaOceanInc = 0x00a6
VisteonCorporation = 0x00a7
ARPDevicesLimited = 0x00a8
MagnetiMarelliSpA = 0x00a9
CAENRFIDsrl = 0x00aa
IngenieurSystemgruppeZahnGmbH = 0x00ab
GreenThrottleGames = 0x00ac
PeterSystemtechnikGmbH = 0x00ad
OmegawaveOy = 0x00ae
Cinetix = 0x00af
PassifSemiconductorCorp = 0x00b0
SarisCyclingGroupInc = 0x00b1
BekeyAS = 0x00b2
ClarinoxTechnologiesPtyLtd = 0x00b3
BDETechnologyCoLtd = 0x00b4
SwirlNetworks = 0x00b5
Mesointernational = 0x00b6
TreLabLtd = 0x00b7
QualcommInnovationCenterInc = 0x00b8
JohnsonControlsInc = 0x00b9
StarkeyLaboratoriesInc = 0x00ba
SPowerElectronicsLimited = 0x00bb
AceSensorInc = 0x00bc
AplixCorporation = 0x00bd
AAMPofAmerica = 0x00be
StalmartTechnologyLimited = 0x00bf
AMICCOMElectronicsCorporation = 0x00c0
ShenzhenExcelsecuDataTechnologyCoLtd = 0x00c1
GeneqInc = 0x00c2
adidasAG = 0x00c3
LGElectronics = 0x00c4
OnsetComputerCorporation = 0x00c5
SelflyBV = 0x00c6
QuuppaOy = 0x00c7
GeLoInc = 0x00c8
Evluma = 0x00c9
MC10 = 0x00ca
BinauricSE = 0x00cb
BeatsElectronics = 0x00cc
MicrochipTechnologyInc = 0x00cd
ElgatoSystemsGmbH = 0x00ce
ARCHOSSA = 0x00cf
DexcomInc = 0x00d0
PolarElectroEuropeBV = 0x00d1
DialogSemiconductorBV = 0x00d2
TaixingbangTechnologyCoLTD = 0x00d3
Kawantech = 0x00d4
AustcoCommunicationSystems = 0x00d5
TimexGroupUSAInc = 0x00d6
QualcommTechnologiesInc = 0x00d7
QualcommConnectedExperiencesInc = 0x00d8
VoyetraTurtleBeach = 0x00d9
txtrGmbH = 0x00da
Biosentronics = 0x00db
ProcterGamble = 0x00dc
HosidenCorporation = 0x00dd
MuzikLLC = 0x00de
MisfitWearablesCorp = 0x00df
Google = 0x00e0
DanlersLtd = 0x00e1
SemilinkInc = 0x00e2
inMusicBrandsInc = 0x00e3
LSResearchInc = 0x00e4
EdenSoftwareConsultantsLtd = 0x00e5
Freshtemp = 0x00e6
KSTechnologies = 0x00e7
ACTSTechnologies = 0x00e8
VtrackSystems = 0x00e9
NielsenKellermanCompany = 0x00ea
ServerTechnologyInc = 0x00eb
BioResearchAssociates = 0x00ec
JollyLogicLLC = 0x00ed
AboveAverageOutcomesInc = 0x00ee
BitsplittersGmbH = 0x00ef
PayPalInc = 0x00f0
WitronTechnologyLimited = 0x00f1
AetherThingsInc = 0x00f2
KentDisplaysInc = 0x00f3
NautilusInc = 0x00f4
SmartifierOy = 0x00f5
ElcometerLimited = 0x00f6
VSNTechnologiesInc = 0x00f7
AceUniCorpLtd = 0x00f8
StickNFind = 0x00f9
CrystalCodeAB = 0x00fa
KOUKAAMas = 0x00fb
DelphiCorporation = 0x00fc
ValenceTechLimited = 0x00fd
Reserved = 0x00fe
TypoProductsLLC = 0x00ff
TomTomInternationalBV = 0x0100
FugooInc = 0x0101
KeiserCorporation = 0x0102
BangOlufsenAS = 0x0103
PLUSLocationsSystemsPtyLtd = 0x0104
UbiquitousComputingTechnologyCorporation = 0x0105
InnovativeYachtterSolutions = 0x0106
WilliamDemantHoldingAS = 0x0107
ChiconyElectronicsCoLtd = 0x0108
AtusBV = 0x0109
CodegateLtd = 0x010a
ERiInc = 0x010b
TransducersDirectLLC = 0x010c
FujitsuTenLimited = 0x010d
AudiAG = 0x010e
HiSiliconTechnologiesCoLtd = 0x010f
NipponSeikiCoLtd = 0x0110
SteelseriesApS = 0x0111
VisyblInc = 0x0112
OpenbrainTechnologiesCoLtd = 0x0113
Xensr = 0x0114
esolutions = 0x0115
OneOAKTechnologies = 0x0116
WimotoTechnologiesInc = 0x0117
RadiusNetworksInc = 0x0118
WizeTechnologyCoLtd = 0x0119
QualcommLabsInc = 0x011a
ArubaNetworks = 0x011b
Baidu = 0x011c
ArendiAG = 0x011d
SkodaAutoas = 0x011e
VolkswagonAG = 0x011f
PorscheAG = 0x0120
SinoWealthElectronicLtd = 0x0121
AirTurnInc = 0x0122
KinsaInc = 0x0123
HIDGlobal = 0x0124
SEATes = 0x0125
PrometheanLtd = 0x0126
SaluticaAlliedSolutions = 0x0127
GPSIGroupPtyLtd = 0x0128
NimbleDevicesOy = 0x0129
ChangzhouYongseInfotechCoLtd = 0x012a
SportIQ = 0x012b
TEMECInstrumentsBV = 0x012c
SonyCorporation = 0x012d
ASSAABLOY = 0x012e
ClarionCoLtd = 0x012f
WarehouseInnovations = 0x0130
CypressSemiconductorCorporation = 0x0131
MADSInc = 0x0132
BlueMaestroLimited = 0x0133
ResolutionProductsInc = 0x0134
AirewearLLC = 0x0135
SeedLabsInc = 0x0136
PrestigioPlazaLtd = 0x0137
NTEOInc = 0x0138
FocusSystemsCorporation = 0x0139
TencentHoldingsLimited = 0x013a
Allegion = 0x013b
MurataManufacuringCoLtd = 0x013c
NodInc = 0x013e
BBManufacturingCompany = 0x013f
AlpineElectronicsCoLtd = 0x0140
FedExServices = 0x0141
GrapeSystemsInc = 0x0142
BkonConnect = 0x0143
LintechGmbH = 0x0144
NovatelWireless = 0x0145
Ciright = 0x0146
MightyCastInc = 0x0147
AmbimatElectronics = 0x0148
PerytonsLtd = 0x0149
TivoliAudioLLC = 0x014a
MasterLock = 0x014b
MeshNetLtd = 0x014c
HuizhouDesaySVAutomotiveCOLTD = 0x014d
TangerineInc = 0x014e
BWGroupLtd = 0x014f
PioneerCorporation = 0x0150
OnBeep = 0x0151
VernierSoftwareTechnology = 0x0152
ROLErgo = 0x0153
PebbleTechnology = 0x0154
NETATMO = 0x0155
AccumulateAB = 0x0156
AnhuiHuamiInformationTechnologyCoLtd = 0x0157
Inmitesro = 0x0158
ChefStepsInc = 0x0159
micasAG = 0x015a
BiomedicalResearchLtd = 0x015b
PitiusTecSL = 0x015c
EstimoteInc = 0x015d
UnikeyTechnologiesInc = 0x015e
TimerCapCo = 0x015f
AwoX = 0x0160
yikes = 0x0161
MADSGlobalNZLtd = 0x0162
PCHInternational = 0x0163
QingdaoYeelinkInformationTechnologyCoLtd = 0x0164
MilwaukeeTool = 0x0165
MISHIKPteLtd = 0x0166
BayerHealthCare = 0x0167
SpiceboxLLC = 0x0168
emberlight = 0x0169
CooperAtkinsCorporation = 0x016a
Qblinks = 0x016b
MYSPHERA = 0x016c
LifeScanInc = 0x016d
VolanticAB = 0x016e
PodoLabsInc = 0x016f
FHoffmannLaRocheAG = 0x0170
AmazonFulfillmentService = 0x0171
ConnovateTechnologyPrivateLimited = 0x0172
KocomojoLLC = 0x0173
EverykeyLLC = 0x0174
DynamicControls = 0x0175
SentriLock = 0x0176
ISYSTinc = 0x0177
CASIOCOMPUTERCOLTD = 0x0178
LAPISSemiconductorCoLtd = 0x0179
TelemonitorInc = 0x017a
taskitGmbH = 0x017b
DaimlerAG = 0x017c
BatAndCat = 0x017d
BluDotzLtd = 0x017e
XTelApS = 0x017f
GigasetCommunicationsGmbH = 0x0180
GeckoHealthInnovationsInc = 0x0181
HOPUbiquitous = 0x0182
ToBeAssigned = 0x0183
Nectar = 0x0184
belappsLLC = 0x0185
CORELightingLtd = 0x0186
SeraphimSenseLtd = 0x0187
UnicoRBC = 0x0188
PhysicalEnterprisesInc = 0x0189
AbleTrendTechnologyLimited = 0x018a
KonicaMinoltaInc = 0x018b
WiloSE = 0x018c
ExtronDesignServices = 0x018d
FitbitInc = 0x018e
FirefliesSystems = 0x018f
IntellettoTechnologiesInc = 0x0190
FDKCORPORATION = 0x0191
CloudleafInc = 0x0192
MavericAutomationLLC = 0x0193
AcousticStreamCorporation = 0x0194
Zuli = 0x0195
PaxtonAccessLtd = 0x0196
WiSilicaInc = 0x0197
VengitLimited = 0x0198
SALTOSYSTEMSSL = 0x0199
TRONForum = 0x019a
CUBETECHsro = 0x019b
CokiyaIncorporated = 0x019c
CVSHealth = 0x019d
Ceruus = 0x019e
StrainstallLtd = 0x019f
ChannelEnterprisesLtd = 0x01a0
FIAMM = 0x01a1
GIGALANECOLTD = 0x01a2
EROAD = 0x01a3
MineSafetyAppliances = 0x01a4
IconHealthandFitness = 0x01a5
AsandooGmbH = 0x01a6
ENERGOUSCORPORATION = 0x01a7
Taobao = 0x01a8
CanonInc = 0x01a9
GeophysicalTechnologyInc = 0x01aa
FacebookInc = 0x01ab
NiproDiagnosticsInc = 0x01ac
FlightSafetyInternational = 0x01ad
EarlensCorporation = 0x01ae
SunriseMicroDevicesInc = 0x01af
StarMicronicsCoLtd = 0x01b0
NetizensSpzoo = 0x01b1
NymiInc = 0x01b2
NytecInc = 0x01b3
TrineoSpzoo = 0x01b4
NestLabsInc = 0x01b5
LMTechnologiesLtd = 0x01b6
GeneralElectricCompany = 0x01b7
iD3SL = 0x01b8
HANAMicron = 0x01b9
StagesCyclingLLC = 0x01ba
CochlearBoneAnchoredSolutionsAB = 0x01bb
SenionLabAB = 0x01bc
SyszoneCoLtd = 0x01bd
PulsateMobileLtd = 0x01be
HongKongHunterSunElectronicLimited = 0x01bf
pironexGmbH = 0x01c0
BRADATECHCorp = 0x01c1
TransenergooilAG = 0x01c2
Bunch = 0x01c3
DMEMicroelectronics = 0x01c4
BitcrazeAB = 0x01c5
HASWAREInc = 0x01c6
AbiogenixInc = 0x01c7
PolyControlApS = 0x01c8
Avion = 0x01c9
LaerdalMedicalAS = 0x01ca
FetchMyPet = 0x01cb
SamLabsLtd = 0x01cc
ChengduSynwingTechnologyLtd = 0x01cd
HOUWASYSTEMDESIGNkk = 0x01ce
BSH = 0x01cf
PrimusInterParesLtd = 0x01d0
August = 0x01d1
GillElectronics = 0x01d2
SkyWaveDesign = 0x01d3
NewlabSrl = 0x01d4
ELADsrl = 0x01d5
Gwearablesinc = 0x01d6
SquadroneSystemsInc = 0x01d7
CodeCorporation = 0x01d8
SavantSystemsLLC = 0x01d9
LogitechInternationalSA = 0x01da
InnblueConsulting = 0x01db
iParkingLtd = 0x01dc
KoninklijkePhilipsElectronicsNV = 0x01dd
MinelabElectronicsPtyLimited = 0x01de
BisonGroupLtd = 0x01df
WidexAS = 0x01e0
JollaLtd = 0x01e1
LectronixInc = 0x01e2
CaterpillarInc = 0x01e3
FreedomInnovations = 0x01e4
DynamicDevicesLtd = 0x01e5
TechnologySolutionsLtd = 0x01e6
IPSGroupInc = 0x01e7
STIR = 0x01e8
SanoInc = 0x01e9
AdvancedApplicationDesignInc = 0x01ea
AutoMapLLC = 0x01eb
SpreadtrumCommunicationsShanghaiLtd = 0x01ec
CuteCircuitLTD = 0x01ed
ValeoService = 0x01ee
FullpowerTechnologiesInc = 0x01ef
KloudNation = 0x01f0
ZebraTechnologiesCorporation = 0x01f1
ItronInc = 0x01f2
TheUniversityofTokyo = 0x01f3
UTCFireandSecurity = 0x01f4
CoolWebthingsLimited = 0x01f5
DJOGlobal = 0x01f6
GellinerLimited = 0x01f7
AnykaMicroelectronicsTechnologyCoLTD = 0x01f8
MedtronicInc = 0x01f9
GozioInc = 0x01fa
FormLiftingLLC = 0x01fb
WahooFitnessLLC = 0x01fc
KontaktMicroLocationSpzoo = 0x01fd
RadioSystemCorporation = 0x01fe
FreescaleSemiconductorInc = 0x01ff
VerifoneSystemsPTeLtdTaiwanBranch = 0x0200
ARTiming = 0x0201
RigadoLLC = 0x0202
KemppiOy = 0x0203
TapcentiveInc = 0x0204
SmartboticsInc = 0x0205
OtterProductsLLC = 0x0206
STEMPInc = 0x0207
LumiGeekLLC = 0x0208
InvisionHeartInc = 0x0209
MacnicaInc = 0x020a
JaguarLandRoverLimited = 0x020b
CoroWareTechnologiesInc = 0x020c
SimploTechnologyCoLTD = 0x020d
OmronHealthcareCoLTD = 0x020e
ComoduleGMBH = 0x020f
ikeGPS = 0x0210
TelinkSemiconductorCoLtd = 0x0211
InterplanCoLtd = 0x0212
WylerAG = 0x0213
IKMultimediaProductionsrl = 0x0214
LukotonExperienceOy = 0x0215
MTILtd = 0x0216
Tech4homeLda = 0x0217
HiotechAB = 0x0218
DOTTLimited = 0x0219
BlueSpeckLabsLLC = 0x021a
CiscoSystemsInc = 0x021b
MobicommInc = 0x021c
Edamic = 0x021d
GoodnetLtd = 0x021e
LusterLeafProductsInc = 0x021f
ManusMachinaBV = 0x0220
MobiquityNetworksInc = 0x0221
PraxisDynamics = 0x0222
PhilipMorrisProductsSA = 0x0223
ComarchSA = 0x0224
NestlNespressoSA = 0x0225
MerliniaAS = 0x0226
LifeBEAMTechnologies = 0x0227
TwocanoesLabsLLC = 0x0228
MuovertiLimited = 0x0229
StamerMusikanlagenGMBH = 0x022a
TeslaMotors = 0x022b
PharynksCorporation = 0x022c
Lupine = 0x022d
SiemensAG = 0x022e
HuamiCultureCommunicationCOLTD = 0x022f
FosterElectricCompanyLtd = 0x0230
ETASA = 0x0231
xSensoSolutionsKft = 0x0232
ShenzhenSuLongCommunicationLtd = 0x0233
FengFanTechnologyCoLtd = 0x0234
QrioInc = 0x0235
PitpatpetLtd = 0x0236
MSHelisrl = 0x0237
Trakm8Ltd = 0x0238
JINCOLtd = 0x0239
AlatechTechnology = 0x023a
BeijingCarePulseElectronicTechnologyCoLtd = 0x023b
Awarepoint = 0x023c
ViCentraBV = 0x023d
RavenIndustries = 0x023e
WaveWareTechnologies = 0x023f
ArgenoxTechnologies = 0x0240
BragiGmbH = 0x0241
SixteenLabInc = 0x0242
MasimoCorp = 0x0243
IoteraInc = 0x0244
EndressHauser = 0x0245
ACKmeNetworksInc = 0x0246
FiftyThreeInc = 0x0247
ParkerHannifinCorp = 0x0248
TranscranialLtd = 0x0249
UwatecAG = 0x024a
OrlanLLC = 0x024b
BlueCloverDevices = 0x024c
MWaySolutionsGmbH = 0x024d
MicrotronicsEngineeringGmbH = 0x024e
SchneiderSchreibgerteGmbH = 0x024f
SapphireCircuitsLLC = 0x0250
LumoBodytechInc = 0x0251
UKCTechnosolution = 0x0252
XicatoInc = 0x0253
Playbrush = 0x0254
DaiNipponPrintingCoLtd = 0x0255
G24PowerLimited = 0x0256
AdBabbleLocalCommerceInc = 0x0257
DevialetSA = 0x0258
ALTYOR = 0x0259
UniversityofAppliedSciencesValaisHauteEcoleValaisanne = 0x025a
FiveInteractiveLLCdbaZendo = 0x025b
NetEaseNetworkcoLtd = 0x025c
LexmarkInternationalInc = 0x025d
FlukeCorporation = 0x025e
YardarmTechnologies = 0x025f
SensaRx = 0x0260
SECVREGmbH = 0x0261
GlacialRidgeTechnologies = 0x0262
IdentivInc = 0x0263
DDSInc = 0x0264
SMKCorporation = 0x0265
SchawbelTechnologiesLLC = 0x0266
XMISystemsSA = 0x0267
Cerevo = 0x0268
TorroxGmbHCoKG = 0x0269
Gemalto = 0x026a
DEKAResearchDevelopmentCorp = 0x026b
DomsterTadeuszSzydlowski = 0x026c
TechnogymSPA = 0x026d
FLEURBAEYBVBA = 0x026e
AptcodeSolutions = 0x026f
LSIADLTechnology = 0x0270
AnimasCorp = 0x0271
AlpsElectricCoLtd = 0x0272
OCEASOFT = 0x0273
MotsaiResearch = 0x0274
Geotab = 0x0275
EGOElektroGertebauGmbH = 0x0276
bewhereinc = 0x0277
JohnsonOutdoorsInc = 0x0278
steuteSchaltgerateGmbHCoKG = 0x0279
Ekominiinc = 0x027a
DEFAAS = 0x027b
AseptikaLtd = 0x027c
HUAWEITechnologiesCoLtd = 0x027d
HabitAwareLLC = 0x027e
ruwidoaustriagmbh = 0x027f
ITECcorporation = 0x0280
StoneL = 0x0281
SonovaAG = 0x0282
MavenMachinesInc = 0x0283
SynapseElectronics = 0x0284
StandardInnovationInc = 0x0285
RFCodeInc = 0x0286
WallyVenturesSL = 0x0287
WillowbankElectronicsLtd = 0x0288
SKTelecom = 0x0289
JetroAS = 0x028a
CodeGearsLTD = 0x028b
NANOLINKAPS = 0x028c
IFLLC = 0x028d
RFDigitalCorp = 0x028e
ChurchDwightCoInc = 0x028f
MultibitOy = 0x0290
CliniCloudInc = 0x0291
SwiftSensors = 0x0292
BlueBite = 0x0293
ELIASGmbH = 0x0294
SivantosGmbH = 0x0295
Petzl = 0x0296
stormpowerltd = 0x0297
EISSTLtd = 0x0298
InexessTechnologySimmaKG = 0x0299
CurrantInc = 0x029a
C2DevelopmentInc = 0x029b
BlueSkyScientificLLCA = 0x029c
ALOTTAZSLABSLLC = 0x029d
Kupsonspolsro = 0x029e
AreusEngineeringGmbH = 0x029f
ImpossibleCameraGmbH = 0x02a0
InventureTrackSystems = 0x02a1
LockedUp = 0x02a2
Itude = 0x02a3
PacificLockCompany = 0x02a4
TendyronCorporation = 0x02a5
RobertBoschGmbH = 0x02a6
IlluxtroninternationalBV = 0x02a7
miSportLtd = 0x02a8
Chargelib = 0x02a9
DopplerLab = 0x02aa
BBPOSLimited = 0x02ab
RTBElektronikGmbHCoKG = 0x02ac
RxNetworksInc = 0x02ad
WeatherFlowInc = 0x02ae
TechnicolorUSAInc = 0x02af
BestechnicLtd = 0x02b0
RadenInc = 0x02b1
JouZenOy = 0x02b2
CLABERSPA = 0x02b3
HyginexInc = 0x02b4
HANSHINELECTRICRAILWAYCOLTD = 0x02b5
SchneiderElectric = 0x02b6
OortTechnologiesLLC = 0x02b7
ChronoTherapeutics = 0x02b8
RinnaiCorporation = 0x02b9
SwissprimeTechnologiesAG = 0x02ba
KohaCoLtd = 0x02bb
GenevacLtd = 0x02bc
Chemtronics = 0x02bd
SeguroTechnologySpzoo = 0x02be
RedbirdFlightSimulations = 0x02bf
DashRobotics = 0x02c0
LINECorporation = 0x02c1
GuillemotCorporation = 0x02c2
TechtronicPowerToolsTechnologyLimited = 0x02c3
WilsonSportingGoods = 0x02c4
LenovoPteLtd = 0x02c5
AyatanSensors = 0x02c6
ElectronicsTomorrowLimited = 0x02c7
VASCODataSecurityInternationalInc = 0x02c8
PayRangeInc = 0x02c9
ABOVSemiconductor = 0x02ca
AINAWirelessInc = 0x02cb
EijkelkampSoilWater = 0x02cc
BMAergonomicsbv = 0x02cd
TevaBrandedPharmaceuticalProductsRDInc = 0x02ce
Anima = 0x02cf
ThreeM = 0x02d0
EmpaticaSrl = 0x02d1
AferoInc = 0x02d2
PowercastCorporation = 0x02d3
SecuyouApS = 0x02d4
OMRONCorporation = 0x02d5
SendSolutions = 0x02d6
NIPPONSYSTEMWARECOLTD = 0x02d7
Neosfar = 0x02d8
FlieglAgrartechnikGmbH = 0x02d9
Gilvader = 0x02da
DigiInternationalInc = 0x02db
DeWalchTechnologiesInc = 0x02dc
FlintRehabilitationDevicesLLC = 0x02dd
SamsungSDSCoLtd = 0x02de
BlurProductDevelopment = 0x02df
UniversityofMichigan = 0x02e0
VictronEnergyBV = 0x02e1
NTTdocomo = 0x02e2
CarmanahTechnologiesCorp = 0x02e3
BytestormLtd = 0x02e4
EspressifIncorporated = 0x02e5
Unwire = 0x02e6
ConnectedYardInc = 0x02e7
AmericanMusicEnvironments = 0x02e8
SensogramTechnologiesInc = 0x02e9
FujitsuLimited = 0x02ea
ArdicTechnology = 0x02eb
DeltaSystemsInc = 0x02ec
HTCCorporation = 0x02ed
CitizenHoldingsCoLtd = 0x02ee
SMARTINNOVATIONinc = 0x02ef
BlackratSoftware = 0x02f0
TheIdeaCaveLLC = 0x02f1
GoProInc = 0x02f2
AuthAirInc = 0x02f3
VensiInc = 0x02f4
IndagemTechLLC = 0x02f5
IntemoTechnologies = 0x02f6
DreamVisionscoLtd = 0x02f7
RunteqOyLtd = 0x02f8
IMAGINATIONTECHNOLOGIESLTD = 0x02f9
CoSTARTechnologies = 0x02fa
ClariusMobileHealthCorp = 0x02fb
ShanghaiFrequenMicroelectronicsCoLtd = 0x02fc
UwannaInc = 0x02fd
LierdaScienceTechnologyGroupCoLtd = 0x02fe
SiliconLaboratories = 0x02ff
WorldMotoInc = 0x0300
GiatecScientificInc = 0x0301
LoopDevicesInc = 0x0302
IACAelectronique = 0x0303
MartiansInc = 0x0304
SwippApS = 0x0305
LifeLaboratoryInc = 0x0306
FUJIINDUSTRIALCOLTD = 0x0307
SurefireLLC = 0x0308
DolbyLabs = 0x0309
Ellisys = 0x030a
MagnitudeLightingConverters = 0x030b
HiltiAG = 0x030c
DevdataSrl = 0x030d
Deviceworx = 0x030e
ShortcutLabs = 0x030f
SGLItaliaSrl = 0x0310
PEEQDATA = 0x0311
DucereTechnologiesPvtLtd = 0x0312
DiveNavInc = 0x0313
RIIGAISpzoo = 0x0314
ThermoFisherScientific = 0x0315
AGMeasurematicsPvtLtd = 0x0316
CHUOElectronicsCOLTD = 0x0317
AspentaInternational = 0x0318
EugsterFrismagAG = 0x0319
AmberwirelessGmbH = 0x031a
HQInc = 0x031b
LabSensorSolutions = 0x031c
EnterlabApS = 0x031d
EyefiInc = 0x031e
MetaSystemSpA = 0x031f
SONOELECTRONICSCOLTD = 0x0320
Jewelbots = 0x0321
CompumedicsLimited = 0x0322
RotorBikeComponents = 0x0323
AstroInc = 0x0324
AmotusSolutions = 0x0325
HealthwearTechnologiesLtd = 0x0326
EssexElectronics = 0x0327
GrundfosAS = 0x0328
EargoInc = 0x0329
ElectronicDesignLab = 0x032a
ESYLUX = 0x032b
NIPPONSMTCOLtd = 0x032c
BMinnovationsGmbH = 0x032d
indoormap = 0x032e
OttoQInc = 0x032f
NorthPoleEngineering = 0x0330
ThreeFlaresTechnologiesInc = 0x0331
ElectrocompanietAS = 0x0332
MulTLock = 0x0333
CorentiumAS = 0x0334
EnlightedInc = 0x0335
GISTIC = 0x0336
AJP2HoldingsLLC = 0x0337
COBIGmbH = 0x0338
BlueSkyScientificLLCB = 0x0339
AppceptionInc = 0x033a
CourtneyThorneLimited = 0x033b
Virtuosys = 0x033c
TPVTechnologyLimited = 0x033d
MonitraSA = 0x033e
AutomationComponentsInc = 0x033f
Letsensesrl = 0x0340
EtesianTechnologiesLLC = 0x0341
GERTECBRASILLTDA = 0x0342
DrekkerDevelopmentPtyLtd = 0x0343
WhirlInc = 0x0344
LocusPositioning = 0x0345
AcuityBrandsLightingInc = 0x0346
PreventBiometrics = 0x0347
Arioneo = 0x0348
VersaMe = 0x0349
Vaddio = 0x034a
LibratoneAS = 0x034b
HMElectronicsInc = 0x034c
TASERInternationalInc = 0x034d
SafeTrustInc = 0x034e
HeartlandPaymentSystems = 0x034f
BitstrataSystemsInc = 0x0350
PiepsGmbH = 0x0351
iRidingTechnologyCoLtd = 0x0352
AlphaAudiotronicsInc = 0x0353
TOPPANFORMSCOLTD = 0x0354
SigmaDesignsInc = 0x0355
RESERVED = 0xffff
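# Example lookups against the enum above (a minimal sketch; values follow
# the assignments in this file):
#
#   >>> CompanyId(0x004c).name
#   'AppleInc'
#   >>> CompanyId.NordicSemiconductorASA.value
#   89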
ALL_16BIT_UUIDS = {
0x0001: "SDP",
0x0003: "RFCOMM",
0x0005: "TCS-BIN",
0x0007: "ATT",
0x0008: "OBEX",
0x000f: "BNEP",
0x0010: "UPNP",
0x0011: "HIDP",
0x0012: "Hardcopy Control Channel",
0x0014: "Hardcopy Data Channel",
0x0016: "Hardcopy Notification",
0x0017: "AVCTP",
0x0019: "AVDTP",
0x001b: "CMTP",
0x001e: "MCAP Control Channel",
0x001f: "MCAP Data Channel",
0x0100: "L2CAP",
# 0x0101 to 0x0fff undefined
0x1000: "Service Discovery Server Service Class",
0x1001: "Browse Group Descriptor Service Class",
0x1002: "Public Browse Root",
# 0x1003 to 0x1100 undefined
0x1101: "Serial Port",
0x1102: "LAN Access Using PPP",
0x1103: "Dialup Networking",
0x1104: "IrMC Sync",
0x1105: "OBEX Object Push",
0x1106: "OBEX File Transfer",
0x1107: "IrMC Sync Command",
0x1108: "Headset",
0x1109: "Cordless Telephony",
0x110a: "Audio Source",
0x110b: "Audio Sink",
0x110c: "A/V Remote Control Target",
0x110d: "Advanced Audio Distribution",
0x110e: "A/V Remote Control",
0x110f: "A/V Remote Control Controller",
0x1110: "Intercom",
0x1111: "Fax",
0x1112: "Headset AG",
0x1113: "WAP",
0x1114: "WAP Client",
0x1115: "PANU",
0x1116: "NAP",
0x1117: "GN",
0x1118: "Direct Printing",
0x1119: "Reference Printing",
0x111a: "Basic Imaging Profile",
0x111b: "Imaging Responder",
0x111c: "Imaging Automatic Archive",
0x111d: "Imaging Referenced Objects",
0x111e: "Handsfree",
0x111f: "Handsfree Audio Gateway",
0x1120: "Direct Printing Reference Objects Service",
0x1121: "Reflected UI",
0x1122: "Basic Printing",
0x1123: "Printing Status",
0x1124: "Human Interface Device Service",
0x1125: "Hardcopy Cable Replacement",
0x1126: "HCR Print",
0x1127: "HCR Scan",
0x1128: "Common ISDN Access",
# 0x1129 and 0x112a undefined
0x112d: "SIM Access",
0x112e: "Phonebook Access Client",
0x112f: "Phonebook Access Server",
0x1130: "Phonebook Access",
0x1131: "Headset HS",
0x1132: "Message Access Server",
0x1133: "Message Notification Server",
0x1134: "Message Access Profile",
0x1135: "GNSS",
0x1136: "GNSS Server",
0x1137: "3D Display",
0x1138: "3D Glasses",
0x1139: "3D Synchronization",
0x113a: "MPS Profile",
0x113b: "MPS Service",
# 0x113c to 0x11ff undefined
0x1200: "PnP Information",
0x1201: "Generic Networking",
0x1202: "Generic File Transfer",
0x1203: "Generic Audio",
0x1204: "Generic Telephony",
0x1205: "UPNP Service",
0x1206: "UPNP IP Service",
0x1300: "UPNP IP PAN",
0x1301: "UPNP IP LAP",
0x1302: "UPNP IP L2CAP",
0x1303: "Video Source",
0x1304: "Video Sink",
0x1305: "Video Distribution",
# 0x1306 to 0x13ff undefined
0x1400: "HDP",
0x1401: "HDP Source",
0x1402: "HDP Sink",
# 0x1403 to 0x17ff undefined
0x1800: "Generic Access Profile",
0x1801: "Generic Attribute Profile",
0x1802: "Immediate Alert",
0x1803: "Link Loss",
0x1804: "Tx Power",
0x1805: "Current Time Service",
0x1806: "Reference Time Update Service",
0x1807: "Next DST Change Service",
0x1808: "Glucose",
0x1809: "Health Thermometer",
0x180a: "Device Information",
# 0x180b and 0x180c undefined
0x180d: "Heart Rate",
0x180e: "Phone Alert Status Service",
0x180f: "Battery Service",
0x1810: "Blood Pressure",
0x1811: "Alert Notification Service",
0x1812: "Human Interface Device",
0x1813: "Scan Parameters",
0x1814: "Running Speed and Cadence",
0x1815: "Automation IO",
0x1816: "Cycling Speed and Cadence",
# 0x1817 undefined
0x1818: "Cycling Power",
0x1819: "Location and Navigation",
0x181a: "Environmental Sensing",
0x181b: "Body Composition",
0x181c: "User Data",
0x181d: "Weight Scale",
0x181e: "Bond Management",
0x181f: "Continuous Glucose Monitoring",
0x1820: "Internet Protocol Support",
0x1821: "Indoor Positioning",
0x1822: "Pulse Oximeter",
0x1823: "HTTP Proxy",
0x1824: "Transport Discovery",
0x1825: "Object Transfer",
# 0x1826 to 0x27ff undefined
0x2800: "Primary Service",
0x2801: "Secondary Service",
0x2802: "Include",
0x2803: "Characteristic",
# 0x2804 to 0x28ff undefined
0x2900: "Characteristic Extended Properties",
0x2901: "Characteristic User Description",
0x2902: "Client Characteristic Configuration",
0x2903: "Server Characteristic Configuration",
0x2904: "Characteristic Format",
0x2905: "Characteristic Aggregate Format",
0x2906: "Valid Range",
0x2907: "External Report Reference",
0x2908: "Report Reference",
0x2909: "Number of Digitals",
0x290a: "Value Trigger Setting",
0x290b: "Environmental Sensing Configuration",
0x290c: "Environmental Sensing Measurement",
0x290d: "Environmental Sensing Trigger Setting",
0x290e: "Time Trigger Setting",
# 0x290f to 0x29ff undefined
0x2a00: "Device Name",
0x2a01: "Appearance",
0x2a02: "Peripheral Privacy Flag",
0x2a03: "Reconnection Address",
0x2a04: "Peripheral Preferred Connection Parameters",
0x2a05: "Service Changed",
0x2a06: "Alert Level",
0x2a07: "Tx Power Level",
0x2a08: "Date Time",
0x2a09: "Day of Week",
0x2a0a: "Day Date Time",
# 0x2a0b undefined
0x2a0c: "Exact Time 256",
0x2a0d: "DST Offset",
0x2a0e: "Time Zone",
0x2a0f: "Local Time Information",
# 0x2a10 undefined
0x2a11: "Time with DST",
0x2a12: "Time Accuracy",
0x2a13: "Time Source",
0x2a14: "Reference Time Information",
# 0x2a15 undefined
0x2a16: "Time Update Control Point",
0x2a17: "Time Update State",
0x2a18: "Glucose Measurement",
0x2a19: "Battery Level",
# 0x2a1a and 0x2a1b undefined
0x2a1c: "Temperature Measurement",
0x2a1d: "Temperature Type",
0x2a1e: "Intermediate Temperature",
# 0x2a1f and 0x2a20 undefined
0x2a21: "Measurement Interval",
0x2a22: "Boot Keyboard Input Report",
0x2a23: "System ID",
0x2a24: "Model Number String",
0x2a25: "Serial Number String",
0x2a26: "Firmware Revision String",
0x2a27: "Hardware Revision String",
0x2a28: "Software Revision String",
0x2a29: "Manufacturer Name String",
0x2a2a: "IEEE 11073-20601 Regulatory Cert. Data List",
0x2a2b: "Current Time",
0x2a2c: "Magnetic Declination",
# 0x2a2d to 0x2a30 undefined
0x2a31: "Scan Refresh",
0x2a32: "Boot Keyboard Output Report",
0x2a33: "Boot Mouse Input Report",
0x2a34: "Glucose Measurement Context",
0x2a35: "Blood Pressure Measurement",
0x2a36: "Intermediate Cuff Pressure",
0x2a37: "Heart Rate Measurement",
0x2a38: "Body Sensor Location",
0x2a39: "Heart Rate Control Point",
# 0x2a3a to 0x2a3e undefined
0x2a3f: "Alert Status",
0x2a40: "Ringer Control Point",
0x2a41: "Ringer Setting",
0x2a42: "Alert Category ID Bit Mask",
0x2a43: "Alert Category ID",
0x2a44: "Alert Notification Control Point",
0x2a45: "Unread Alert Status",
0x2a46: "New Alert",
0x2a47: "Supported New Alert Category",
0x2a48: "Supported Unread Alert Category",
0x2a49: "Blood Pressure Feature",
0x2a4a: "HID Information",
0x2a4b: "Report Map",
0x2a4c: "HID Control Point",
0x2a4d: "Report",
0x2a4e: "Protocol Mode",
0x2a4f: "Scan Interval Window",
0x2a50: "PnP ID",
0x2a51: "Glucose Feature",
0x2a52: "Record Access Control Point",
0x2a53: "RSC Measurement",
0x2a54: "RSC Feature",
0x2a55: "SC Control Point",
0x2a56: "Digital",
# 0x2a57 undefined
0x2a58: "Analog",
# 0x2a59 undefined
0x2a5a: "Aggregate",
0x2a5b: "CSC Measurement",
0x2a5c: "CSC Feature",
0x2a5d: "Sensor Location",
# 0x2a5e to 0x2a62 undefined
0x2a63: "Cycling Power Measurement",
0x2a64: "Cycling Power Vector",
0x2a65: "Cycling Power Feature",
0x2a66: "Cycling Power Control Point",
0x2a67: "Location and Speed",
0x2a68: "Navigation",
0x2a69: "Position Quality",
0x2a6a: "LN Feature",
0x2a6b: "LN Control Point",
0x2a6c: "Elevation",
0x2a6d: "Pressure",
0x2a6e: "Temperature",
0x2a6f: "Humidity",
0x2a70: "True Wind Speed",
0x2a71: "True Wind Direction",
0x2a72: "Apparent Wind Speed",
0x2a73: "Apparent Wind Direction",
0x2a74: "Gust Factor",
0x2a75: "Pollen Concentration",
0x2a76: "UV Index",
0x2a77: "Irradiance",
0x2a78: "Rainfall",
0x2a79: "Wind Chill",
0x2a7a: "Heat Index",
0x2a7b: "Dew Point",
0x2a7c: "Trend",
0x2a7d: "Descriptor Value Changed",
0x2a7e: "Aerobic Heart Rate Lower Limit",
0x2a7f: "Aerobic Threshold",
0x2a80: "Age",
0x2a81: "Anaerobic Heart Rate Lower Limit",
0x2a82: "Anaerobic Heart Rate Upper Limit",
0x2a83: "Anaerobic Threshold",
0x2a84: "Aerobic Heart Rate Upper Limit",
0x2a85: "Date of Birth",
0x2a86: "Date of Threshold Assessment",
0x2a87: "Email Address",
0x2a88: "Fat Burn Heart Rate Lower Limit",
0x2a89: "Fat Burn Heart Rate Upper Limit",
0x2a8a: "First Name",
0x2a8b: "Five Zone Heart Rate Limits",
0x2a8c: "Gender",
0x2a8d: "Heart Rate Max",
0x2a8e: "Height",
0x2a8f: "Hip Circumference",
0x2a90: "Last Name",
0x2a91: "Maximum Recommended Heart Rate",
0x2a92: "Resting Heart Rate",
0x2a93: "Sport Type for Aerobic/Anaerobic Thresholds",
0x2a94: "Three Zone Heart Rate Limits",
0x2a95: "Two Zone Heart Rate Limit",
0x2a96: "VO2 Max",
0x2a97: "Waist Circumference",
0x2a98: "Weight",
0x2a99: "Database Change Increment",
0x2a9a: "User Index",
0x2a9b: "Body Composition Feature",
0x2a9c: "Body Composition Measurement",
0x2a9d: "Weight Measurement",
0x2a9e: "Weight Scale Feature",
0x2a9f: "User Control Point",
0x2aa0: "Magnetic Flux Density - 2D",
0x2aa1: "Magnetic Flux Density - 3D",
0x2aa2: "Language",
0x2aa3: "Barometric Pressure Trend",
0x2aa4: "Bond Management Control Point",
0x2aa5: "Bond Management Feature",
0x2aa6: "Central Address Resolution",
0x2aa7: "CGM Measurement",
0x2aa8: "CGM Feature",
0x2aa9: "CGM Status",
0x2aaa: "CGM Session Start Time",
0x2aab: "CGM Session Run Time",
0x2aac: "CGM Specific Ops Control Point",
0x2aad: "Indoor Positioning Configuration",
0x2aae: "Latitude",
0x2aaf: "Longitude",
0x2ab0: "Local North Coordinate",
0x2ab1: "Local East Coordinate",
0x2ab2: "Floor Number",
0x2ab3: "Altitude",
0x2ab4: "Uncertainty",
0x2ab5: "Location Name",
0x2ab6: "URI",
0x2ab7: "HTTP Headers",
0x2ab8: "HTTP Status Code",
0x2ab9: "HTTP Entity Body",
0x2aba: "HTTP Control Point",
0x2abb: "HTTPS Security",
0x2abc: "TDS Control Point",
0x2abd: "OTS Feature",
0x2abe: "Object Name",
0x2abf: "Object Type",
0x2ac0: "Object Size",
0x2ac1: "Object First-Created",
0x2ac2: "Object Last-Modified",
0x2ac3: "Object ID",
0x2ac4: "Object Properties",
0x2ac5: "Object Action Control Point",
0x2ac6: "Object List Control Point",
0x2ac7: "Object List Filter",
0x2ac8: "Object Changed",
# vendor defined
0xfeff: "GN Netcom",
0xfefe: "GN ReSound A/S",
0xfefd: "Gimbal, Inc.",
0xfefc: "Gimbal, Inc.",
0xfefb: "Stollmann E+V GmbH",
0xfefa: "PayPal, Inc.",
0xfef9: "PayPal, Inc.",
0xfef8: "Aplix Corporation",
0xfef7: "Aplix Corporation",
0xfef6: "Wicentric, Inc.",
0xfef5: "Dialog Semiconductor GmbH",
0xfef4: "Google",
0xfef3: "Google",
0xfef2: "CSR",
0xfef1: "CSR",
0xfef0: "Intel",
0xfeef: "Polar Electro Oy",
0xfeee: "Polar Electro Oy",
0xfeed: "Tile, Inc.",
0xfeec: "Tile, Inc.",
0xfeeb: "Swirl Networks, Inc.",
0xfeea: "Swirl Networks, Inc.",
0xfee9: "Quintic Corp.",
0xfee8: "Quintic Corp.",
0xfee7: "Tencent Holdings Limited",
0xfee6: "Seed Labs, Inc.",
0xfee5: "Nordic Semiconductor ASA",
0xfee4: "Nordic Semiconductor ASA",
0xfee3: "Anki, Inc.",
0xfee2: "Anki, Inc.",
0xfee1: "Anhui Huami Information Technology Co.",
0xfee0: "Anhui Huami Information Technology Co.",
0xfedf: "Design SHIFT",
0xfede: "Coin, Inc.",
0xfedd: "Jawbone",
0xfedc: "Jawbone",
0xfedb: "Perka, Inc.",
0xfeda: "ISSC Technologies Corporation",
0xfed9: "Pebble Technology Corporation",
0xfed8: "Google",
0xfed7: "Broadcom Corporation",
0xfed6: "Broadcom Corporation",
0xfed5: "Plantronics Inc.",
0xfed4: "Apple, Inc.",
0xfed3: "Apple, Inc.",
0xfed2: "Apple, Inc.",
0xfed1: "Apple, Inc.",
0xfed0: "Apple, Inc.",
0xfecf: "Apple, Inc.",
0xfece: "Apple, Inc.",
0xfecd: "Apple, Inc.",
0xfecc: "Apple, Inc.",
0xfecb: "Apple, Inc.",
0xfeca: "Apple, Inc.",
0xfec9: "Apple, Inc.",
0xfec8: "Apple, Inc.",
0xfec7: "Apple, Inc.",
0xfec6: "Kocomojo, LLC",
0xfec5: "Realtek Semiconductor Corp.",
0xfec4: "PLUS Location Systems",
0xfec3: "360fly, Inc.",
0xfec2: "Blue Spark Technologies, Inc.",
0xfec1: "KDDI Corporation",
0xfec0: "KDDI Corporation",
0xfebf: "Nod, Inc.",
0xfebe: "Bose Corporation",
0xfebd: "Clover Network, Inc.",
0xfebc: "Dexcom, Inc.",
0xfebb: "adafruit industries",
0xfeba: "Tencent Holdings Limited",
0xfeb9: "LG Electronics",
0xfeb8: "Facebook, Inc.",
0xfeb7: "Facebook, Inc.",
0xfeb6: "Vencer Co, Ltd",
0xfeb5: "WiSilica Inc.",
0xfeb4: "WiSilica Inc.",
0xfeb3: "Taobao",
0xfeb2: "Microsoft Corporation",
0xfeb1: "Electronics Tomorrow Limited",
0xfeb0: "Nest Labs Inc.",
0xfeaf: "Nest Labs Inc.",
0xfeae: "Nokia Corporation",
0xfead: "Nokia Corporation",
0xfeac: "Nokia Corporation",
0xfeab: "Nokia Corporation",
0xfeaa: "Google",
0xfea9: "Savant Systems LLC",
0xfea8: "Savant Systems LLC",
0xfea7: "UTC Fire and Security",
0xfea6: "GoPro, Inc.",
0xfea5: "GoPro, Inc.",
0xfea4: "Paxton Access Ltd",
0xfea3: "ITT Industries",
0xfea2: "Intrepid Control Systems, Inc.",
0xfea1: "Intrepid Control Systems, Inc.",
0xfea0: "Google",
0xfe9f: "Google",
0xfe9e: "Dialog Semiconductor B.V.",
0xfe9d: "Mobiquity Networks Inc",
0xfe9c: "GSI Laboratories, Inc.",
0xfe9b: "Samsara Networks, Inc",
0xfe9a: "Estimote",
0xfe99: "Currant, Inc.",
0xfe98: "Currant, Inc.",
0xfe97: "Tesla Motor Inc.",
0xfe96: "Tesla Motor Inc.",
0xfe95: "Xiaomi Inc.",
0xfe94: "OttoQ Inc.",
0xfe93: "OttoQ Inc.",
0xfe92: "Jarden Safety & Security",
0xfe91: "Shanghai Imilab Technology Co.,Ltd",
0xfe90: "JUMA",
0xfe8f: "CSR",
0xfe8e: "ARM Ltd",
0xfe8d: "Interaxon Inc.",
0xfe8c: "TRON Forum",
0xfe8b: "Apple, Inc.",
0xfe8a: "Apple, Inc.",
0xfe89: "B&O Play A/S",
0xfe88: "SALTO SYSTEMS S.L.",
0xfe87: "Qingdao Yeelink Information Technology Co., Ltd. ( 青岛亿联客信息技术有限公司 )",
0xfe86: "HUAWEI Technologies Co., Ltd. ( 华为技术有限公司 )",
0xfe85: "RF Digital Corp",
0xfe84: "RF Digital Corp",
0xfe83: "Blue Bite",
0xfe82: "Medtronic Inc.",
0xfe81: "Medtronic Inc.",
0xfe80: "Doppler Lab",
0xfe7f: "Doppler Lab",
0xfe7e: "Awear Solutions Ltd",
0xfe7d: "Aterica Health Inc.",
0xfe7c: "Stollmann E+V GmbH",
0xfe7b: "Orion Labs, Inc.",
0xfe7a: "Bragi GmbH",
0xfe79: "Zebra Technologies",
0xfe78: "Hewlett-Packard Company",
0xfe77: "Hewlett-Packard Company",
0xfe76: "TangoMe",
0xfe75: "TangoMe",
0xfe74: "unwire",
0xfe73: "St. Jude Medical, Inc.",
0xfe72: "St. Jude Medical, Inc.",
0xfe71: "Plume Design Inc",
0xfe70: "Beijing Jingdong Century Trading Co., Ltd.",
0xfe6f: "LINE Corporation",
0xfe6e: "The University of Tokyo",
0xfe6d: "The University of Tokyo",
0xfe6c: "TASER International, Inc.",
0xfe6b: "TASER International, Inc.",
0xfe6a: "Kontakt Micro-Location Sp. z o.o.",
0xfe69: "Qualcomm Life Inc",
0xfe68: "Qualcomm Life Inc",
0xfe67: "Lab Sensor Solutions",
0xfe66: "Intel Corporation",
# SDO defined
0xfffe: "Alliance for Wireless Power (A4WP)",
0xfffd: "Fast IDentity Online Alliance (FIDO)",
}
ALL_128BIT_UUIDS = {
"a3c87500-8ed3-4bdf-8a39-a01bebede295": "Eddystone Configuration Service",
"a3c87501-8ed3-4bdf-8a39-a01bebede295": "Capabilities",
"a3c87502-8ed3-4bdf-8a39-a01bebede295": "Active Slot",
"a3c87503-8ed3-4bdf-8a39-a01bebede295": "Advertising Interval",
"a3c87504-8ed3-4bdf-8a39-a01bebede295": "Radio Tx Power",
"a3c87505-8ed3-4bdf-8a39-a01bebede295": "(Advanced) Advertised Tx Power",
"a3c87506-8ed3-4bdf-8a39-a01bebede295": "Lock State",
"a3c87507-8ed3-4bdf-8a39-a01bebede295": "Unlock",
"a3c87508-8ed3-4bdf-8a39-a01bebede295": "Public ECDH Key",
"a3c87509-8ed3-4bdf-8a39-a01bebede295": "EID Identity Key",
"a3c8750a-8ed3-4bdf-8a39-a01bebede295": "ADV Slot Data",
"a3c8750b-8ed3-4bdf-8a39-a01bebede295": "(Advanced) Factory reset",
"a3c8750c-8ed3-4bdf-8a39-a01bebede295": "(Advanced) Remain Connectable",
# BBC micro:bit Bluetooth Profiles
"e95d0753-251d-470a-a062-fa1922dfa9a8": "MicroBit Accelerometer Service",
"e95dca4b-251d-470a-a062-fa1922dfa9a8": "MicroBit Accelerometer Data",
"e95dfb24-251d-470a-a062-fa1922dfa9a8": "MicroBit Accelerometer Period",
"e95df2d8-251d-470a-a062-fa1922dfa9a8": "MicroBit Magnetometer Service",
"e95dfb11-251d-470a-a062-fa1922dfa9a8": "MicroBit Magnetometer Data",
"e95d386c-251d-470a-a062-fa1922dfa9a8": "MicroBit Magnetometer Period",
"e95d9715-251d-470a-a062-fa1922dfa9a8": "MicroBit Magnetometer Bearing",
"e95d9882-251d-470a-a062-fa1922dfa9a8": "MicroBit Button Service",
"e95dda90-251d-470a-a062-fa1922dfa9a8": "MicroBit Button A State",
"e95dda91-251d-470a-a062-fa1922dfa9a8": "MicroBit Button B State",
"e95d127b-251d-470a-a062-fa1922dfa9a8": "MicroBit IO PIN Service",
"e95d8d00-251d-470a-a062-fa1922dfa9a8": "MicroBit PIN Data",
"e95d5899-251d-470a-a062-fa1922dfa9a8": "MicroBit PIN AD Configuration",
"e95dd822-251d-470a-a062-fa1922dfa9a8": "MicroBit PWM Control",
"e95dd91d-251d-470a-a062-fa1922dfa9a8": "MicroBit LED Service",
"e95d7b77-251d-470a-a062-fa1922dfa9a8": "MicroBit LED Matrix state",
"e95d93ee-251d-470a-a062-fa1922dfa9a8": "MicroBit LED Text",
"e95d0d2d-251d-470a-a062-fa1922dfa9a8": "MicroBit Scrolling Delay",
"e95d93af-251d-470a-a062-fa1922dfa9a8": "MicroBit Event Service",
"e95db84c-251d-470a-a062-fa1922dfa9a8": "MicroBit Requirements",
"e95d9775-251d-470a-a062-fa1922dfa9a8": "MicroBit Event Data",
"e95d23c4-251d-470a-a062-fa1922dfa9a8": "MicroBit Client Requirements",
"e95d5404-251d-470a-a062-fa1922dfa9a8": "MicroBit Client Events",
"e95d93b0-251d-470a-a062-fa1922dfa9a8": "MicroBit DFU Control Service",
"e95d93b1-251d-470a-a062-fa1922dfa9a8": "MicroBit DFU Control",
"e95d6100-251d-470a-a062-fa1922dfa9a8": "MicroBit Temperature Service",
"e95d1b25-251d-470a-a062-fa1922dfa9a8": "MicroBit Temperature Period",
# Nordic UART Port Emulation
"6e400001-b5a3-f393-e0a9-e50e24dcca9e": "Nordic UART Service",
"6e400002-b5a3-f393-e0a9-e50e24dcca9e": "Nordic UART TX",
"6e400003-b5a3-f393-e0a9-e50e24dcca9e": "Nordic UART RX",
}
def uuid_to_string(uuid):
"""
For a given UUID string, try to determine the textual equivalent
of the GATT service or characteristic.
"""
if not isinstance(uuid, str):
raise TypeError("Expected a UUID string.")
if len(uuid) != 36:
raise ValueError("Expected the UUID string to be 36 characters long.")
uuid_text = ALL_128BIT_UUIDS.get(uuid, None)
if uuid_text is not None:
return uuid_text
else:
if uuid.endswith("-0000-1000-8000-00805f9b34fb"):
uuid_service = int(uuid[:8], 16)
return ALL_16BIT_UUIDS.get(uuid_service, None)
else:
return None
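# Example usage (a sketch; the lookup is case-sensitive, so UUID strings
# should be lower-case, matching the tables above):
#
#   >>> uuid_to_string("0000180d-0000-1000-8000-00805f9b34fb")
#   'Heart Rate'
#   >>> uuid_to_string("6e400001-b5a3-f393-e0a9-e50e24dcca9e")
#   'Nordic UART Service'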
| mit | 6,484,401,005,254,201,000 | 30.06422 | 81 | 0.698386 | false |
opennode/nodeconductor | waldur_core/logging/serializers.py | 1 | 5531 |
from django.db import IntegrityError
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from waldur_core.core.fields import MappedChoiceField, NaturalChoiceField
from waldur_core.core.serializers import GenericRelatedField
from waldur_core.logging import models, utils, loggers
class AlertSerializer(serializers.HyperlinkedModelSerializer):
scope = GenericRelatedField(related_models=utils.get_loggable_models())
severity = MappedChoiceField(
choices=[(v, k) for k, v in models.Alert.SeverityChoices.CHOICES],
choice_mappings={v: k for k, v in models.Alert.SeverityChoices.CHOICES},
)
context = serializers.JSONField(read_only=True)
class Meta(object):
model = models.Alert
fields = (
'url', 'uuid', 'alert_type', 'message', 'severity', 'scope',
'created', 'closed', 'context', 'acknowledged',
)
read_only_fields = ('uuid', 'created', 'closed')
extra_kwargs = {
'url': {'lookup_field': 'uuid'},
}
def create(self, validated_data):
try:
alert, created = loggers.AlertLogger().process(
severity=validated_data['severity'],
message_template=validated_data['message'],
scope=validated_data['scope'],
alert_type=validated_data['alert_type'],
)
except IntegrityError:
# In case of simultaneous requests serializer validation can pass for both alerts,
# so we need to handle DB IntegrityError separately.
raise serializers.ValidationError(_('Alert with given type and scope already exists.'))
else:
return alert
class EventSerializer(serializers.Serializer):
level = serializers.ChoiceField(choices=['debug', 'info', 'warning', 'error'])
message = serializers.CharField()
scope = GenericRelatedField(related_models=utils.get_loggable_models(), required=False)
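# A minimal usage sketch for the serializer above (hypothetical payload;
# 'scope' may be omitted because it is declared with required=False):
#
#   serializer = EventSerializer(data={'level': 'info', 'message': 'Quota exceeded'})
#   serializer.is_valid(raise_exception=True)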
class BaseHookSerializer(serializers.HyperlinkedModelSerializer):
author_uuid = serializers.ReadOnlyField(source='user.uuid')
hook_type = serializers.SerializerMethodField()
class Meta(object):
model = models.BaseHook
fields = (
'url', 'uuid', 'is_active', 'author_uuid',
'event_types', 'event_groups', 'created', 'modified',
'hook_type'
)
extra_kwargs = {
'url': {'lookup_field': 'uuid'},
}
def get_fields(self):
"""
When static declaration is used, event type choices are fetched too early -
even before all apps are initialized. As a result, some event types are missing.
When dynamic declaration is used, all valid event types are available as choices.
"""
fields = super(BaseHookSerializer, self).get_fields()
fields['event_types'] = serializers.MultipleChoiceField(
choices=loggers.get_valid_events(), required=False)
fields['event_groups'] = serializers.MultipleChoiceField(
choices=loggers.get_event_groups_keys(), required=False)
return fields
def create(self, validated_data):
validated_data['user'] = self.context['request'].user
return super(BaseHookSerializer, self).create(validated_data)
def validate(self, attrs):
if not self.instance and 'event_types' not in attrs and 'event_groups' not in attrs:
raise serializers.ValidationError(_('Please specify list of event_types or event_groups.'))
if 'event_groups' in attrs:
events = list(attrs.get('event_types', []))
groups = list(attrs.get('event_groups', []))
events = sorted(set(loggers.expand_event_groups(groups)) | set(events))
attrs['event_types'] = events
attrs['event_groups'] = groups
elif 'event_types' in attrs:
attrs['event_types'] = list(attrs['event_types'])
return attrs
def get_hook_type(self, hook):
raise NotImplementedError
class SummaryHookSerializer(serializers.Serializer):
def to_representation(self, instance):
serializer = self.get_hook_serializer(instance.__class__)
return serializer(instance, context=self.context).data
def get_hook_serializer(self, cls):
for serializer in BaseHookSerializer.__subclasses__():
if serializer.Meta.model == cls:
return serializer
raise ValueError('Hook serializer for %s class is not found' % cls)
class WebHookSerializer(BaseHookSerializer):
content_type = NaturalChoiceField(models.WebHook.ContentTypeChoices.CHOICES, required=False)
class Meta(BaseHookSerializer.Meta):
model = models.WebHook
fields = BaseHookSerializer.Meta.fields + ('destination_url', 'content_type')
def get_hook_type(self, hook):
return 'webhook'
class PushHookSerializer(BaseHookSerializer):
type = NaturalChoiceField(models.PushHook.Type.CHOICES)
class Meta(BaseHookSerializer.Meta):
model = models.PushHook
fields = BaseHookSerializer.Meta.fields + ('type', 'device_id', 'token', 'device_manufacturer', 'device_model')
def get_hook_type(self, hook):
return 'pushhook'
class EmailHookSerializer(BaseHookSerializer):
class Meta(BaseHookSerializer.Meta):
model = models.EmailHook
fields = BaseHookSerializer.Meta.fields + ('email', )
def get_hook_type(self, hook):
return 'email'
| mit | 1,335,064,343,548,479,000 | 36.371622 | 119 | 0.654131 | false |
CanalTP/Chaos | tests/disruption_test.py | 1 | 3495 |
import sys
import chaos.publisher
if 'threading' in sys.modules:
del sys.modules['threading']
from nose.tools import assert_false, eq_
from chaos import models
from chaos.utils import send_disruption_to_navitia
from mock import MagicMock
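# These tests are written for nose (note the nose.tools import above);
# a typical invocation from the repository root would be:
#   nosetests tests/disruption_test.py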
def test_disruption_with_draft_status_is_not_sent():
'''
Tests that a disruption with status draft is not sent to navitia
:return:
'''
disruption = models.Disruption()
disruption.status = 'draft'
has_been_sent = send_disruption_to_navitia(disruption)
eq_(has_been_sent, True)
def test_disruption_with_archived_status_is_sent():
'''
Tests that a disruption with status archived is sent to navitia
:return:
'''
disruption = models.Disruption()
disruption.contributor = models.Contributor()
disruption.contributor_id = disruption.contributor.id
disruption.status = 'archived'
has_been_sent = send_disruption_to_navitia(disruption)
eq_(has_been_sent, True)
def test_disruption_with_published_status_is_sent():
'''
Tests that a disruption with status published is sent to navitia
:return:
'''
disruption = models.Disruption()
#contributor
disruption.contributor = models.Contributor()
disruption.contributor_id = disruption.contributor.id
#cause
disruption.cause = models.Cause()
disruption.cause.wording = "CauseTest"
disruption.cause.category = models.Category()
disruption.cause.category.name = "CategoryTest"
disruption.reference = "DisruptionTest"
#localization
localization = models.PTobject()
localization.uri = "stop_area:123"
localization.type = "stop_area"
disruption.localizations.append(localization)
# Wording
wording = models.Wording()
wording.key = "key_1"
wording.value = "value_1"
disruption.cause.wordings.append(wording)
wording = models.Wording()
wording.key = "key_2"
wording.value = "value_2"
disruption.cause.wordings.append(wording)
# Tag
tag = models.Tag()
tag.name = "rer"
disruption.tags.append(tag)
disruption.status = 'published'
has_been_sent = send_disruption_to_navitia(disruption)
eq_(has_been_sent, True)
def test_disruption_with_rabbitmq_exception():
'''
Tests when a disruption fail to publish to rabbitmq
:return:
'''
disruption = models.Disruption()
#contributor
disruption.contributor = models.Contributor()
disruption.contributor_id = disruption.contributor.id
#cause
disruption.cause = models.Cause()
disruption.cause.wording = "CauseTest"
disruption.cause.category = models.Category()
disruption.cause.category.name = "CategoryTest"
disruption.reference = "DisruptionTest"
#localization
localization = models.PTobject()
localization.uri = "stop_area:123"
localization.type = "stop_area"
disruption.localizations.append(localization)
# Wording
wording = models.Wording()
wording.key = "key_1"
wording.value = "value_1"
disruption.cause.wordings.append(wording)
wording = models.Wording()
wording.key = "key_2"
wording.value = "value_2"
disruption.cause.wordings.append(wording)
# Tag
tag = models.Tag()
tag.name = "rer"
disruption.tags.append(tag)
disruption.status = 'published'
chaos.publisher.publish = MagicMock(return_value=False)
to_rabbitmq_not_sent = send_disruption_to_navitia(disruption)
eq_(to_rabbitmq_not_sent, False)
| agpl-3.0 | 2,251,519,589,489,508,400 | 27.193548 | 68 | 0.692704 | false |
omermahgoub/MigTool | Process/ResizeWorker.py | 1 | 4412 |
__author__ = 'OmerMahgoub'
#!/usr/bin/env python
import time
import ast
import random
import pika
from common.VM import OpenStackAdapterVMInstance
from settings import settings
objSettings = settings.StackSettings()
# The below settings are coming from settings/setting.py which in return getting all the configurations from config.yml file
# Start of Settings Configuration
stackSettings = objSettings.ServiceSettings("OpenStack")
queueHostSettings = objSettings.ServiceSettings("rabbitMQ")
queueSettings = objSettings.ServiceSettings("QueueSettings")
# End of Settings Configuration
# Connection Initialization for RabbitMQ Server
count = queueSettings['retryCount']
connection = pika.BlockingConnection(pika.ConnectionParameters(
host=queueHostSettings["host"]))
channel = connection.channel()
# User Queue Declaration
channel.queue_declare(queue=queueSettings['ResizeQueueName'], durable=True)
print ' [*] Waiting for messages. To exit press CTRL+C'
# Notification_Queue accepts the processed message and forwards it to the notification queue (currently a stub)
def Notification_Queue(UserMsg):
pass
# The callback method is invoked for each message delivered from the queue and keeps consuming until stopped
def callback(ch, method, properties, body):
print " [x] Received %r" % (body,)
time.sleep(5)
# Split the body into separate items
strMessage = ast.literal_eval(body)
bodyMessage = strMessage['body']
planMessage = strMessage['PlanDetails']['ItemDetails']
itemsList = bodyMessage.split(",")
orderId = itemsList[0]
userid = itemsList[1]
email = itemsList[2]
planname = itemsList[3]
projectname = itemsList[4]
requestType = itemsList[5]
serverId = itemsList[6]
objStack = OpenStackAdapterVMInstance()
# def VMQueue(self, VmName, ImageName, FlavorName, ProjectName):
msg = objStack.ResizeInstance(ServerId = serverId, FlavorName = planMessage['Flavor'], ProjectName = projectname)
print "VM Resize Status (True/False) %s" % msg
# First get the retry count from the message headers
print "Retry Count: %s" % properties.headers["retry_count"]
if properties.headers["retry_count"] > count:
print("Saving in DB")
ch.basic_ack(delivery_tag=method.delivery_tag)
else:
try:
if msg['Status'] == False or msg['Status'] == "Error":
raise Exception("VM can't be created due to some reasons.Re-Queuing the Message again")
else:
ch.basic_ack(delivery_tag=method.delivery_tag)
print "Successfully Operated and removed from Queue"
# Throw the Project Creation Message to User Queue
Notification_Queue(body)
# End of Throwing Message to Project Queue
except:
print "Just Reached Exception Area"
print "Before setting header, Count was %s" % properties.headers["retry_count"]
# Setting the Header and incrementing to 1
headers = { # example how headers can be used
'retry_count': properties.headers["retry_count"] + 1
}
# Creating the message in the Queue Again
channel.basic_publish(
exchange='',
routing_key=queueSettings['ResizeQueueName'],
body=body, # must be string
properties=pika.BasicProperties(
delivery_mode=2, # makes persistent job
priority=0, # default priority
# timestamp=timestamp, # timestamp of job creation
# expiration=str(expire), # job expiration (milliseconds from now), must be string, handled by rabbitmq
headers=headers
))
# Acknowledge the original message so it is removed from the queue after the retry copy has been re-published
channel.basic_ack(delivery_tag=method.delivery_tag)
print "Queue Acknowledged and removed"
print "[++++++Done+++++]"
print
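# For reference, the initial producer must seed the 'retry_count' header,
# because callback() reads it unconditionally. A minimal sketch (queue name
# and body format assumed from the settings and parsing logic above):
#
#   channel.basic_publish(
#       exchange='',
#       routing_key=queueSettings['ResizeQueueName'],
#       body=message_body,
#       properties=pika.BasicProperties(delivery_mode=2,
#                                       headers={'retry_count': 0}))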
channel.basic_qos(prefetch_count=1)
channel.basic_consume(callback, queue=queueSettings['ResizeQueueName'])
try:
channel.start_consuming()
except KeyboardInterrupt:
channel.stop_consuming()
connection.close()
| gpl-3.0 | 1,589,770,511,240,852,000 | 35.092437 | 124 | 0.644379 | false |
tuos/FlowAndCorrelations | model/ampt/production/v3/run0pPbv3x10more/ampt_StringMelting_pPb5020GeV_cfg.py | 1 | 5886 |
# Auto generated configuration file
# using:
# Revision: 1.168
# Source: /cvs_server/repositories/CMSSW/CMSSW/Configuration/PyReleaseValidation/python/ConfigBuilder.py,v
# with command line options: GeneratorInterface/AMPTInterface/amptDefault_cfi.py -s GEN --conditions auto:mc --datatier GEN --eventcontent RAWSIM -n 1 --scenario HeavyIons --no_exec
import FWCore.ParameterSet.Config as cms
process = cms.Process('GEN')
# import of standard configurations
process.load('SimGeneral.MixingModule.mixNoPU_cfi')
process.load('Configuration.StandardSequences.GeometryRecoDB_cff')
process.load('Configuration.Geometry.GeometrySimDB_cff')
process.load('Configuration.StandardSequences.MagneticField_38T_cff')
process.load('Configuration.StandardSequences.Generator_cff')
process.load('IOMC.EventVertexGenerators.VtxSmearedRealistic8TeVCollision_cfi')
process.load('GeneratorInterface.Core.genFilterSummary_cff')
process.load('Configuration.StandardSequences.SimIdeal_cff')
process.load('Configuration.StandardSequences.EndOfProcess_cff')
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
process.load('Configuration.StandardSequences.Services_cff')
process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi')
process.load('FWCore.MessageService.MessageLogger_cfi')
process.load('Configuration.EventContent.EventContentHeavyIons_cff')
process.configurationMetadata = cms.untracked.PSet(
version = cms.untracked.string('$Revision: 1.2 $'),
annotation = cms.untracked.string('AMPT generator'),
name = cms.untracked.string('$Source: /cvs_server/repositories/CMSSW/CMSSW/GeneratorInterface/AMPTInterface/python/amptDefault_cfi.py,v $')
)
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(10)
)
process.options = cms.untracked.PSet(
)
# Input source
process.source = cms.Source("EmptySource")
# Output definition
process.output = cms.OutputModule("PoolOutputModule",
splitLevel = cms.untracked.int32(0),
outputCommands = process.RAWSIMEventContent.outputCommands,
fileName = cms.untracked.string('ampt_StringMelting_pPb5020GeV_GEN.root'),
dataset = cms.untracked.PSet(
dataTier = cms.untracked.string('GEN'),
filterName = cms.untracked.string('')
),
SelectEvents = cms.untracked.PSet(
SelectEvents = cms.vstring('generation_step')
)
)
# Additional output definition
# Other statements
process.GlobalTag.globaltag = 'STARTHI71_V2::All'
process.generator = cms.EDFilter("AMPTGeneratorFilter",
comEnergy = cms.double(5020.0),
frame = cms.string('CMS'),
proj = cms.string('P'),
targ = cms.string('A'),
iap = cms.int32(1),
izp = cms.int32(1),
iat = cms.int32(208),
izt = cms.int32(82),
bMin = cms.double(0),
bMax = cms.double(8.31),
amptmode = cms.int32(4),
ntmax = cms.int32(150),
dt = cms.double(0.2),
stringFragA = cms.double(0.5),
stringFragB = cms.double(0.9),
popcornmode = cms.bool(True),
popcornpar = cms.double(1.0),
shadowingmode = cms.bool(True),
quenchingmode = cms.bool(False),
quenchingpar = cms.double(1.0),
pthard = cms.double(2.0),
mu = cms.double(2.2814),
izpc = cms.int32(0),
alpha = cms.double(0.33333),
dpcoal = cms.double(1000000.0),
drcoal = cms.double(1000000.0),
ks0decay = cms.bool(False),
phidecay = cms.bool(True), ##no pi0 decay flag
deuteronmode = cms.int32(0),
deuteronfactor = cms.int32(1),
deuteronxsec = cms.int32(1),
minijetpt = cms.double(-7.0),
maxmiss = cms.int32(1000),
doInitialAndFinalRadiation = cms.int32(3),
ktkick = cms.int32(1),
diquarkembedding = cms.int32(0),
diquarkpx = cms.double(7.0),
diquarkpy = cms.double(0.0),
diquarkx = cms.double(0.0),
diquarky = cms.double(0.0),
rotateEventPlane = cms.bool(True),
firstRun = cms.untracked.uint32(1),
firstEvent = cms.untracked.uint32(1)
)
# Path and EndPath definitions
#process.generation_step = cms.Path(process.pgen_hi)
process.generation_step = cms.Path(process.generator)
process.endjob_step = cms.Path(process.endOfProcess)
process.out_step = cms.EndPath(process.output)
# Schedule definition
process.schedule = cms.Schedule(process.generation_step,process.endjob_step,process.out_step)
from IOMC.RandomEngine.RandomServiceHelper import RandomNumberServiceHelper
randSvc = RandomNumberServiceHelper(process.RandomNumberGeneratorService)
randSvc.populate()
# special treatment in case of production filter sequence
for path in process.paths:
getattr(process,path)._seq = process.generator*getattr(process,path)._seq
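# To generate events with this configuration, use the standard CMSSW
# invocation (assumes an environment providing the STARTHI71_V2 global tag):
#   cmsRun ampt_StringMelting_pPb5020GeV_cfg.py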
| mit | -9,084,353,928,747,316,000 | 47.644628 | 181 | 0.576622 | false |
n9code/calm | setup.py | 1 | 1439 |
from os import path
import codecs
from setuptools import setup, find_packages
here = path.abspath(path.dirname(__file__))
with codecs.open(path.join(here, 'requirements.txt'),
encoding='utf-8') as reqs:
requirements = reqs.read()
setup(
name='calm',
version='0.1.4',
description='It is always Calm before a Tornado!',
long_description="""
Calm is an extension to Tornado Framework for building RESTful APIs.
Navigate to http://calm.n9co.de for more information.
""",
url='http://calm.n9co.de',
author='Bagrat Aznauryan',
author_email='[email protected]',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
keywords='tornado rest restful api framework',
packages=find_packages(exclude=['docs', 'tests']),
install_requires=requirements,
)
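# Typical installation from a source checkout (standard setuptools workflow):
#   pip install .
# or, for a development install:
#   pip install -e .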
| mit | -1,791,420,317,329,591,000 | 25.648148 | 79 | 0.624044 | false |
biolauncher/pycudalibs | test/test_trans.py | 1 | 2069 |
import unittest
import cunumpy as cn
import numpy as np
import math
import test
class TestTranspose(unittest.TestCase):
def setUp(self):
self.real_veca = [3,4,0,11.,12,15.1,-2.,9.,14.,16.7]
self.real_vecb = [7.0003,4.1,9.7,1.03,1.1210008,1.1500013,-2.9876,9.0002,3005.01,22.000007]
self.complex_veca = [1, 0, 2+4j, -1-math.pi*1j, 2.-3.7j, 5., 3.j, -2.3j, -math.e+math.e*1j, 7]
self.complex_vecb = [1, 0, 12-4.09j, -5.00002+math.pi*1j,
22.-13.077j, 5., 2.003+3.004j, -2.3j, 1j, 0.0007j]
self.real_mata = [self.real_veca for i in range(1,5)]
self.real_matb = [self.real_vecb for i in range(1,11)]
self.complex_mata = [self.complex_veca for i in range(1,5)]
self.complex_matb = [self.complex_vecb for i in range(1,11)]
def test_vector_transpose_idempotency(self):
# transpose is a noop for vectors - they are always columns
v = cn.array(self.real_veca, dtype=cn.float32)
self.assert_(test.arrays_equal(v.T.toarray(), v.toarray()))
def test_matrix_transpose_idempotency(self):
a = cn.array(self.real_mata, dtype=cn.float32)
self.assert_(test.arrays_equal(a.toarray(), a.T.T.toarray()))
def test_linalg_transpose_idempotency(self):
a = cn.array(self.real_mata, dtype=cn.float32)
b = cn.array(self.real_matb, dtype=cn.float32)
c = a.dot(b)
d = b.T.dot(a.T).T
self.assert_(test.arrays_equal(c.toarray(), d.toarray(), epsilon=0.05))
def suite_single():
suite = unittest.TestSuite()
tests = ['test_vector_transpose_idempotency',
'test_matrix_transpose_idempotency',
'test_linalg_transpose_idempotency'
]
return unittest.TestSuite(map(TestTranspose, tests))
def suite_double():
suite = unittest.TestSuite()
tests = []
return unittest.TestSuite(map(TestTranspose, tests))
def suite():
return unittest.TestSuite(suite_single())
if __name__ == "__main__":
unittest.TextTestRunner(verbosity=2).run(suite())
| gpl-3.0 | -4,447,343,407,113,610,000 | 36.618182 | 102 | 0.61914 | false |
sqall01/alertR | managerClientConsole/lib/manager/elementAlertLevel.py
#!/usr/bin/env python3
# written by sqall
# twitter: https://twitter.com/sqall01
# blog: https://h4des.org
# github: https://github.com/sqall01
#
# Licensed under the GNU Affero General Public License, version 3.
import time
import urwid
from typing import List
from ..globalData import ManagerObjSensor, ManagerObjAlert, ManagerObjAlertLevel, ManagerObjProfile, SensorDataType
# this class is an urwid object for an alert level
class AlertLevelUrwid:
def __init__(self, alertLevel: ManagerObjAlertLevel):
# store reference to alert level object
self.alertLevel = alertLevel
# store reference in alert level object to
# this urwid alert level object
self.alertLevel.internal_data["urwid"] = self
alertLevelPileList = list()
self.nameWidget = urwid.Text("Name: " + self.alertLevel.name)
alertLevelPileList.append(self.nameWidget)
alertLevelPile = urwid.Pile(alertLevelPileList)
alertLevelBox = urwid.LineBox(alertLevelPile, title="Level: %d" % self.alertLevel.level)
paddedAlertLevelBox = urwid.Padding(alertLevelBox, left=1, right=1)
# set the color of the urwid object
self.alertLevelUrwidMap = urwid.AttrMap(paddedAlertLevelBox, "greenColor")
self.alertLevelUrwidMap.set_focus_map({None: "greenColor_focus"})
# this function returns the final urwid widget that is used
# to render the box of an alert level
def get(self) -> urwid.AttrMap:
return self.alertLevelUrwidMap
# this function updates the description of the object
def updateName(self, name: str):
self.nameWidget.set_text("Name: " + name)
# this function changes the color of this urwid object to red
def turnRed(self):
self.alertLevelUrwidMap.set_attr_map({None: "redColor"})
self.alertLevelUrwidMap.set_focus_map({None: "redColor_focus"})
# this function changes the color of this urwid object to green
def turnGreen(self):
self.alertLevelUrwidMap.set_attr_map({None: "greenColor"})
self.alertLevelUrwidMap.set_focus_map({None: "greenColor_focus"})
# this function changes the color of this urwid object to gray
def turnGray(self):
self.alertLevelUrwidMap.set_attr_map({None: "grayColor"})
self.alertLevelUrwidMap.set_focus_map({None: "grayColor_focus"})
# this function changes the color of this urwid object to the
# neutral color scheme
def turnNeutral(self):
self.alertLevelUrwidMap.set_attr_map({None: "neutral"})
# this function updates all internal widgets and checks if
# the alert level still exists
def updateCompleteWidget(self):
# check if alert level still exists
if self.alertLevel.is_deleted():
# return false if object no longer exists
return False
self.turnGreen()
self.updateName(self.alertLevel.name)
# return true if object was updated
return True
# this functions sets the color when the connection to the server has failed.
def setConnectionFail(self):
self.alertLevelUrwidMap.set_attr_map({None: "connectionfail"})
self.alertLevelUrwidMap.set_focus_map({None: "connectionfail_focus"})
# this class is an urwid object for a detailed alert level output
class AlertLevelDetailedUrwid:
def __init__(self,
alertLevel: ManagerObjAlertLevel,
sensors: List[ManagerObjSensor],
alerts: List[ManagerObjAlert],
profiles: List[ManagerObjProfile]):
self.alertLevel = alertLevel
content = list()
content.append(urwid.Divider("="))
content.append(urwid.Text("Alert Level"))
content.append(urwid.Divider("="))
temp = self._createAlertLevelWidgetList(alertLevel)
self.alertLevelPileWidget = urwid.Pile(temp)
content.append(self.alertLevelPileWidget)
content.append(urwid.Divider())
content.append(urwid.Divider("="))
content.append(urwid.Text("Profiles"))
content.append(urwid.Divider("="))
temp = self._create_profiles_widget_list(profiles)
self._profiles_pile_widget = urwid.Pile(temp)
content.append(self._profiles_pile_widget)
content.append(urwid.Divider())
content.append(urwid.Divider("="))
content.append(urwid.Text("Alerts"))
content.append(urwid.Divider("="))
temp = self._createAlertsWidgetList(alerts)
self.alertsPileWidget = urwid.Pile(temp)
content.append(self.alertsPileWidget)
content.append(urwid.Divider())
content.append(urwid.Divider("="))
content.append(urwid.Text("Sensors"))
content.append(urwid.Divider("="))
temp = self._createSensorsWidgetList(sensors)
self.sensorsPileWidget = urwid.Pile(temp)
content.append(self.sensorsPileWidget)
# use ListBox here because it handles all the
# scrolling part automatically
detailedList = urwid.ListBox(urwid.SimpleListWalker(content))
detailedFrame = urwid.Frame(detailedList, footer=urwid.Text("Keys: ESC - Back, Up/Down - Scrolling"))
self.detailedBox = urwid.LineBox(detailedFrame, title="Alert Level: " + self.alertLevel.name)
# this function creates the detailed output of a alert level object
# in a list
def _createAlertLevelWidgetList(self, alertLevel: ManagerObjAlertLevel) -> List[urwid.Widget]:
temp = list()
temp.append(urwid.Text("Alert Level:"))
temp.append(urwid.Text(str(alertLevel.level)))
temp.append(urwid.Divider())
temp.append(urwid.Text("Name:"))
temp.append(urwid.Text(alertLevel.name))
temp.append(urwid.Divider())
temp.append(urwid.Text("Profiles:"))
profile_str = ", ".join(map(lambda x: str(x), alertLevel.profiles))
temp.append(urwid.Text(profile_str))
temp.append(urwid.Divider())
temp.append(urwid.Text("Instrumentation Activated:"))
if alertLevel.instrumentation_active is None:
temp.append(urwid.Text("Undefined"))
elif alertLevel.instrumentation_active:
temp.append(urwid.Text("Yes"))
temp.append(urwid.Divider())
temp.append(urwid.Text("Instrumentation Cmd:"))
temp.append(urwid.Text(alertLevel.instrumentation_cmd))
temp.append(urwid.Divider())
temp.append(urwid.Text("Instrumentation Timeout:"))
temp.append(urwid.Text(str(alertLevel.instrumentation_timeout) + " Seconds"))
else:
temp.append(urwid.Text("No"))
return temp
# this function creates the detailed output of all alert objects
# in a list
def _createAlertsWidgetList(self, alerts: List[ManagerObjAlert]) -> List[urwid.Widget]:
temp = list()
first = True
for alert in alerts:
if first:
first = False
else:
temp.append(urwid.Divider())
temp.append(urwid.Divider("-"))
temp.extend(self._createAlertWidgetList(alert))
if not temp:
temp.append(urwid.Text("None"))
return temp
# this function creates the detailed output of a alert object
# in a list
def _createAlertWidgetList(self, alert: ManagerObjAlert) -> List[urwid.Widget]:
temp = list()
temp.append(urwid.Text("Node ID:"))
temp.append(urwid.Text(str(alert.nodeId)))
temp.append(urwid.Divider())
temp.append(urwid.Text("Alert ID:"))
temp.append(urwid.Text(str(alert.alertId)))
temp.append(urwid.Divider())
temp.append(urwid.Text("Client Alert ID:"))
temp.append(urwid.Text(str(alert.clientAlertId)))
temp.append(urwid.Divider())
temp.append(urwid.Text("Description:"))
temp.append(urwid.Text(alert.description))
return temp
# this function creates the detailed output of all profile objects in a list
def _create_profiles_widget_list(self, profiles: List[ManagerObjProfile]) -> List[urwid.Widget]:
temp = list()
first = True
for profile in profiles:
if first:
first = False
else:
temp.append(urwid.Divider())
temp.append(urwid.Divider("-"))
temp.extend(self._create_profile_widget_list(profile))
if not temp:
temp.append(urwid.Text("None"))
return temp
# this function creates the detailed output of a profile object in a list
def _create_profile_widget_list(self, profile: ManagerObjProfile) -> List[urwid.Widget]:
temp = list()
temp.append(urwid.Text("Profile ID:"))
temp.append(urwid.Text(str(profile.profileId)))
temp.append(urwid.Divider())
temp.append(urwid.Text("Name:"))
temp.append(urwid.Text(profile.name))
return temp
# this function creates the detailed output of all sensor objects
# in a list
def _createSensorsWidgetList(self, sensors: List[ManagerObjSensor]) -> List[urwid.Widget]:
temp = list()
first = True
for sensor in sensors:
if first:
first = False
else:
temp.append(urwid.Divider())
temp.append(urwid.Divider("-"))
temp.extend(self._createSensorWidgetList(sensor))
if not temp:
temp.append(urwid.Text("None"))
return temp
# this function creates the detailed output of a sensor object
# in a list
def _createSensorWidgetList(self, sensor: ManagerObjSensor) -> List[urwid.Widget]:
temp = list()
temp.append(urwid.Text("Node ID:"))
temp.append(urwid.Text(str(sensor.nodeId)))
temp.append(urwid.Divider())
temp.append(urwid.Text("Sensor ID:"))
temp.append(urwid.Text(str(sensor.sensorId)))
temp.append(urwid.Divider())
temp.append(urwid.Text("Client Sensor ID:"))
temp.append(urwid.Text(str(sensor.clientSensorId)))
temp.append(urwid.Divider())
temp.append(urwid.Text("Alert Delay:"))
temp.append(urwid.Text(str(sensor.alertDelay) + " Seconds"))
temp.append(urwid.Divider())
temp.append(urwid.Text("Description:"))
temp.append(urwid.Text(sensor.description))
temp.append(urwid.Divider())
temp.append(urwid.Text("State:"))
if sensor.state == 0:
temp.append(urwid.AttrMap(urwid.Text("Normal"), "neutral"))
elif sensor.state == 1:
temp.append(urwid.AttrMap(urwid.Text("Triggered"), "sensoralert"))
else:
temp.append(urwid.AttrMap(urwid.Text("Undefined"), "redColor"))
temp.append(urwid.Divider())
temp.append(urwid.Text("Data Type:"))
if sensor.dataType == SensorDataType.NONE:
temp.append(urwid.Text("None"))
elif sensor.dataType == SensorDataType.INT:
temp.append(urwid.Text("Integer"))
elif sensor.dataType == SensorDataType.FLOAT:
temp.append(urwid.Text("Floating Point"))
else:
temp.append(urwid.Text("Unknown"))
temp.append(urwid.Divider())
temp.append(urwid.Text("Data:"))
if sensor.dataType == SensorDataType.NONE:
temp.append(urwid.Text("None"))
else:
temp.append(urwid.Text(str(sensor.data)))
temp.append(urwid.Divider())
temp.append(urwid.Text("Last Updated (Server Time):"))
lastUpdatedWidget = urwid.Text(time.strftime("%D %H:%M:%S", time.localtime(sensor.lastStateUpdated)))
temp.append(lastUpdatedWidget)
return temp
# this function returns the final urwid widget that is used
# to render this object
def get(self) -> urwid.LineBox:
return self.detailedBox
# this function updates all internal widgets
def updateCompleteWidget(self,
sensors: List[ManagerObjSensor],
alerts: List[ManagerObjAlert],
profiles: List[ManagerObjProfile]):
self.updateAlertLevelDetails()
self.updateSensorsDetails(sensors)
self.updateAlertsDetails(alerts)
self.update_profile_details(profiles)
# this function updates the alert level information shown
def updateAlertLevelDetails(self):
        # create new alert level pile content
temp = self._createAlertLevelWidgetList(self.alertLevel)
# create a list of tuples for the pile widget
pileOptions = self.alertLevelPileWidget.options()
temp = [(x, pileOptions) for x in temp]
# empty pile widget contents and replace it with the new widgets
del self.alertLevelPileWidget.contents[:]
self.alertLevelPileWidget.contents.extend(temp)
# this function updates the node information shown
def updateAlertsDetails(self, alerts: List[ManagerObjAlert]):
        # create new alerts pile content
temp = self._createAlertsWidgetList(alerts)
# create a list of tuples for the pile widget
pileOptions = self.alertsPileWidget.options()
temp = [(x, pileOptions) for x in temp]
# empty pile widget contents and replace it with the new widgets
del self.alertsPileWidget.contents[:]
self.alertsPileWidget.contents.extend(temp)
def update_profile_details(self, profiles: List[ManagerObjProfile]):
"""
This function updates the profile information shown.
:param profiles:
"""
temp = self._create_profiles_widget_list(profiles)
# Create a list of tuples for the pile widget.
pile_options = self._profiles_pile_widget.options()
new_profiles_list = [(x, pile_options) for x in temp]
# Empty pile widget contents and replace it with the new widgets.
del self._profiles_pile_widget.contents[:]
self._profiles_pile_widget.contents.extend(new_profiles_list)
# this function updates the sensor information shown
def updateSensorsDetails(self, sensors: List[ManagerObjSensor]):
        # create new sensors pile content
temp = self._createSensorsWidgetList(sensors)
# create a list of tuples for the pile widget
pileOptions = self.sensorsPileWidget.options()
temp = [(x, pileOptions) for x in temp]
# empty pile widget contents and replace it with the new widgets
del self.sensorsPileWidget.contents[:]
self.sensorsPileWidget.contents.extend(temp)
sivaprakashniet/push_pull | p2p/lib/python2.7/site-packages/celery/app/defaults.py
# -*- coding: utf-8 -*-
"""
celery.app.defaults
~~~~~~~~~~~~~~~~~~~
Configuration introspection and defaults.
"""
from __future__ import absolute_import
import sys
from collections import deque, namedtuple
from datetime import timedelta
from celery.five import items
from celery.utils import strtobool
from celery.utils.functional import memoize
__all__ = ['Option', 'NAMESPACES', 'flatten', 'find']
is_jython = sys.platform.startswith('java')
is_pypy = hasattr(sys, 'pypy_version_info')
DEFAULT_POOL = 'prefork'
if is_jython:
DEFAULT_POOL = 'threads'
elif is_pypy:
if sys.pypy_version_info[0:3] < (1, 5, 0):
DEFAULT_POOL = 'solo'
else:
DEFAULT_POOL = 'prefork'
DEFAULT_ACCEPT_CONTENT = ['json', 'pickle', 'msgpack', 'yaml']
DEFAULT_PROCESS_LOG_FMT = """
[%(asctime)s: %(levelname)s/%(processName)s] %(message)s
""".strip()
DEFAULT_LOG_FMT = '[%(asctime)s: %(levelname)s] %(message)s'
DEFAULT_TASK_LOG_FMT = """[%(asctime)s: %(levelname)s/%(processName)s] \
%(task_name)s[%(task_id)s]: %(message)s"""
_BROKER_OLD = {'deprecate_by': '2.5', 'remove_by': '4.0',
'alt': 'BROKER_URL setting'}
_REDIS_OLD = {'deprecate_by': '2.5', 'remove_by': '4.0',
'alt': 'URL form of CELERY_RESULT_BACKEND'}
searchresult = namedtuple('searchresult', ('namespace', 'key', 'type'))
class Option(object):
alt = None
deprecate_by = None
remove_by = None
typemap = dict(string=str, int=int, float=float, any=lambda v: v,
bool=strtobool, dict=dict, tuple=tuple)
def __init__(self, default=None, *args, **kwargs):
self.default = default
self.type = kwargs.get('type') or 'string'
for attr, value in items(kwargs):
setattr(self, attr, value)
def to_python(self, value):
return self.typemap[self.type](value)
def __repr__(self):
return '<Option: type->{0} default->{1!r}>'.format(self.type,
self.default)
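# Example: Option(10, type='int').to_python('42') returns the int 42.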
NAMESPACES = {
'BROKER': {
'URL': Option(None, type='string'),
'CONNECTION_TIMEOUT': Option(4, type='float'),
'CONNECTION_RETRY': Option(True, type='bool'),
'CONNECTION_MAX_RETRIES': Option(100, type='int'),
'FAILOVER_STRATEGY': Option(None, type='string'),
'HEARTBEAT': Option(None, type='int'),
'HEARTBEAT_CHECKRATE': Option(3.0, type='int'),
'LOGIN_METHOD': Option(None, type='string'),
'POOL_LIMIT': Option(10, type='int'),
'USE_SSL': Option(False, type='bool'),
'TRANSPORT': Option(type='string'),
'TRANSPORT_OPTIONS': Option({}, type='dict'),
'HOST': Option(type='string', **_BROKER_OLD),
'PORT': Option(type='int', **_BROKER_OLD),
'USER': Option(type='string', **_BROKER_OLD),
'PASSWORD': Option(type='string', **_BROKER_OLD),
'VHOST': Option(type='string', **_BROKER_OLD),
},
'CASSANDRA': {
'COLUMN_FAMILY': Option(type='string'),
'DETAILED_MODE': Option(False, type='bool'),
'KEYSPACE': Option(type='string'),
'READ_CONSISTENCY': Option(type='string'),
'SERVERS': Option(type='list'),
'WRITE_CONSISTENCY': Option(type='string'),
},
'CELERY': {
'ACCEPT_CONTENT': Option(DEFAULT_ACCEPT_CONTENT, type='list'),
'ACKS_LATE': Option(False, type='bool'),
'ALWAYS_EAGER': Option(False, type='bool'),
'ANNOTATIONS': Option(type='any'),
'FORCE_BILLIARD_LOGGING': Option(True, type='bool'),
'BROADCAST_QUEUE': Option('celeryctl'),
'BROADCAST_EXCHANGE': Option('celeryctl'),
'BROADCAST_EXCHANGE_TYPE': Option('fanout'),
'CACHE_BACKEND': Option(),
'CACHE_BACKEND_OPTIONS': Option({}, type='dict'),
'CHORD_PROPAGATES': Option(True, type='bool'),
'COUCHBASE_BACKEND_SETTINGS': Option(None, type='dict'),
'CREATE_MISSING_QUEUES': Option(True, type='bool'),
'DEFAULT_RATE_LIMIT': Option(type='string'),
'DISABLE_RATE_LIMITS': Option(False, type='bool'),
'DEFAULT_ROUTING_KEY': Option('celery'),
'DEFAULT_QUEUE': Option('celery'),
'DEFAULT_EXCHANGE': Option('celery'),
'DEFAULT_EXCHANGE_TYPE': Option('direct'),
'DEFAULT_DELIVERY_MODE': Option(2, type='string'),
'EAGER_PROPAGATES_EXCEPTIONS': Option(False, type='bool'),
'ENABLE_UTC': Option(True, type='bool'),
'ENABLE_REMOTE_CONTROL': Option(True, type='bool'),
'EVENT_SERIALIZER': Option('json'),
'EVENT_QUEUE_EXPIRES': Option(None, type='float'),
'EVENT_QUEUE_TTL': Option(None, type='float'),
'IMPORTS': Option((), type='tuple'),
'INCLUDE': Option((), type='tuple'),
'IGNORE_RESULT': Option(False, type='bool'),
'MAX_CACHED_RESULTS': Option(5000, type='int'),
'MESSAGE_COMPRESSION': Option(type='string'),
'MONGODB_BACKEND_SETTINGS': Option(type='dict'),
'REDIS_HOST': Option(type='string', **_REDIS_OLD),
'REDIS_PORT': Option(type='int', **_REDIS_OLD),
'REDIS_DB': Option(type='int', **_REDIS_OLD),
'REDIS_PASSWORD': Option(type='string', **_REDIS_OLD),
'REDIS_MAX_CONNECTIONS': Option(type='int'),
'RESULT_BACKEND': Option(type='string'),
'RESULT_DB_SHORT_LIVED_SESSIONS': Option(False, type='bool'),
'RESULT_DB_TABLENAMES': Option(type='dict'),
'RESULT_DBURI': Option(),
'RESULT_ENGINE_OPTIONS': Option(type='dict'),
'RESULT_EXCHANGE': Option('celeryresults'),
'RESULT_EXCHANGE_TYPE': Option('direct'),
'RESULT_SERIALIZER': Option('pickle'),
'RESULT_PERSISTENT': Option(None, type='bool'),
'ROUTES': Option(type='any'),
'SEND_EVENTS': Option(False, type='bool'),
'SEND_TASK_ERROR_EMAILS': Option(False, type='bool'),
'SEND_TASK_SENT_EVENT': Option(False, type='bool'),
'STORE_ERRORS_EVEN_IF_IGNORED': Option(False, type='bool'),
'TASK_PUBLISH_RETRY': Option(True, type='bool'),
'TASK_PUBLISH_RETRY_POLICY': Option({
'max_retries': 3,
'interval_start': 0,
'interval_max': 1,
'interval_step': 0.2}, type='dict'),
'TASK_RESULT_EXPIRES': Option(timedelta(days=1), type='float'),
'TASK_SERIALIZER': Option('pickle'),
'TIMEZONE': Option(type='string'),
'TRACK_STARTED': Option(False, type='bool'),
'REDIRECT_STDOUTS': Option(True, type='bool'),
'REDIRECT_STDOUTS_LEVEL': Option('WARNING'),
'QUEUES': Option(type='dict'),
'QUEUE_HA_POLICY': Option(None, type='string'),
'SECURITY_KEY': Option(type='string'),
'SECURITY_CERTIFICATE': Option(type='string'),
'SECURITY_CERT_STORE': Option(type='string'),
'WORKER_DIRECT': Option(False, type='bool'),
},
'CELERYD': {
'AGENT': Option(None, type='string'),
'AUTOSCALER': Option('celery.worker.autoscale:Autoscaler'),
'AUTORELOADER': Option('celery.worker.autoreload:Autoreloader'),
'CONCURRENCY': Option(0, type='int'),
'TIMER': Option(type='string'),
'TIMER_PRECISION': Option(1.0, type='float'),
'FORCE_EXECV': Option(False, type='bool'),
'HIJACK_ROOT_LOGGER': Option(True, type='bool'),
'CONSUMER': Option('celery.worker.consumer:Consumer', type='string'),
'LOG_FORMAT': Option(DEFAULT_PROCESS_LOG_FMT),
'LOG_COLOR': Option(type='bool'),
'LOG_LEVEL': Option('WARN', deprecate_by='2.4', remove_by='4.0',
alt='--loglevel argument'),
'LOG_FILE': Option(deprecate_by='2.4', remove_by='4.0',
alt='--logfile argument'),
'MAX_TASKS_PER_CHILD': Option(type='int'),
'POOL': Option(DEFAULT_POOL),
'POOL_PUTLOCKS': Option(True, type='bool'),
'POOL_RESTARTS': Option(False, type='bool'),
'PREFETCH_MULTIPLIER': Option(4, type='int'),
'STATE_DB': Option(),
'TASK_LOG_FORMAT': Option(DEFAULT_TASK_LOG_FMT),
'TASK_SOFT_TIME_LIMIT': Option(type='float'),
'TASK_TIME_LIMIT': Option(type='float'),
'WORKER_LOST_WAIT': Option(10.0, type='float')
},
'CELERYBEAT': {
'SCHEDULE': Option({}, type='dict'),
'SCHEDULER': Option('celery.beat:PersistentScheduler'),
'SCHEDULE_FILENAME': Option('celerybeat-schedule'),
'MAX_LOOP_INTERVAL': Option(0, type='float'),
'LOG_LEVEL': Option('INFO', deprecate_by='2.4', remove_by='4.0',
alt='--loglevel argument'),
'LOG_FILE': Option(deprecate_by='2.4', remove_by='4.0',
alt='--logfile argument'),
},
'CELERYMON': {
'LOG_LEVEL': Option('INFO', deprecate_by='2.4', remove_by='4.0',
alt='--loglevel argument'),
'LOG_FILE': Option(deprecate_by='2.4', remove_by='4.0',
alt='--logfile argument'),
'LOG_FORMAT': Option(DEFAULT_LOG_FMT),
},
'EMAIL': {
'HOST': Option('localhost'),
'PORT': Option(25, type='int'),
'HOST_USER': Option(),
'HOST_PASSWORD': Option(),
'TIMEOUT': Option(2, type='float'),
'USE_SSL': Option(False, type='bool'),
'USE_TLS': Option(False, type='bool'),
},
'SERVER_EMAIL': Option('celery@localhost'),
'ADMINS': Option((), type='tuple'),
}
def flatten(d, ns=''):
stack = deque([(ns, d)])
while stack:
name, space = stack.popleft()
for key, value in items(space):
if isinstance(value, dict):
stack.append((name + key + '_', value))
else:
yield name + key, value
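# Example: flatten({'BROKER': {'URL': opt}}) yields ('BROKER_URL', opt).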
DEFAULTS = dict((key, value.default) for key, value in flatten(NAMESPACES))
def find_deprecated_settings(source):
from celery.utils import warn_deprecated
for name, opt in flatten(NAMESPACES):
if (opt.deprecate_by or opt.remove_by) and getattr(source, name, None):
warn_deprecated(description='The {0!r} setting'.format(name),
deprecation=opt.deprecate_by,
removal=opt.remove_by,
alternative='Use the {0.alt} instead'.format(opt))
return source
@memoize(maxsize=None)
def find(name, namespace='celery'):
# - Try specified namespace first.
namespace = namespace.upper()
try:
return searchresult(
namespace, name.upper(), NAMESPACES[namespace][name.upper()],
)
except KeyError:
# - Try all the other namespaces.
for ns, keys in items(NAMESPACES):
if ns.upper() == name.upper():
return searchresult(None, ns, keys)
elif isinstance(keys, dict):
try:
return searchresult(ns, name.upper(), keys[name.upper()])
except KeyError:
pass
# - See if name is a qualname last.
return searchresult(None, name.upper(), DEFAULTS[name.upper()])
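# Example: find('url', namespace='broker') returns
# searchresult('BROKER', 'URL', <Option>).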
peterlei/fboss | fboss/py/fboss/cli/commands/nic.py
#!/usr/bin/env python3
#
# Copyright (c) 2004-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
from fboss.cli.commands import commands as cmds
from fboss.cli.data.oui_to_vendor_ieee import NIC_VENDOR_OUI_MAP
class NicCmd(cmds.PrintNeighborTableCmd):
'''Class for host NIC related commands in fboss.'''
_LENGTH_OF_OUI = 8
_NO_MAC_FOUND_MESSAGE = "No MAC address found in ARP/NDP tables found."
_UNKNOWN_VENDOR_MESSAGE = "Unknown NIC Vendor."
def run(self, detail, verbose):
self._client = self._create_agent_client()
# Get the MAC addresses for IPV4.
arp_table_detailed = self._client.getArpTable()
arp_mac_addresses = [arp_mac.mac for arp_mac in arp_table_detailed]
# Get the MAC addresses for IPV6.
ndp_table_detailed = self._client.getNdpTable()
ndp_mac_addresses = [ndp_mac.mac for ndp_mac in ndp_table_detailed]
mac_address_set = set(arp_mac_addresses + ndp_mac_addresses)
# Ignore the broadcast mac.
mac_address_set -= set(['ff:ff:ff:ff:ff:ff', 'FF:FF:FF:FF:FF:FF'])
if not len(mac_address_set):
print(self._NO_MAC_FOUND_MESSAGE)
return
mac_nic_dictionary = {}
for mac in mac_address_set:
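            # The first 8 characters of "AA:BB:CC:DD:EE:FF" form the
            # "AA:BB:CC" OUI prefix used for the vendor lookup.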
oui = mac[:self._LENGTH_OF_OUI].upper()
            if oui in NIC_VENDOR_OUI_MAP:
mac_nic_dictionary[mac] = NIC_VENDOR_OUI_MAP[oui]
else:
mac_nic_dictionary[mac] = self._UNKNOWN_VENDOR_MESSAGE
if detail or verbose:
for mac_address, vendor_name in mac_nic_dictionary.items():
print("MAC Address: " + mac_address + " Vendor: " + vendor_name)
return
# Non verbose output needs only NIC vendor names.
nic_vendor_set = set(mac_nic_dictionary.values())
response = ""
if len(nic_vendor_set) == 0:
response = self._NO_MAC_FOUND_MESSAGE
elif len(nic_vendor_set) > 1:
response += ", ".join(str(nic_vendor_iterator)
for nic_vendor_iterator in nic_vendor_set)
else:
response += nic_vendor_set.pop()
print(response)
pythonprobr/pythonpro-website | pythonpro/cohorts/facade.py
from functools import partial
from django.conf import settings
from django.core.cache import cache
from django.db.models import Prefetch as _Prefetch
from django.urls import reverse
from pythonpro.cohorts.models import Cohort as _Cohort, CohortStudent, LiveClass as _LiveClass, Webinar as _Webinar
__all__ = [
'get_all_cohorts_desc',
'find_cohort',
'find_most_recent_cohort',
'calculate_most_recent_cohort_path',
'find_webinars',
'find_webinar',
'find_live_class',
]
def get_all_cohorts_desc():
lazy_all_cohorts = partial(tuple, _Cohort.objects.order_by('-start'))
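    # The partial defers evaluation: get_or_set only runs the queryset (and
    # materializes the tuple) on a cache miss.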
return cache.get_or_set('ALL_COHORTS', lazy_all_cohorts, settings.CACHE_TTL)
def find_cohort(slug):
return _Cohort.objects.filter(slug=slug).prefetch_related(
_Prefetch(
'liveclass_set',
queryset=_LiveClass.objects.order_by('start'),
to_attr='classes'
)
).prefetch_related(
_Prefetch(
'webinar_set',
queryset=_Webinar.objects.order_by('start'),
to_attr='webinars'
)
).get()
def find_most_recent_cohort():
return _Cohort.objects.order_by('-start').first()
def calculate_most_recent_cohort_path() -> str:
slug_dct = _Cohort.objects.order_by('-start').values('slug').first()
return reverse('modules:detail', kwargs=slug_dct)
def find_webinars():
"""
Retrieve Webinars from database ordered by date desc
:return: Tuple of webinars
"""
return tuple(_Webinar.objects.order_by('-start'))
def find_recorded_webinars():
"""
Retrieve recorded Webinars from database ordered by date desc.
A recorded Webinar has vimeo_id not empty
:return: Tuple of webinars
"""
return tuple(_Webinar.objects.order_by('-start').exclude(vimeo_id__exact=''))
def find_webinar(slug):
"""
Retrieve Webinar by its slug
:return: Webinar
"""
return _Webinar.objects.filter(slug=slug).get()
def find_live_class(pk):
"""
Find Live Class by its PK, selecting related cohort
:param pk:
:return:
"""
return _LiveClass.objects.select_related('cohort').get(pk=pk)
def subscribe_to_last_cohort(user):
ch = CohortStudent(user=user, cohort=find_most_recent_cohort())
ch.save()
return ch
dukestats/gpustats | scripts/bench.py
from pandas import *
import numpy as np
from pycuda.gpuarray import to_gpu
import gpustats
import gpustats.util as util
from scipy.stats import norm
import timeit
data = np.random.randn(1000000)
mean = 20
std = 5
univ_setup = """
import numpy as np
from pycuda.gpuarray import to_gpu
k = 8
means = np.random.randn(k)
stds = np.abs(np.random.randn(k))
mean = 20
std = 5
import gpustats
from scipy.stats import norm
cpu_data = np.random.randn(%d)
gpu_data = cpu_data
"""
univ_setup_gpuarray = univ_setup + """
gpu_data = to_gpu(cpu_data)
"""
multivar_setup = """
# from __main__ import data, mean, std
import gpustats
import gpustats.util as util
import numpy as np
import testmod
from pycuda.gpuarray import to_gpu
import testmod
from numpy.linalg import cholesky as chol
import numpy.linalg as L
def next_multiple(k, p):
if k.__mod__(p):
return k + (p - k.__mod__(p))
return k
PAD_MULTIPLE = 16
HALF_WARP = 16
def pad_data(data):
n, k = data.shape
if not k.__mod__(HALF_WARP):
pad_dim = k + 1
else:
pad_dim = k
if k != pad_dim:
padded_data = np.empty((n, pad_dim), dtype=np.float32)
padded_data[:, :k] = data
return padded_data
else:
return prep_ndarray(data)
def prep_ndarray(arr):
# is float32 and contiguous?
if not arr.dtype == np.float32 or not arr.flags.contiguous:
arr = np.array(arr, dtype=np.float32)
return arr
def pack_params(means, chol_sigmas, logdets):
to_pack = []
for m, ch, ld in zip(means, chol_sigmas, logdets):
to_pack.append(pack_pdf_params(m, ch, ld))
return np.vstack(to_pack)
def pack_pdf_params(mean, chol_sigma, logdet):
k = len(mean)
mean_len = k
chol_len = k * (k + 1) / 2
mch_len = mean_len + chol_len
packed_dim = next_multiple(mch_len + 2, PAD_MULTIPLE)
packed_params = np.empty(packed_dim, dtype=np.float32)
packed_params[:mean_len] = mean
packed_params[mean_len:mch_len] = chol_sigma[np.tril_indices(k)]
packed_params[mch_len:mch_len + 2] = 1, logdet
return packed_params
k = %d
dim = 15
means = np.random.randn(k, dim)
covs = [util.random_cov(dim) for _ in xrange(k)]
cpu_data = np.random.randn(%d, dim)
gpu_data = cpu_data
"""
multivar_setup_gpuarray = multivar_setup + """
gpu_data = to_gpu(cpu_data)
"""
LOG_2_PI = np.log(2 * np.pi)
# def mvnpdf(data, mean, cov):
# ichol_sigma = np.asarray(np.linalg.inv(np.linalg.cholesky(cov)))
# # ichol_sigma = np.tril(ichol_sigma)
# logdet = np.log(np.linalg.det(cov))
# return [_mvnpdf(x, mean, ichol_sigma, logdet)
# for x in data]
# def _mvnpdf(x, mean, ichol_sigma, logdet):
# demeaned = x - mean
# discrim = ((ichol_sigma * demeaned) ** 2).sum()
# # discrim = np.dot(demeaned, np.dot(ichol_sigma, demeaned))
# return - 0.5 * (discrim + logdet + LOG_2_PI * dim)
def get_timeit(stmt, setup, iter=10):
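    # Average wall-clock seconds per single execution of stmt.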
return timeit.Timer(stmt, setup).timeit(number=iter) / iter
def compare_timings_single(n, setup=univ_setup):
gpu = "gpustats.normpdf(gpu_data, mean, std, logged=False)"
cpu = "norm.pdf(cpu_data, loc=mean, scale=std)"
setup = setup % n
return {'GPU' : get_timeit(gpu, setup, iter=1000),
'CPU' : get_timeit(cpu, setup)}
def compare_timings_multi(n, setup=univ_setup):
gpu = "gpustats.normpdf_multi(gpu_data, means, stds, logged=False)"
cpu = """
for m, s in zip(means, stds):
norm.pdf(cpu_data, loc=m, scale=s)
"""
setup = setup % n
return {'GPU' : get_timeit(gpu, setup, iter=100),
'CPU' : get_timeit(cpu, setup)}
def mvcompare_timings(n, k=1, setup=multivar_setup):
gpu = "gpustats.mvnpdf_multi(gpu_data, means, covs, logged=False)"
cpu = """
ichol_sigmas = [L.inv(chol(sig)) for sig in covs]
logdets = [np.log(np.linalg.det(sig)) for sig in covs]
params = pack_params(means, covs, logdets)
testmod.cpu_mvnpdf(cpu_data, params, dim)
"""
setup = setup % (k, n)
return {'GPU' : get_timeit(gpu, setup, iter=100),
'CPU' : get_timeit(cpu, setup)}
def get_timing_results(timing_f):
lengths = [100, 1000, 10000, 100000, 1000000]
result = {}
for n in lengths:
print n
result[n] = timing_f(n)
result = DataFrame(result).T
result['Speedup'] = result['CPU'] / result['GPU']
return result
# mvsingle = get_timing_results(mvcompare_timings)
# comp_gpu = lambda n: mvcompare_timings(n, setup=multivar_setup_gpuarray)
# mvsingle_gpu = get_timing_results(comp_gpu)
# multi_comp = lambda n: mvcompare_timings(n, k=16)
# mvmulti = get_timing_results(multi_comp)
# multi_comp_gpu = lambda n: mvcompare_timings(n, k=16,
# setup=multivar_setup_gpuarray)
# mvmulti_gpu = get_timing_results(multi_comp_gpu)
single = get_timing_results(compare_timings_single)
comp_gpu = lambda n: compare_timings_single(n, setup=univ_setup_gpuarray)
single_gpu = get_timing_results(comp_gpu)
multi = get_timing_results(compare_timings_multi)
comp_gpu = lambda n: compare_timings_multi(n, setup=univ_setup_gpuarray)
multi_gpu = get_timing_results(comp_gpu)
data = DataFrame({
'Single' : single['Speedup'],
'Single (GPUArray)' : single_gpu['Speedup'],
'Multi' : multi['Speedup'],
'Multi (GPUArray)' : multi_gpu['Speedup'],
})
# NOTE: the multivariate timing runs above are commented out, so building
# mvdata here would raise a NameError; keep it disabled until the mv*
# results are computed again.
# mvdata = DataFrame({
#     'Single' : mvsingle['Speedup'],
#     'Single (GPUArray)' : mvsingle_gpu['Speedup'],
#     'Multi' : mvmulti['Speedup'],
#     'Multi (GPUArray)' : mvmulti_gpu['Speedup'],
# })
if __name__ == '__main__':
import gpustats
import numpy as np
from scipy.stats import norm
import testmod
from numpy.linalg import cholesky as chol
import numpy.linalg as L
# dim = 15
# k = 8
# means = np.random.randn(k, dim)
# covs = [np.asarray(util.random_cov(dim)) for _ in xrange(k)]
# cpu_data = np.random.randn(100000, dim)
# gpu_data = to_gpu(cpu_data)
# ichol_sigmas = [L.inv(chol(sig)) for sig in covs]
# logdets = [np.log(np.linalg.det(sig)) for sig in covs]
# packed_params = pack_params(means, covs, logdets)
# pdfs = gpustats.mvnpdf(cpu_data, means[0], covs[0])
# pdfs = testmod.cpu_mvnpdf(cpu_data, packed_params, 15)
STIXProject/stix-validator | sdv/validators/stix/best_practice.py
# Copyright (c) 2015, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
# builtin
import re
import itertools
import collections
import distutils.version
# external
from lxml import etree
from mixbox.vendor.six import iteritems, itervalues, text_type, with_metaclass
from mixbox import compat
# internal
from sdv import utils, xmlconst
# relative
from . import common
from .. import base
from ...utils import remove_version_prefix
# Python 2.6 doesn't have collections.OrderedDict :(
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
# STIX ID Format: [ns prefix]:[construct type]-[GUID]
# Note: This will validate invalid QNames, so this should be used with a
# QName format check.
ID_PATTERN = re.compile(r"[\w\-]+:\w+-.+", re.UNICODE)
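# e.g. "example:Indicator-ba1d406e-937c-414f-9231-6e1dbe64fe8b" matches.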
def rule(minver, maxver=None):
"""Decorator that identifies methods as being a STIX best practice checking
rule.
    Args:
        minver: Identifies the minimum version of STIX for which the
            decorated method applies.
        maxver: Identifies the maximum version of STIX for which the
            decorated method applies, or ``None`` for no upper bound.
    """
def decorator(func):
func.is_rule = True
func.min_version = minver
func.max_version = maxver
return func
return decorator
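# Usage sketch (the method name here is hypothetical; real rules appear in
# STIXBestPracticeValidator below):
#
#     @rule('1.1', maxver='1.1.1')
#     def _check_foo(self, root, namespaces, version):
#         ...
#
# Omitting maxver leaves the rule active for minver and every later version.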
class BestPracticeMeta(type):
"""Metaclass that collects all :meth:`rule` decorated methods and
builds an internal mapping of STIX version numbers to rules.
"""
def __new__(metacls, name, bases, dict_):
obj = type.__new__(metacls, name, bases, dict_)
# Initialize a mapping of STIX versions to applicable rule funcs.
ruledict = collections.defaultdict(list)
# Find all @rule marked functions in the class dict_
rulefuncs = (x for x in itervalues(dict_) if hasattr(x, 'is_rule'))
# Build the rule function dict.
for rule in rulefuncs:
ruledict[(rule.min_version, rule.max_version)].append(rule) # noqa
# Attach the rule dictionary to the object instance.
obj._rules = ruledict # noqa
return obj
class BestPracticeWarning(compat.MutableMapping, base.ValidationError):
"""Represents a best practice warning. These are built within best
practice rule checking methods and attached to
:class:`BestPracticeWarningCollection` instances.
Note:
This class acts like a dictionary and contains the following keys
at a minimum:
* ``'id'``: The id of a node associated with the warning.
* ``'idref'``: The idref of a node associated with the warning.
* ``'line'``: The line number of the offending node.
* ``'message'``: A message associated with the warning.
* ``'tag'``: The lxml tag for the offending node.
These keys can be retrieved via the :attr:`core_keys` property.
Instances of this class may attach additional keys. These `other keys`
can be obtained via the :attr:`other_keys` property.
Args:
node: The ``lxml._Element`` node associated with this warning.
message: A message for this warning.
"""
def __init__(self, node, message=None):
base.ValidationError.__init__(self)
self._inner = OrderedDict()
self._node = node
self['line'] = node.sourceline
self['message'] = message
self['id'] = node.attrib.get('id')
self['idref'] = node.attrib.get('idref')
self['tag'] = node.tag
    def __unicode__(self):
        return text_type(self.message)

    def __str__(self):
        if str is bytes:  # Python 2: match the original bytes-returning behavior
            return self.__unicode__().encode("utf-8")
        return self.__unicode__()
def __getitem__(self, key):
return self._inner.__getitem__(key)
def __delitem__(self, key):
self._inner.__delitem__(key)
def __setitem__(self, key, value):
self._inner.__setitem__(key, value)
def __len__(self):
return self._inner.__len__()
def __iter__(self):
return self._inner.__iter__()
@property
def line(self):
"""Returns the line number of the warning node in the input document.
"""
return self['line']
@property
def message(self):
"""Returns a message associated with the warning. This may return
``None`` if there is no warning message.
"""
return self['message']
@property
def core_keys(self):
"""Returns a ``tuple`` of the keys that can always be found on
instance of this class.
Returns:
A tuple including the following keys.
* ``'id'``: The id of the warning node. The associated value
may be ``None``.
* ``'idref'``: The idref of the warning node. The associated value
may be ``None``.
* ``'line'``: The line number of the warning node in the input
document. The associated value may be ``None``.
* ``'tag'``: The ``{namespace}localname`` value of the warning
node.
* ``'message'``: An optional message that can be attached to the
warning. The associated value may be ``None``.
"""
return ('id', 'idref', 'line', 'tag', 'message')
@property
def other_keys(self):
"""Returns a ``tuple`` of keys attached to instances of this class that
are not found in the :attr:`core_keys`.
"""
return tuple(x for x in self if x not in self.core_keys)
def as_dict(self):
"""Returns a dictionary representation of this class instance. This
is implemented for consistency across other validation error types.
The :class:`.BestPracticeWarning` class extends
:class:`collections.MutableMapping`, so this method isn't really
necessary.
"""
return dict(iteritems(self))
class BestPracticeWarningCollection(compat.MutableSequence):
"""A collection of :class:`BestPracticeWarning` instances for a given
type of STIX Best Practice.
For example, all warnings about STIX constructs missing titles would
go within an instance of this class.
Note:
This class behaves like a mutable sequence, such as a ``list``.
Args:
name: The name of the STIX best practice for this collection (e.g.,
'Missing Titles').
Attributes:
name: The name of the STIX best practice for this collection (e.g.,
'Missing Titles').
"""
def __init__(self, name):
super(BestPracticeWarningCollection, self).__init__()
self.name = name
self._warnings = []
def insert(self, idx, value):
"""Inserts `value` at `idx` into this
:class:`BestPracticeWarningCollection` instance.
Note:
Values that evaluate to ``False`` will not be inserted.
"""
if not value:
return
if isinstance(value, etree._Element): # noqa
value = BestPracticeWarning(node=value)
self._warnings.insert(idx, value)
def __getitem__(self, key):
return self._warnings.__getitem__(key)
def __setitem__(self, key, value):
self._warnings.__setitem__(key, value)
def __delitem__(self, key):
self._warnings.__delitem__(key)
def __len__(self):
return len(self._warnings)
    def __nonzero__(self):
        return bool(self._warnings)

    __bool__ = __nonzero__  # Python 3 equivalent of __nonzero__
def as_dict(self):
"""Returns a dictionary representation.
The key of the dictionary is the ``name`` of this collection. The
associated value is a ``list`` of :class:`BestPracticeWarning`
dictionaries.
"""
if not self:
return {}
return {self.name: [x.as_dict() for x in self]}
class BestPracticeValidationResults(base.ValidationResults, compat.MutableSequence):
"""Represents STIX best practice validation results. This class behaves
like a ``list`` and accepts instances of
:class:`BestPracticeWarningCollection`.
"""
def __init__(self):
base.ValidationResults.__init__(self, False)
self._warnings = []
@base.ValidationResults.is_valid.getter
def is_valid(self):
"""Returns ``True`` if an instance of this class contains no warning
collections or only contains only warning collections.
"""
return not(any(self))
@property
def errors(self):
"""Returns a ``list`` of :class:`BestPracticeWarningCollection`
instances.
"""
return [x for x in self if x]
def insert(self, idx, value):
"""Inserts an instance of :class:`BestPracticeWarningCollection`.
Note:
If ``bool(value) == False`` then `value` will not be inserted.
Raises:
ValueError: If `value` is not an instance of
:class:`BestPracticeWarningCollection`.
"""
if not value:
return
if not isinstance(value, BestPracticeWarningCollection):
raise ValueError(
"Value must be instance of BestPracticeWarningCollection"
)
self._warnings.insert(idx, value)
def __getitem__(self, key):
return self._warnings.__getitem__(key)
def __setitem__(self, key, value):
self._warnings.__setitem__(key, value)
def __delitem__(self, key):
self._warnings.__delitem__(key)
def __len__(self):
return len(self._warnings)
    def __nonzero__(self):
        return bool(self._warnings)

    __bool__ = __nonzero__  # Python 3 equivalent of __nonzero__
def as_dict(self):
"""Returns a dictionary representation.
Keys:
* ``'result'``: The result of the validation. Values can be
``True`` or ``False`` .
* ``'errors'``: A list of :class:`BestPracticeWarningCollection`
dictionaries.
"""
d = base.ValidationResults.as_dict(self)
if any(self):
d['errors'] = [x.as_dict() for x in self if x]
return d
class STIXBestPracticeValidator(with_metaclass(BestPracticeMeta, object)):
"""Performs STIX Best Practice validation."""
@rule('1.0')
def _check_id_presence(self, root, namespaces, version): # noqa
"""Checks that all major STIX/CybOX constructs have id attributes set.
Constructs with idref attributes set should not have an id attribute
and are thus omitted from the results.
"""
to_check = itertools.chain(
common.STIX_CORE_COMPONENTS,
common.CYBOX_CORE_COMPONENTS
)
results = BestPracticeWarningCollection('Missing IDs')
xpath = " | ".join("//%s" % x for x in to_check)
nodes = root.xpath(xpath, namespaces=namespaces)
for node in nodes:
if any(x in node.attrib for x in ('id', 'idref')):
continue
warning = BestPracticeWarning(node=node)
results.append(warning)
return results
@rule('1.0')
def _check_id_format(self, root, namespaces, version): # noqa
"""Checks that the core STIX/CybOX constructs in the STIX instance
document have ids and that each id is a valid QName, formatted as
follows:
``[ns_prefix]:[object-type]-[GUID].``
Note:
This only checks for STIX ID best practices and does not verify
that the ID is a valid QName. QName conformance verification is
done during XML Schema validation.
"""
to_check = itertools.chain(
common.STIX_CORE_COMPONENTS,
common.CYBOX_CORE_COMPONENTS
)
results = BestPracticeWarningCollection('ID Format')
msg = "ID should be formatted as [ns prefix]:[construct type]-[GUID]"
xpath = " | ".join("//%s[@id]" % x for x in to_check)
for node in root.xpath(xpath, namespaces=namespaces):
id_ = node.attrib['id']
if ID_PATTERN.match(id_):
continue
result = BestPracticeWarning(node=node, message=msg)
results.append(result)
return results
def _get_id_timestamp_conflicts(self, nodes):
"""Returns a list of BestPracticeWarnings for all nodes in `nodes`
that have duplicate (id, timestamp) pairs.
"""
warns = []
def _equal_timestamps(nodeset):
return [x for x in nodeset if utils.is_equal_timestamp(node, x)]
while len(nodes) > 1:
node = nodes.pop()
ts_equal = _equal_timestamps(nodes)
if not ts_equal:
continue
conflicts = itertools.chain(ts_equal, (node,))
for c in conflicts:
warning = BestPracticeWarning(node=c)
warning['timestamp'] = c.attrib.get('timestamp')
warns.append(warning)
utils.remove_all(nodes, ts_equal)
return warns
@rule('1.2')
def _check_1_2_duplicate_ids(self, root, namespaces, version): # noqa
"""STIX 1.2 dropped the schematic enforcement of id uniqueness to
support versioning of components.
This checks for duplicate (id, timestamp) pairs.
"""
results = BestPracticeWarningCollection('Duplicate IDs')
nlist = namespaces.values()
# Find all nodes with IDs in the STIX/CybOX namespace
nodes = root.xpath("//*[@id]")
filtered = [x for x in nodes if utils.namespace(x) in nlist]
# Build a mapping of IDs to nodes
idnodes = collections.defaultdict(list)
for node in filtered:
idnodes[node.attrib.get('id')].append(node)
# Find all nodes that have duplicate IDs
dups = [x for x in itervalues(idnodes) if len(x) > 1]
# Build warnings for all nodes that have conflicting id/timestamp pairs.
for nodeset in dups:
warns = self._get_id_timestamp_conflicts(nodeset)
results.extend(warns)
return results
@rule(minver='1.0', maxver='1.1.1')
def _check_1_0_duplicate_ids(self, root, namespaces, version): # noqa
"""Checks for duplicate ids in the document.
"""
id_nodes = collections.defaultdict(list)
for node in root.xpath("//*[@id]"):
id_nodes[node.attrib['id']].append(node)
results = BestPracticeWarningCollection('Duplicate IDs')
for nodes in itervalues(id_nodes):
if len(nodes) > 1:
results.extend(BestPracticeWarning(node=x) for x in nodes)
return results
@rule('1.0')
def _check_idref_resolution(self, root, namespaces, version): # noqa
"""Checks that all idrefs resolve to a construct in the document.
"""
idrefs = root.xpath("//*[@idref]")
ids = root.xpath("//@id")
def idref(x):
return x.attrib['idref']
results = BestPracticeWarningCollection("Unresolved IDREFs")
warns = (BestPracticeWarning(x) for x in idrefs if idref(x) not in ids)
results.extend(warns)
return results
@rule('1.0')
def _check_idref_with_content(self, root, namespaces, version): # noqa
"""Checks that constructs with idref set do not contain content.
Note:
Some STIX/CybOX constructs (e.g., ``Related_Object`` instances) are
exceptions to this rule.
"""
def is_invalid(node):
if common.is_idref_content_exception(node):
return False
return utils.has_content(node)
nodes = root.xpath("//*[@idref]")
warnings = (BestPracticeWarning(x) for x in nodes if is_invalid(x))
results = BestPracticeWarningCollection("IDREF with Content")
results.extend(warnings)
return results
@rule('1.0')
def _check_indicator_practices(self, root, namespaces, version): # noqa
"""Looks for STIX Indicators that are missing a Description, Type,
Valid_Time_Position, Indicated_TTP, and/or Confidence.
"""
to_check = (
"{0}:Indicator".format(common.PREFIX_STIX_CORE),
"{0}:Indicator".format(common.PREFIX_STIX_COMMON),
"{0}:Indicator".format(common.PREFIX_STIX_REPORT),
)
results = BestPracticeWarningCollection("Indicator Suggestions")
xpath = " | ".join("//%s" % x for x in to_check)
ns = namespaces[common.PREFIX_STIX_INDICATOR]
for indicator in root.xpath(xpath, namespaces=namespaces):
missing = []
if 'idref' not in indicator.attrib:
if indicator.find('{%s}Description' % ns) is None:
missing.append("Description")
if indicator.find('{%s}Type' % ns) is None:
missing.append("Type")
if indicator.find('{%s}Valid_Time_Position' % ns) is None:
missing.append('Valid_Time_Position')
if indicator.find('{%s}Indicated_TTP' % ns) is None:
missing.append('Indicated_TTP')
if indicator.find('{%s}Confidence' % ns) is None:
missing.append('Confidence')
if missing:
warning = BestPracticeWarning(node=indicator)
warning['missing'] = missing
results.append(warning)
return results
@rule('1.0')
def _check_root_element(self, root, namespaces, version): # noqa
"""Checks that the root element is a STIX_Package.
"""
ns = namespaces[common.PREFIX_STIX_CORE]
results = BestPracticeWarningCollection("Root Element")
if root.tag != "{%s}STIX_Package" % (ns):
warning = BestPracticeWarning(node=root)
results.append(warning)
return results
@rule('1.0')
def _check_latest_vocabs(self, root, namespaces, version): # noqa
"""Checks that all STIX vocabs are using latest published versions.
Triggers a warning if an out of date vocabulary is used.
Note:
The xpath used to discover instances of controlled vocabularies
assumes that the type name ends with 'Vocab-'. An example
instance would be 'IndicatorTypeVocab-1.0'.
"""
results = BestPracticeWarningCollection("Vocab Suggestions")
xpath = "//*[contains(@xsi:type, 'Vocab-')]"
for vocab in root.xpath(xpath, namespaces=namespaces):
xsi_type = vocab.attrib[xmlconst.TAG_XSI_TYPE]
name = common.parse_vocab_name(xsi_type)
found = common.parse_vocab_version(xsi_type)
expected = common.get_vocab_version(root, version, xsi_type)
if found == expected:
continue
warning = BestPracticeWarning(node=vocab)
warning['vocab name'] = name
warning['version found'] = found
warning['version expected'] = expected
results.append(warning)
return results
@rule('1.0')
def _check_latest_versions(self, root, namespaces, version): # noqa
"""Checks that all major STIX constructs versions are equal to
the latest version.
"""
to_check = common.STIX_COMPONENT_VERSIONS[version]
results = BestPracticeWarningCollection('Latest Component Versions')
def _is_expected(node, expected):
if 'version' not in node.attrib:
return True
return node.attrib['version'] == expected
for selector, expected in iteritems(to_check):
xpath = "//%s" % selector
for node in root.xpath(xpath, namespaces=namespaces):
if _is_expected(node, expected):
continue
warning = BestPracticeWarning(node)
warning['version found'] = node.attrib['version']
warning['version expected'] = expected
results.append(warning)
return results
def _check_timestamp_usage(self, root, namespaces, selectors):
"""Inspects each node in `nodes` for correct timestamp use.
"""
results = BestPracticeWarningCollection("Timestamp Use")
xpath = " | ".join("//%s" % x for x in selectors)
nodes = root.xpath(xpath, namespaces=namespaces)
for node in nodes:
attrib = node.attrib.get
id_ = attrib('id')
idref = attrib('idref')
timestamp = attrib('timestamp')
if timestamp:
tz_set = utils.has_tzinfo(timestamp)
if not tz_set:
warning = BestPracticeWarning(
                        node=node,
message="Timestamp without timezone information."
)
warning['timestamp'] = timestamp
results.append(warning)
if id_ and not timestamp:
warning = BestPracticeWarning(
node=node,
message="ID present but missing timestamp"
)
elif idref and not timestamp:
warning = BestPracticeWarning(
node=node,
message="IDREF present but missing timestamp"
)
elif idref and timestamp:
resolves = common.idref_timestamp_resolves(
root=root,
idref=idref,
timestamp=timestamp,
namespaces=namespaces
)
if resolves:
continue
warning = BestPracticeWarning(
node=node,
message="IDREF and timestamp combination do not resolve "
"to a node in the input document."
)
warning['timestamp'] = timestamp
else:
continue
results.append(warning)
return results
@rule(minver='1.1', maxver='1.1.1')
def _check_1_1_timestamp_usage(self, root, namespaces, **kwargs): # noqa
"""Checks that all major STIX constructs have appropriate
timestamp usage.
Note:
This does not check core CybOX constructs because they lack
timestamp attributes.
"""
to_check = common.STIX_CORE_COMPONENTS
results = self._check_timestamp_usage(root, namespaces, to_check)
return results
@rule('1.2')
def _check_1_2_timestamp_usage(self, root, namespaces, **kwargs): # noqa
"""Checks that all major STIX constructs have appropriate
timestamp usage.
Note:
This does not check core CybOX constructs because they lack
timestamp attributes.
"""
to_check = common.STIX_CORE_COMPONENTS[2:] # skip STIX Packages
results = self._check_timestamp_usage(root, namespaces, to_check)
return results
def _check_titles(self, root, namespaces, selectors):
"""Checks that each node in `nodes` has a ``Title`` element unless
there is an ``@idref`` attribute set.
"""
results = BestPracticeWarningCollection("Missing Titles")
xpath = " | ".join("//%s" % x for x in selectors)
nodes = root.xpath(xpath, namespaces=namespaces)
for node in nodes:
if 'idref' in node.attrib:
continue
if not any(utils.localname(x) == 'Title' for x in utils.iterchildren(node)):
warning = BestPracticeWarning(node=node)
results.append(warning)
return results
@rule(minver='1.0', maxver='1.1.1')
def _check_1_0_titles(self, root, namespaces, version): # noqa
"""Checks that all major STIX constructs have a Title element.
"""
to_check = (
'{0}:STIX_Package/{0}:STIX_Header'.format(common.PREFIX_STIX_CORE),
'{0}:Campaign'.format(common.PREFIX_STIX_CORE),
'{0}:Campaign'.format(common.PREFIX_STIX_COMMON),
'{0}:Course_Of_Action'.format(common.PREFIX_STIX_CORE),
'{0}:Course_Of_Action'.format(common.PREFIX_STIX_COMMON),
'{0}:Exploit_Target'.format(common.PREFIX_STIX_CORE),
'{0}:Exploit_Target'.format(common.PREFIX_STIX_COMMON),
'{0}:Incident'.format(common.PREFIX_STIX_CORE),
'{0}:Incident'.format(common.PREFIX_STIX_COMMON),
'{0}:Indicator'.format(common.PREFIX_STIX_CORE),
'{0}:Indicator'.format(common.PREFIX_STIX_COMMON),
'{0}:Threat_Actor'.format(common.PREFIX_STIX_COMMON),
'{0}:Threat_Actor'.format(common.PREFIX_STIX_CORE),
'{0}:TTP'.format(common.PREFIX_STIX_CORE),
'{0}:TTP'.format(common.PREFIX_STIX_COMMON)
)
results = self._check_titles(root, namespaces, to_check)
return results
@rule('1.2')
def _check_1_2_titles(self, root, namespaces, version): # noqa
"""Checks that all major STIX constructs have a Title element.
"""
to_check = (
'{0}:Campaign'.format(common.PREFIX_STIX_CORE),
'{0}:Campaign'.format(common.PREFIX_STIX_COMMON),
'{0}:Course_Of_Action'.format(common.PREFIX_STIX_CORE),
'{0}:Course_Of_Action'.format(common.PREFIX_STIX_COMMON),
'{0}:Exploit_Target'.format(common.PREFIX_STIX_CORE),
'{0}:Exploit_Target'.format(common.PREFIX_STIX_COMMON),
'{0}:Incident'.format(common.PREFIX_STIX_CORE),
'{0}:Incident'.format(common.PREFIX_STIX_COMMON),
'{0}:Indicator'.format(common.PREFIX_STIX_CORE),
'{0}:Indicator'.format(common.PREFIX_STIX_COMMON),
'{0}:Threat_Actor'.format(common.PREFIX_STIX_COMMON),
'{0}:Threat_Actor'.format(common.PREFIX_STIX_CORE),
'{0}:TTP'.format(common.PREFIX_STIX_CORE),
'{0}:TTP'.format(common.PREFIX_STIX_COMMON),
'{0}:Report/{1}:Header'.format(common.PREFIX_STIX_CORE, common.PREFIX_STIX_REPORT),
'{0}:Report/{1}:Header'.format(common.PREFIX_STIX_COMMON, common.PREFIX_STIX_REPORT)
)
results = self._check_titles(root, namespaces, to_check)
return results
@rule('1.0')
def _check_marking_control_xpath(self, root, namespaces, version): # noqa
"""Checks that data marking controlled structure XPaths are valid
and resolve to nodes in the `root` document.
"""
results = BestPracticeWarningCollection("Data Marking Control XPath")
xpath = "//%s:Controlled_Structure" % common.PREFIX_DATA_MARKING
for elem in root.xpath(xpath, namespaces=namespaces):
if not elem.text:
message = "Empty Control XPath"
else:
message = common.test_xpath(elem)
if message:
result = BestPracticeWarning(node=elem, message=message)
results.append(result)
return results
@rule('1.0')
def _check_condition_attribute(self, root, namespaces, version): # noqa
"""Checks that Observable properties contain a ``@condition``
attribute.
This will also attempt to resolve Observables which are referenced
(not embedded) within Indicators.
Note:
This could produce inaccurate results if a CybOX ObjectProperties
instance contains fields that do not contain a ``condition``
attribute (e.g., a field that is not patternable).
"""
results = BestPracticeWarningCollection(
"Indicator Pattern Properties Missing Condition Attributes"
)
selectors = (
"//{0}:Indicator".format(common.PREFIX_STIX_CORE),
"//{0}:Indicator".format(common.PREFIX_STIX_COMMON),
"//{0}:Indicator".format(common.PREFIX_STIX_REPORT)
)
xpath = " | ".join(selectors)
indicators = root.xpath(xpath, namespaces=namespaces)
if len(indicators) == 0:
return results
def _get_leaves(nodes):
"""Finds and returns all leaf nodes contained within `nodes`."""
leaves = []
for n in nodes:
leaves.extend(x for x in utils.leaves(n) if utils.has_content(x))
return leaves
def _get_observables(indicators):
"""Iterates over `indicators` and yields an (indicator instance,
observable list) tuple with each pass.
The observable list contains all observable instances embedded or
referenced within the Indicator.
"""
for indicator in indicators:
observables = common.get_indicator_observables(
root=root,
indicator=indicator,
namespaces=namespaces
)
yield (indicator, observables)
xpath = ".//{0}:Properties".format(common.PREFIX_CYBOX_CORE)
for indicator, observables in _get_observables(indicators):
id_ = indicator.attrib.get('id', 'No ID Found')
for obs in observables:
props = obs.xpath(xpath, namespaces=namespaces)
for leaf in _get_leaves(props):
if leaf.attrib.get('condition'):
continue
result = BestPracticeWarning(leaf)
result['parent indicator id'] = id_
result['parent indicator line'] = indicator.sourceline
results.append(result)
return results
@rule('1.0')
def _check_example_namespace(self, root, namespaces, version): # noqa
"""Checks for nodes in the input `root` document that contain IDs
which fall under the ``example`` namespace.
"""
ex_namespaces = ('http://example.com', 'http://example.com/')
# Get all the namespaces used in the document
doc_nsmap = utils.get_document_namespaces(root)
# Element tags to check for example ID presence
to_check = itertools.chain(
common.STIX_CORE_COMPONENTS,
common.CYBOX_CORE_COMPONENTS
)
results = BestPracticeWarningCollection('IDs Use Example Namespace')
xpath = " | ".join("//%s" % x for x in to_check)
for node in root.xpath(xpath, namespaces=namespaces):
if 'id' not in node.attrib:
continue
# ID attr found. Break it up into ns prefix and local parts
id_parts = node.attrib['id'].split(":")
if len(id_parts) != 2:
continue
# Try to get the namespace mapped to the ID ns prefix
prefix = id_parts[0]
ns = doc_nsmap.get(prefix)
if ns not in ex_namespaces:
continue
result = BestPracticeWarning(node=node)
results.append(result)
return results
def _get_1_2_tlo_deprecations(self, root, namespaces):
"""Checks for the existence of any idref elements inside the STIX
Package top-level collections.
"""
stix = (
'//{0}:Campaigns/{0}:Campaign',
'//{0}:Courses_Of_Action/{0}:Course_Of_Action',
'//{0}:Exploit_Targets/{0}:Exploit_Target',
'//{0}:Incidents/{0}:Incident',
'//{0}:Indicators/{0}:Indicator',
'//{0}:Threat_Actors/{0}:Threat_Actor',
'//{0}:TTPs/{0}:TTP',
'//{0}:Related_Packages/{0}:Related_Package/{0}:Package',
)
cybox = "//{0}:Observables/{1}:Observable".format(
common.PREFIX_STIX_CORE,
common.PREFIX_CYBOX_CORE
)
# Combine the STIX and CybOX selectors
to_check = [x.format(common.PREFIX_STIX_CORE) for x in stix]
to_check.append(cybox)
xpath = " | ".join(to_check)
nodes = root.xpath(xpath, namespaces=namespaces)
# Create result collection
msg = "IDREFs in top-level collections is deprecated."
# Attach warnings to collection
warns = []
for node in nodes:
if 'idref' not in node.attrib:
continue
warn = BestPracticeWarning(node=node, message=msg)
warns.append(warn)
return warns
def _get_1_2_related_package_deprecations(self, root, namespaces):
"""Checks for deprecated use of Related_Packages in STIX component
instances.
"""
selector = "//{0}:Related_Packages"
prefixes = (
common.PREFIX_STIX_CAMPAIGN,
common.PREFIX_STIX_COA,
common.PREFIX_STIX_EXPLOIT_TARGET,
common.PREFIX_STIX_INCIDENT,
common.PREFIX_STIX_INDICATOR,
common.PREFIX_STIX_THREAT_ACTOR,
common.PREFIX_STIX_TTP
)
to_check = (selector.format(prefix) for prefix in prefixes)
xpath = " | ".join(to_check)
nodes = root.xpath(xpath, namespaces=namespaces)
msg = "Use of Related_Packages is deprecated."
warns = [BestPracticeWarning(node=x, message=msg) for x in nodes]
return warns
def _get_1_2_package_deprecations(self, root, namespaces):
"""Checks for deprecated fields on STIX Package instances.
"""
to_check = (
"//{0}:STIX_Package".format(common.PREFIX_STIX_CORE),
"//{0}:Package".format(common.PREFIX_STIX_CORE)
)
xpath = " | ".join(to_check)
nodes = root.xpath(xpath, namespaces=namespaces)
warns = []
for node in nodes:
attrib = node.attrib
if 'idref' in attrib:
msg = "@idref is deprecated in STIX Package."
warn = BestPracticeWarning(node=node, message=msg)
warns.append(warn)
if 'timestamp' in attrib:
msg = "@timestamp is deprecated in STIX Package."
warn = BestPracticeWarning(node=node, message=msg)
warns.append(warn)
return warns
def _get_1_2_header_warnings(self, root, namespaces):
"""Checks for deprecated fields on STIX Header instances.
"""
to_check = (
"{0}:Title".format(common.PREFIX_STIX_CORE),
"{0}:Description".format(common.PREFIX_STIX_CORE),
"{0}:Short_Description".format(common.PREFIX_STIX_CORE),
"{0}:Package_Intent".format(common.PREFIX_STIX_CORE),
)
header = "//{0}:STIX_Header".format(common.PREFIX_STIX_CORE)
xpath = " | ".join("%s/%s" % (header, x) for x in to_check)
nodes = root.xpath(xpath, namespaces=namespaces)
fmt = "%s is deprecated in STIX Header."
warns = []
for node in nodes:
localname = utils.localname(node)
msg = fmt % localname
warn = BestPracticeWarning(node=node, message=msg)
warns.append(warn)
return warns
@rule('1.2')
def _check_1_2_deprecations(self, root, namespaces, version): # noqa
"""Checks the input document `root` for fields that were deprecated
in STIX v1.2.
"""
package_warnings = self._get_1_2_package_deprecations(
root=root,
namespaces=namespaces
)
header_warnings = self._get_1_2_header_warnings(
root=root,
namespaces=namespaces
)
tlo_warnings = self._get_1_2_tlo_deprecations(
root=root,
namespaces=namespaces
)
        related_package_warnings = self._get_1_2_related_package_deprecations(
root=root,
namespaces=namespaces
)
warns = itertools.chain(
package_warnings,
header_warnings,
tlo_warnings,
related_package_warnings
)
results = BestPracticeWarningCollection("STIX 1.2 Deprecations")
results.extend(warns)
return results
def _get_campaign_related_indicators(self, root, namespaces):
xpath = ".//{0}:Related_Indicators".format(common.PREFIX_STIX_CAMPAIGN)
nodes = root.xpath(xpath, namespaces=namespaces)
msg = "Related_Indicators has been deprecated in Campaign."
return [BestPracticeWarning(node=n, message=msg) for n in nodes]
@rule('1.1')
def _check_1_1_deprecations(self, root, namespaces, version): # noqa
"""Checks the input document `root` for fields that were deprecated
in STIX v1.1.
"""
results = BestPracticeWarningCollection("STIX 1.1 Deprecations")
warns = self._get_campaign_related_indicators(root, namespaces)
results.extend(warns)
return results
def _get_bad_ordinalities(self, nodes, tag, namespaces):
"""Returns a set of warnings for nodes in `nodes` that do not comply
with @ordinality use of descriptive elements.
Args:
nodes: A set of nodes that have more than one instance of `tag`
children.
tag: The localname of the nodes to inspect for ordinalities.
namespaces: A list of STIX namespaces.
"""
def can_inspect(node):
"""Only check nodes that are in the STIX namespace and have a
localname that matches the tag (e.g., 'Description').
"""
qname = etree.QName(node)
return (qname.localname == tag) and (qname.namespace in namespaces)
filtered = []
for node in nodes:
# Filter out fields that belong to non-STIX namespaces
filtered.extend(x for x in utils.iterchildren(node) if can_inspect(x))
warns = []
seen = set()
for node in filtered:
o = node.attrib.get('ordinality')
if o is None:
fmt = "@ordinality missing in '{0}' list."
msg = fmt.format(tag)
warns.append(BestPracticeWarning(node=node, message=msg))
continue
o = int(o) # @ordinality is a xs:positiveInteger type.
if o in seen:
fmt = "@ordinality is duplicate in '{0}' list: '{1}'"
msg = fmt.format(tag, o)
warns.append(BestPracticeWarning(node=node, message=msg))
continue
seen.add(o)
return warns
@rule('1.2')
def _check_structured_text_ordinalities(self, root, namespaces, version): # noqa
"""Checks the input STIX document for correct ordinality usage in
StructuredText lists.
Checks for duplicates and missing ordinality attributes in elements
that have lists of StructuredText instances.
"""
# Selects nodes that have more than one instance of a specific
# StructuredTextType child (i.e., more than one Description child).
xpath_fmt = "//*[count(child::*[local-name()='{0}']) > 1]"
tags = (
"Description",
"Short_Description",
"Description_Of_Effect",
"Business_Function_Or_Role"
)
title = "StructuredText @ordinality Use"
results = BestPracticeWarningCollection(title)
nslist = namespaces.values()
for tag in tags:
xpath = xpath_fmt.format(tag)
nodes = root.xpath(xpath, namespaces=namespaces)
if len(nodes) == 0:
continue
warns = self._get_bad_ordinalities(nodes, tag, nslist)
results.extend(warns)
return results
def _get_rules(self, version):
"""Returns a list of best practice check functions that are applicable
to the STIX `version`.
"""
def can_run(stix_version, rule_min, rule_max):
if not rule_min:
return True
doc_ver = StrictVersion(remove_version_prefix(stix_version))
min_ver = StrictVersion(remove_version_prefix(rule_min))
if rule_max:
max_ver = StrictVersion(remove_version_prefix(rule_max))
return (min_ver <= doc_ver <= max_ver)
return min_ver <= doc_ver
StrictVersion = distutils.version.StrictVersion
all_rules = iteritems(self._rules) # noqa
# Get a generator which yields all best practice methods that are
# assigned a version number <= the input STIX document version number.
rules = []
for (versions, funcs) in all_rules:
min_, max_ = versions
rules.extend(f for f in funcs if can_run(version, min_, max_))
return rules
def _run_rules(self, root, version):
"""Runs all best practice rules applicable to a `version` of STIX
against the `root` document.
"""
namespaces = common.get_stix_namespaces(version)
results = BestPracticeValidationResults()
rules = self._get_rules(version)
for func in rules:
result = func(self, root, namespaces=namespaces, version=version)
results.append(result)
return results
@common.check_stix
def validate(self, doc, version=None):
"""Checks that a STIX document aligns with `suggested authoring
practices`_.
.. _suggested authoring practices: http://stixproject.github.io/documentation/suggested-practices/
Args:
doc: The STIX document. Can be a filename, file-like object,
lxml._Element, or lxml._ElementTree instance.
version: The version of the STIX document. This will determine the
set of best practice rules to check. If ``None`` an attempt
will be made to extract the version from `doc`.
Returns:
An instance of
:class:`.BestPracticeValidationResults`.
Raises:
.UnknownSTIXVersionError: If `version` was ``None`` and `doc`
did not contain any version information.
.InvalidSTIXVersionError: If discovered version or `version`
argument contains an invalid STIX version number.
.ValidationError: If there are any issues parsing `doc`.
"""
# Get the element for the input document
root = utils.get_etree_root(doc)
# Get the STIX version for the input `doc` if one is not passed in.
version = version or common.get_version(root)
# Check that the version number is a valid STIX version number
common.check_version(version)
# Run the best practice checks applicable for the STIX version number.
results = self._run_rules(root, version)
# Return the results
return results
__all__ = [
'STIXBestPracticeValidator',
'BestPracticeValidationResults',
'BestPracticeWarningCollection',
'BestPracticeWarning'
]
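# Minimal usage sketch added for illustration (not part of the original
# module); it assumes a STIX XML file named 'stix-doc.xml' exists on disk and
# that the package's dependencies are installed.
if __name__ == '__main__':  # pragma: no cover
    validator = STIXBestPracticeValidator()
    results = validator.validate('stix-doc.xml')
    print(results)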
| bsd-3-clause | 6,831,026,196,110,730,000 | 32.578577 | 106 | 0.583719 | false |
Anstow/TeamAwesome | game/ext/sched.py | 1 | 23655 | """Event scheduler by Joseph Lansdowne.
Uses Pygame's wait function if available, else the less accurate time.sleep.
To use something else, do:
import sched
sched.wait = wait_function
This function should take the number of milliseconds to wait for. This will
always be an integer.
Python version: 2.
Release: 11-dev.
Licensed under the GNU General Public License, version 3; if this was not
included, you can find it here:
http://www.gnu.org/licenses/gpl-3.0.txt
CLASSES
Timer
Scheduler
FUNCTIONS
interp_linear
interp_target
interp_round
interp_repeat
interp_oscillate
"""
from time import time
from bisect import bisect
from math import cos, atan, exp
from random import randrange, expovariate
try:
from pygame.time import wait
except ImportError:
from time import sleep
    def wait (t):
        # time.sleep takes seconds, but this module's API passes milliseconds
        sleep(t / 1000.)
def ir (x):
"""Returns the argument rounded to the nearest integer."""
# this is about twice as fast as int(round(x))
y = int(x)
return (y + (x - y >= .5)) if x > 0 else (y - (y - x >= .5))
def _match_in_nest (obj, x):
"""Check if every object in a data structure is equal to some given object.
_match_in_nest(obj, x)
obj: data structure to look in: an arbitrarily nested list of lists.
x: object to compare to (not a list or tuple).
"""
if isinstance(obj, (tuple, list)):
        return all(_match_in_nest(o, x) for o in obj)
else:
return obj == x
def call_in_nest (f, *args):
"""Collapse a number of similar data structures into one.
Used in interp_* functions.
call_in_nest(f, *args) -> result
Each arg in args is a data structure of nested lists with a similar format (eg.
[1, 2, 3, [4, 5], []] and ['a', 'b', 'c', ['d', 'e'], []]). result is a new
structure in the same format with each non-list object the result of calling f
with the corresponding objects from each arg (eg. f = lambda n, c: str(n) + c
produces the result ['1a', '2b', '3c', ['4d', '5e'], []]).
One argument may have a list where others do not. In this case, those that do
not have the object in that place passed to f for each object in the (possibly
further nested) list in the argument that does. For example, given
[1, 2, [3, 4]], [1, 2, 3] and 1, result is
[f(1, 1, 1), f(2, 2, 1), [f(3, 3, 1), f(4, 3, 1)]]. However, in args with
lists, all lists must be the same length.
"""
is_list = [isinstance(arg, (tuple, list)) for arg in args]
if any(is_list):
n = len(args[is_list.index(True)])
# listify non-list args (assume all lists are the same length)
args = (arg if this_is_list else [arg] * n
for this_is_list, arg in zip(is_list, args))
return [call_in_nest(f, *inner_args) for inner_args in zip(*args)]
else:
return f(*args)
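def _demo_call_in_nest ():
    """Illustrative sketch only: collapses two similar nested structures with
    call_in_nest, as described in its docstring.
    """
    f = lambda n, c: str(n) + c
    # expected result: ['1a', '2b', ['3c', '4d']]
    return call_in_nest(f, [1, 2, [3, 4]], ['a', 'b', ['c', 'd']])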
def _cmp_structure (x, y):
"""Find whether the (nested list) structure of two objects is the same."""
is_list = isinstance(x, (tuple, list))
if is_list != isinstance(y, (tuple, list)):
# one is a list, one isn't
return False
elif is_list:
# both are lists: check length and contents
return len(x) == len(y) and \
all(_cmp_structure(xi, yi) for xi, yi in zip(x, y))
else:
# neither is a list
return True
def interp_linear (*waypoints):
"""Linear interpolation for Scheduler.interp.
interp_linear(*waypoints) -> f
waypoints: each is (v, t) to set the value to v at time t. t can be omitted
for any but the last waypoint; the first is 0, and other gaps are
filled in with equal spacing. v is like the arguments taken by the
           call_in_nest function in this module, and we interpolate for each
           number in the nested list structure of v. Some objects in the v
structures may be non-numbers, in which case they will not be varied
(maybe your function takes another argument you don't want to vary).
f: a function for which f(t) = v for every waypoint, with intermediate values
linearly interpolated between waypoints.
"""
# fill in missing times
vs = []
ts = []
last = waypoints[-1]
for w in waypoints:
if w is last or _cmp_structure(w, last):
vs.append(w[0])
ts.append(w[1])
else:
vs.append(w)
ts.append(None)
ts[0] = 0
# get groups with time = None
groups = []
group = None
for i, (v, t) in enumerate(zip(vs, ts)):
if t is None:
if group is None:
group = [i]
groups.append(group)
else:
if group is not None:
group.append(i)
group = None
# and assign times within those groups
for i0, i1 in groups:
t0 = ts[i0 - 1]
dt = float(ts[i1] - t0) / (i1 - (i0 - 1))
for i in xrange(i0, i1):
ts[i] = t0 + dt * (i - (i0 - 1))
interp_val = lambda r, v1, v2: (r * (v2 - v1) + v1) \
if isinstance(v1, (int, float)) else v1
def val_gen ():
t = yield
while 1:
# get waypoints we're between
i = bisect(ts, t)
if i == 0:
# before start
t = yield vs[0]
elif i == len(ts):
# past end: use final value, then end
t = yield vs[-1]
yield None # to avoid StopIteration issues
return
else:
v0 = vs[i - 1]
v1 = vs[i]
t0 = ts[i - 1]
t1 = ts[i]
# get ratio of the way between waypoints
r = 1 if t1 == t0 else (t - t0) / (t1 - t0) # t is always float
t = yield call_in_nest(interp_val, r, v0, v1)
# start the generator; get_val is its send method
g = val_gen()
g.next()
return g.send
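def _demo_interp_linear ():
    """Illustrative sketch only: a ramp from 0 to 10 over one second built
    with interp_linear; sampling halfway through gives 5.0.
    """
    get_val = interp_linear(0, (10, 1))
    return get_val(.5)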
def interp_target (v0, target, damp, freq = 0, speed = 0, threshold = 0):
"""Move towards a target.
interp_target(v0, target, damp, freq = 0, speed = 0, threshold = 0) -> f
v0: the initial value (a structure of numbers like arguments to this module's
call_in_nest function). Elements which are not numbers are ignored.
target: the target value (has the same form as v0).
damp: rate we move towards the target (> 0).
freq: if damping is low, oscillation around the target can occur, and this
controls the frequency. If 0, there is no oscillation.
speed: if frequency is non-zero, this is the initial 'speed', in the same form
as v0.
threshold: stop when within this distance of the target, in the same form as
v0. If None, never stop. If varying more than one number, only
stop when every number is within its threshold.
f: function that returns position given the current time.
"""
if v0 == target: # nothing to do
return lambda t: None
    def get_phase (v0, target, speed):
if freq == 0 or not isinstance(v0, (int, float)) or v0 == target:
return 0
else:
return atan(-(float(speed) / (v0 - target) + damp) / freq)
phase = call_in_nest(get_phase, v0, target, speed)
def get_amplitude (v0, target, phase):
if isinstance(v0, (int, float)):
return (v0 - target) / cos(phase) # cos(atan(x)) is never 0
amplitude = call_in_nest(get_amplitude, v0, target, phase)
def get_val (t):
def interp_val (v0, target, amplitude, phase, threshold):
if not isinstance(v0, (int, float)):
return v0
# amplitude is None if non-number
if amplitude is None or v0 == target:
if threshold is not None:
return None
return v0
else:
dist = amplitude * exp(-damp * t)
if threshold is not None and abs(dist) <= threshold:
return None
return dist * cos(freq * t + phase) + target
rtn = call_in_nest(interp_val, v0, target, amplitude, phase, threshold)
if _match_in_nest(rtn, None):
# all done
rtn = None
return rtn
return get_val
def interp_shake (centre, amplitude = 1, threshold = 0, signed = True):
"""Shake randomly.
interp_shake(centre, amplitude = 1, threshold = 0, signed = True) -> f
centre: the value to shake about; a nested list (a structure of numbers like
arguments to this module's call_in_nest function). Elements which are
not numbers are ignored.
amplitude: a number to multiply the value by. This can be a function that
takes the elapsed time in seconds to vary in time. Has the same
form as centre (return value if a function).
threshold: stop when amplitude is this small. If None, never stop. If varying
more than one number, only stop when every number is within its
threshold.
signed: whether to shake around the centre. If False, values are greater than
        centre (note that amplitude may be signed).
f: function that returns position given the current time.
"""
def get_val (t):
def interp_val (centre, amplitude, threshold):
if not isinstance(centre, (int, float)):
return centre
if threshold is not None and abs(amplitude) <= threshold:
return None
val = amplitude * expovariate(1)
if signed:
val *= 2 * randrange(2) - 1
return centre + val
a = amplitude(t) if callable(amplitude) else amplitude
rtn = call_in_nest(interp_val, centre, a, threshold)
if _match_in_nest(rtn, None):
# all done
rtn = None
return rtn
return get_val
def interp_round (get_val, do_round = True):
"""Round the output of an existing interpolation function to integers.
interp_round(get_val, do_round = True) -> f
get_val: the existing function. The values it returns are as the arguments
taken by the call_in_nest function in this module.
do_round: determines which values to round. This is in the form of the values
get_val returns, a structure of lists and booleans corresponding to
each number in get_val. Any list in this structure can be replaced
by a single boolean to apply to the entire (nested) list. Non-number
objects in the value's structure are ignored.
f: the get_val wrapper that rounds the returned value.
"""
def round_val (do, v):
return ir(v) if isinstance(v, (int, float)) and do else v
def round_get_val (t):
return call_in_nest(round_val, do_round, get_val(t))
return round_get_val
def interp_repeat (get_val, period, t_min = 0, t_start = None):
"""Repeat an existing interpolation function.
interp_repeat(get_val, period, t_min = 0, t_start = t_min) -> f
get_val: an existing interpolation function, as taken by Scheduler.interp.
Times passed to the returned function are looped around to fit in the range
[t_min, t_min + period), starting at t_start, and the result is passed to
get_val.
f: the get_val wrapper that repeats get_val over the given period.
"""
if t_start is None:
t_start = t_min
return lambda t: get_val(t_min + (t_start - t_min + t) % period)
def interp_oscillate (get_val, t_max, t_min = 0, t_start = None):
"""Repeat a linear oscillation over an existing interpolation function.
interp_oscillate(get_val, t_max, t_min = 0, t_start = t_min) -> f
get_val: an existing interpolation function, as taken by Scheduler.interp.
Times passed to the returned function are looped and reversed to fit in the
range [t_min, t_max), starting at t_start. If t_start is in the range
[t_max, 2 * t_max - t_min), it is mapped to the 'return journey' of the
oscillation.
f: the generated get_val wrapper.
"""
if t_start is None:
t_start = t_min
period = t_max - t_min
def osc_get_val (t):
t = (t_start - t_min + t) % (2 * period)
if t >= period:
t = 2 * period - t
return get_val(t_min + t)
return osc_get_val
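def _demo_interp_oscillate ():
    """Illustrative sketch only: bounce the 0-to-10 ramp from interp_linear
    back and forth; sampling at t = 1.5 lands on the return journey (5.0).
    """
    ramp = interp_linear(0, (10, 1))
    return interp_oscillate(ramp, 1)(1.5)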
class Timer (object):
"""Simple timer.
Either call run once and stop if you need to, or step every time you've done
what you need to.
CONSTRUCTOR
Timer(fps = 60)
fps: frames per second to aim for.
METHODS
run
step
stop
ATTRIBUTES
fps: the current target FPS. Set this directly.
frame: the current length of a frame in seconds.
t: the time at the last step, if using individual steps.
"""
def __init__ (self, fps = 60):
self.fps = fps
self.t = time()
def run (self, cb, *args, **kwargs):
"""Run indefinitely or for a specified amount of time.
run(cb, *args[, seconds][, frames]) -> remain
cb: a function to call every frame.
args: extra arguments to pass to cb.
seconds, frames: keyword-only arguments that determine how long to run for. If
seconds is passed, frames is ignored; if neither is given, run
forever (until Timer.stop is called). Either can be a float.
Time passed is based on the number of frames that have passed,
so it does not necessarily reflect real time.
remain: the number of frames/seconds left until the timer has been running for
the requested amount of time (or None, if neither were given). This
may be less than 0 if cb took a long time to run.
"""
self.stopped = False
seconds = kwargs.get('seconds')
frames = kwargs.get('frames')
if seconds is not None:
seconds = max(seconds, 0)
elif frames is not None:
frames = max(frames, 0)
# main loop
t0 = time()
while 1:
frame = self.frame
cb(*args)
t = time()
t_gone = min(t - t0, frame)
if self.stopped:
if seconds is not None:
return seconds - t_gone
elif frames is not None:
return frames - t_gone / frame
else:
return None
t_left = frame - t_gone # until next frame
if seconds is not None:
t_left = min(seconds, t_left)
elif frames is not None:
t_left = min(frames, t_left / frame)
if t_left > 0:
wait(int(1000 * t_left))
t0 = t + t_left
else:
t0 = t
if seconds is not None:
seconds -= t_gone + t_left
if seconds <= 0:
return seconds
elif frames is not None:
frames -= (t_gone + t_left) / frame
if frames <= 0:
return frames
def step (self):
"""Step forwards one frame."""
t = time()
t_left = self.t + self.frame - t
if t_left > 0:
wait(int(1000 * t_left))
self.t = t + t_left
else:
self.t = t
def stop (self):
"""Stop any current call to Timer.run."""
self.stopped = True
@property
def fps (self):
return self._fps
@fps.setter
def fps (self, fps):
self._fps = int(round(fps))
self.frame = 1. / fps
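def _demo_timer ():
    """Illustrative sketch only: run a Timer at the default 60fps for a tenth
    of a second, counting callback invocations (roughly six of them).
    """
    counts = []
    t = Timer()
    t.run(lambda: counts.append(None), seconds = .1)
    return len(counts)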
class Scheduler (Timer):
"""Simple event scheduler (Timer subclass).
Takes the same arguments as Timer.
METHODS
add_timeout
rm_timeout
interp
interp_simple
"""
def __init__ (self, fps = 60):
Timer.__init__(self, fps)
self._cbs = {}
self._max_id = 0
def run (self, seconds = None, frames = None):
"""Start the scheduler.
run([seconds][, frames]) -> remain
Arguments and return value are as for Timer.run.
"""
return Timer.run(self, self._update, seconds = seconds,
frames = frames)
def step (self):
self._update()
Timer.step(self)
def add_timeout (self, cb, *args, **kwargs):
"""Call a function after a delay.
add_timeout(cb, *args[, seconds][, frames][, repeat_seconds][, repeat_frames])
-> ID
cb: the function to call.
args: list of arguments to pass to cb.
seconds: how long to wait before calling, in seconds (respects changes to FPS).
If passed, frames is ignored.
frames: how long to wait before calling, in frames (same number of frames even
if FPS changes).
repeat_seconds, repeat_frames:
how long to wait between calls; time is determined as for the seconds and
frames arguments. If repeat_seconds is passed, repeat_frames is ignored;
if neither is passed, the initial time delay is used between calls.
ID: an ID to pass to rm_timeout. This is guaranteed to be unique over time.
Times can be floats, in which case part-frames are carried over, and time
between calls is actually an average over a large enough number of frames.
The called function can return a boolean True object to repeat the timeout;
otherwise it will not be called again.
"""
seconds = kwargs.get('seconds')
frames = kwargs.get('frames')
repeat_seconds = kwargs.get('repeat_seconds')
repeat_frames = kwargs.get('repeat_frames')
if seconds is not None:
frames = None
if repeat_seconds is not None:
repeat_frames = None
elif repeat_frames is None:
repeat_seconds = seconds
repeat_frames = frames
self._cbs[self._max_id] = [seconds, frames, repeat_seconds,
repeat_frames, cb, args]
self._max_id += 1
# ID is key in self._cbs
return self._max_id - 1
def rm_timeout (self, *ids):
"""Remove the timeouts with the given IDs."""
for i in ids:
try:
del self._cbs[i]
except KeyError:
pass
def _update (self):
"""Handle callbacks this frame."""
cbs = self._cbs
frame = self.frame
# cbs might add/remove cbs, so use items instead of iteritems
for i, data in cbs.items():
if i not in cbs:
# removed since we called .items()
continue
if data[0] is not None:
remain = 0
dt = frame
else:
remain = 1
dt = 1
data[remain] -= dt
if data[remain] <= 0:
# call callback
if data[4](*data[5]):
# add on delay
total = 0 if data[2] is not None else 1
data[not total] = None
data[total] += data[total + 2]
elif i in cbs: # else removed in above call
del cbs[i]
def interp (self, get_val, set_val, t_max = None, val_min = None,
val_max = None, end = None, round_val = False,
multi_arg = False):
"""Vary a value over time.
interp(get_val, set_val[, t_max][, val_min][, val_max][, end],
round_val = False, multi_arg = False) -> timeout_id
get_val: a function called with the elapsed time in seconds to obtain the
current value. If this function returns None, the interpolation will
be canceled. The interp_* functions in this module can be used to
construct such functions. The value must actually be a list of
arguments to pass to set_val (unless set_val is (obj, attr)).
set_val: a function called with the current value to set it. This may also be
an (obj, attr) tuple to do obj.attr = val.
t_max: if time becomes larger than this, cancel the interpolation.
val_min, val_max: minimum and maximum values of the interpolated value. If
given, get_val must only return values that can be compared
with these. If the value ever falls outside of this range,
set_val is called with the value at the boundary it is beyond
(val_min or val_max) and the interpolation is canceled.
end: used to do some cleanup when the interpolation is canceled (when get_val
returns None or t_max, val_min or val_max comes into effect, but not when
the rm_timeout method is called with the returned id). This can be a
final value to pass to set_val, or a function to call without arguments.
If the function returns a (non-None) value, set_val is called with it.
round_val: whether to round the value(s) (see the interp_round function in this
module for other possible values).
multi_arg: whether values should be interpreted as lists of arguments to pass
to set_val instead of a single list argument.
timeout_id: an identifier that can be passed to the rm_timeout method to remove
the callback that continues the interpolation. In this case the
end argument is not respected.
"""
if round_val:
get_val = interp_round(get_val, round_val)
if not callable(set_val):
obj, attr = set_val
set_val = lambda val: setattr(obj, attr, val)
def timeout_cb ():
t = 0
last_v = None
done = False
while 1:
t += self.frame
v = get_val(t)
if v is None:
done = True
# check bounds
elif t_max is not None and t > t_max:
done = True
else:
if val_min is not None and v < val_min:
done = True
v = val_min
elif val_max is not None and v > val_max:
done = True
v = val_max
if v != last_v:
set_val(*v) if multi_arg else set_val(v)
last_v = v
if done:
# canceling for some reason
if callable(end):
v = end()
else:
v = end
# set final value if want to
if v is not None and v != last_v:
set_val(*v) if multi_arg else set_val(v)
yield False
# just in case we get called again (should never happen)
return
else:
yield True
return self.add_timeout(timeout_cb().next, frames = 1)
def interp_simple (self, obj, attr, target, t, end_cb = None,
round_val = False):
"""A simple version of the interp method.
Varies an object's attribute linearly from its current value to a target value
in a set amount of time.
interp_simple(obj, attr, target, t[, end], round_val = False) -> timeout_id
obj, attr: this function varies the attribute attr of the object obj.
target: a target value, in the same form as the current value in the given
attribute.
t: the amount of time to take to reach the target value.
end_cb: a function to call when the target value has been reached.
round_val: whether to round the value(s) (see the interp_round function in this
module for other possible values).
timeout_id: an identifier that can be passed to the rm_timeout method to remove
the callback that continues the interpolation. In this case end_cb
is not called.
"""
get_val = interp_linear(getattr(obj, attr), (target, t))
self.interp(get_val, (obj, attr), end = end_cb, round_val = round_val)
| gpl-3.0 | 7,478,932,857,420,847,000 | 33.035971 | 141 | 0.581695 | false |
walterbender/yupana | utils.py | 1 | 1166 | #Copyright (c) 2011,12 Walter Bender
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# You should have received a copy of the GNU General Public License
# along with this library; if not, write to the Free Software
# Foundation, 51 Franklin Street, Suite 500 Boston, MA 02110-1335 USA
from StringIO import StringIO
import json
from json import load as jload
from json import dump as jdump
def json_load(text):
""" Load JSON data using what ever resources are available. """
# strip out leading and trailing whitespace, nulls, and newlines
io = StringIO(text)
try:
listdata = jload(io)
except ValueError:
# assume that text is ascii list
listdata = text.split()
for i, value in enumerate(listdata):
listdata[i] = int(value)
return listdata
def json_dump(data):
""" Save data using available JSON tools. """
_io = StringIO()
jdump(data, _io)
return _io.getvalue()
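def _demo_roundtrip():
    """ Illustrative sketch only (not part of the original module): round-trip
    a list through json_dump and json_load. """
    text = json_dump([1, 2, 3])
    return json_load(text)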
| gpl-3.0 | 2,220,261,143,581,196,300 | 29.684211 | 70 | 0.698971 | false |
maaaaz/fgpoliciestocsv | fggroupstocsv.py | 1 | 6311 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# This file is part of fgpoliciestocsv.
#
# Copyright (C) 2014, 2020, Thomas Debize <tdebize at mail.com>
# All rights reserved.
#
# fgpoliciestocsv is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# fgpoliciestocsv is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with fgpoliciestocsv. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from os import path
import io
import sys
import re
import csv
import os
# OptionParser imports
from optparse import OptionParser
from optparse import OptionGroup
# Options definition
parser = OptionParser(usage="%prog [options]")
main_grp = OptionGroup(parser, 'Main parameters')
main_grp.add_option('-i', '--input-file', help='Partial or full Fortigate configuration file. Ex: fgfw.cfg')
main_grp.add_option('-o', '--output-file', help='Output csv file (default ./groups-out.csv)', default=path.abspath(path.join(os.getcwd(), './groups-out.csv')))
main_grp.add_option('-s', '--skip-header', help='Do not print the csv header', action='store_true', default=False)
main_grp.add_option('-n', '--newline', help='Insert a newline between each group for better readability', action='store_true', default=False)
main_grp.add_option('-d', '--delimiter', help='CSV delimiter (default ";")', default=';')
main_grp.add_option('-e', '--encoding', help='Input file encoding (default "utf8")', default='utf8')
parser.option_groups.extend([main_grp])
# Python 2 and 3 compatibility
if (sys.version_info < (3, 0)):
fd_read_options = 'r'
fd_write_options = 'wb'
else:
fd_read_options = 'r'
fd_write_options = 'w'
# Handful patterns
# -- Entering group definition block
p_entering_group_block = re.compile(r'^\s*config firewall addrgrp$', re.IGNORECASE)
# -- Exiting group definition block
p_exiting_group_block = re.compile(r'^end$', re.IGNORECASE)
# -- Commiting the current group definition and going to the next one
p_group_next = re.compile(r'^next$', re.IGNORECASE)
# -- Policy number
p_group_name = re.compile(r'^\s*edit\s+"(?P<group_name>.*)"$', re.IGNORECASE)
# -- Policy setting
p_group_set = re.compile(r'^\s*set\s+(?P<group_key>\S+)\s+(?P<group_value>.*)$', re.IGNORECASE)
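# Illustrative sketch only: shows the two patterns above matching a typical
# FortiGate addrgrp block (the exact field set varies between FortiOS
# versions, so treat the sample lines as assumptions).
def _demo_patterns():
    name_m = p_group_name.search('    edit "internal-hosts"')
    set_m = p_group_set.search('        set member "host-a" "host-b"')
    return name_m.group('group_name'), set_m.group('group_key')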
# Functions
def parse(options):
"""
Parse the data according to several regexes
@param options: options
    @return: a list of groups ( [ {'id' : '1', 'srcintf' : 'internal', ...}, {'id' : '2', 'srcintf' : 'external', ...}, ... ] )
and the list of unique seen keys ['id', 'srcintf', 'dstintf', ...]
"""
global p_entering_group_block, p_exiting_group_block, p_group_next, p_group_name, p_group_set
in_group_block = False
group_list = []
group_elem = {}
order_keys = []
with io.open(options.input_file, mode=fd_read_options, encoding=options.encoding) as fd_input:
for line in fd_input:
line = line.strip()
# We match a group block
if p_entering_group_block.search(line):
in_group_block = True
# We are in a group block
if in_group_block:
if p_group_name.search(line):
group_name = p_group_name.search(line).group('group_name')
group_elem['name'] = group_name
if not('name' in order_keys): order_keys.append('name')
# We match a setting
if p_group_set.search(line):
group_key = p_group_set.search(line).group('group_key')
if not(group_key in order_keys): order_keys.append(group_key)
group_value = p_group_set.search(line).group('group_value').strip()
group_value = re.sub('["]', '', group_value)
group_elem[group_key] = group_value
# We are done with the current group id
if p_group_next.search(line):
group_list.append(group_elem)
group_elem = {}
# We are exiting the group block
if p_exiting_group_block.search(line):
in_group_block = False
return (group_list, order_keys)
def generate_csv(results, keys, options):
"""
Generate a plain ';' separated csv file
"""
if results and keys:
with io.open(options.output_file, mode=fd_write_options) as fd_output:
spamwriter = csv.writer(fd_output, delimiter=options.delimiter, quoting=csv.QUOTE_ALL, lineterminator='\n')
if not(options.skip_header):
spamwriter.writerow(keys)
for group in results:
output_line = []
for key in keys:
if key in group.keys():
if "member" == key:
output_line.append("\n".join(group[key].split(" ")))
else:
output_line.append(group[key])
else:
output_line.append('')
spamwriter.writerow(output_line)
if options.newline:
spamwriter.writerow('')
fd_output.close()
return None
def main():
"""
Dat main
"""
global parser
options, arguments = parser.parse_args()
if (options.input_file == None):
parser.error('Please specify a valid input file')
results, keys = parse(options)
generate_csv(results, keys, options)
return None
if __name__ == "__main__" :
main()
| gpl-3.0 | 8,581,508,258,251,817,000 | 34.857955 | 159 | 0.58501 | false |
RedhawkSDR/integration-gnuhawk | components/multiply_cc_2i/tests/test_multiply_cc_2i.py | 1 | 4535 | #!/usr/bin/env python
#
# This file is protected by Copyright. Please refer to the COPYRIGHT file
# distributed with this source distribution.
#
# This file is part of GNUHAWK.
#
# GNUHAWK is free software: you can redistribute it and/or modify is under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# GNUHAWK is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along with
# this program. If not, see http://www.gnu.org/licenses/.
#
import unittest
import ossie.utils.testing
import os
from omniORB import any
class ComponentTests(ossie.utils.testing.ScaComponentTestCase):
"""Test for all component implementations in multiply_cc_2i"""
def testScaBasicBehavior(self):
#######################################################################
# Launch the component with the default execparams
execparams = self.getPropertySet(kinds=("execparam",), modes=("readwrite", "writeonly"), includeNil=False)
execparams = dict([(x.id, any.from_any(x.value)) for x in execparams])
self.launch(execparams)
#######################################################################
# Verify the basic state of the component
self.assertNotEqual(self.comp, None)
self.assertEqual(self.comp.ref._non_existent(), False)
self.assertEqual(self.comp.ref._is_a("IDL:CF/Resource:1.0"), True)
self.assertEqual(self.spd.get_id(), self.comp.ref._get_identifier())
#######################################################################
# Simulate regular component startup
# Verify that initialize nor configure throw errors
self.comp.initialize()
configureProps = self.getPropertySet(kinds=("configure",), modes=("readwrite", "writeonly"), includeNil=False)
self.comp.configure(configureProps)
#######################################################################
# Validate that query returns all expected parameters
# Query of '[]' should return the following set of properties
expectedProps = []
expectedProps.extend(self.getPropertySet(kinds=("configure", "execparam"), modes=("readwrite", "readonly"), includeNil=True))
expectedProps.extend(self.getPropertySet(kinds=("allocate",), action="external", includeNil=True))
props = self.comp.query([])
props = dict((x.id, any.from_any(x.value)) for x in props)
# Query may return more than expected, but not less
for expectedProp in expectedProps:
self.assertEquals(props.has_key(expectedProp.id), True)
#######################################################################
# Verify that all expected ports are available
for port in self.scd.get_componentfeatures().get_ports().get_uses():
port_obj = self.comp.getPort(str(port.get_usesname()))
self.assertNotEqual(port_obj, None)
self.assertEqual(port_obj._non_existent(), False)
self.assertEqual(port_obj._is_a("IDL:CF/Port:1.0"), True)
for port in self.scd.get_componentfeatures().get_ports().get_provides():
port_obj = self.comp.getPort(str(port.get_providesname()))
self.assertNotEqual(port_obj, None)
self.assertEqual(port_obj._non_existent(), False)
self.assertEqual(port_obj._is_a(port.get_repid()), True)
#######################################################################
# Make sure start and stop can be called without throwing exceptions
self.comp.start()
self.comp.stop()
#######################################################################
# Simulate regular component shutdown
self.comp.releaseObject()
# TODO Add additional tests here
#
# See:
# ossie.utils.bulkio.bulkio_helpers,
# ossie.utils.bluefile.bluefile_helpers
# for modules that will assist with testing components with BULKIO ports
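    def testStartStopCycles(self):
        """Hedged example added for illustration (not part of the generated
        suite): launch with default execparams and verify that repeated
        start/stop cycles do not raise."""
        execparams = self.getPropertySet(kinds=("execparam",), modes=("readwrite", "writeonly"), includeNil=False)
        execparams = dict([(x.id, any.from_any(x.value)) for x in execparams])
        self.launch(execparams)
        for _ in range(2):
            self.comp.start()
            self.comp.stop()
        self.comp.releaseObject()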
if __name__ == "__main__":
ossie.utils.testing.main("../multiply_cc_2i.spd.xml") # By default tests all implementations
| gpl-3.0 | 982,034,860,315,897,900 | 47.763441 | 133 | 0.588534 | false |
anshulkgupta/viznow | gary/mhacks/views.py | 1 | 1927 | import os
import json
from django.shortcuts import render
from django.http import HttpResponse, JsonResponse
from django.contrib.auth.decorators import login_required
from django.views.decorators.csrf import csrf_exempt
#from mhacks.equities import Field
EQUITY_MAP = {
'AAPL': 'AAPL US EQUITY'
}
def home_page(request):
return render(request, 'home.html')
def uber_page(request):
return render(request, 'final_uber.html')
def enter_page(request):
return render(request, 'enter.html')
def airline_page(request):
return render(request, 'flightanimation.html')
def bubble_page(request):
return render(request, 'custom_final_bubble.html')
def globe_page(request):
return render(request, 'custom_final_globe.html')
def chord_page(request):
return render(request, 'custom_final_chord.html')
def line_page(request):
return render(request, 'custom_final_line.html')
def chloropleth_page(request):
return render(request, 'custom_final_chloropleth.html')
def final_custom_page(request, page, id):
return render(request, 'custom_final.html', {'page' : page, 'id': id})
def fileupload_page(request, page, id):
return render(request, 'fileupload.html', {'page' : page, 'id': id})
def upload_page(request):
return render(request, 'upload1.html')
def upload_unique_page(request, id):
return render(request, 'upload_unique.html', {'page' : id})
def visualization_page(request, page, id):
return render(request, 'visualization.html', {'page': page, 'id': id})
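# Hedged sketch of how these views might be mapped in urls.py; the route
# patterns below are assumptions for illustration, not the project's actual
# configuration:
#
#   url(r'^$', views.home_page),
#   url(r'^upload$', views.upload_page),
#   url(r'^visualization/(?P<page>\w+)/(?P<id>\w+)$', views.visualization_page),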
@csrf_exempt
def handle_upload(request):
#equities = request.post['equities']
#str_param = EQUITY_MAP.get(equities)
root = os.path.dirname(__file__)
    json_file = '%s/equities/fixtures/newstock.json' % root
    with open(json_file) as json_fd:
        json_data = json_fd.read()
    equities = json.loads(json_data.replace('\n', ''))
#field = Field(str_param)
#return HttpResponse(field.getData(), content_type="application/json")
return JsonResponse(equities) | mit | -4,921,216,448,401,089,000 | 27.776119 | 72 | 0.728075 | false |
marcela2/minhatv | plugin.video.rtpplay/resources/ondemand.py | 1 | 9380 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
Author: enen92
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import xbmc,xbmcgui,xbmcaddon,xbmcplugin,xbmcvfs,sys,os,re
from common_variables import *
from directory import *
from webutils import *
from utilities import *
from resolver import *
from rtpplayer import *
from iofile import *
def list_tv_shows(name,url):
try:
page_source = abrir_url(url)
except:
page_source = ''
msgok(translate(30001),translate(30018))
if page_source:
match=re.compile('<a class="text-white" href="(.+?)" title=".+?">(.+?)</a>').findall(page_source)
totalit= len(match)
for urlsbase,titulo in match:
titulo = title_clean_up(titulo)
if selfAddon.getSetting('icon_plot') == 'true':
try:
html_source = abrir_url(base_url + urlsbase)
except: html_source = ''
if html_source:
try: thumbnail=re.compile('<img class="pull-left" src="(.+?)"').findall(html_source)[0]
except: thumbnail=''
sinopse= re.findall('id="promo">.+?\n.+?<p>(.*?)</p>', html_source, re.DOTALL)
if sinopse: information = { "Title": name,"plot": clean_html(title_clean_up(sinopse[0])) }
else: information = { "Title": name,"plot":translate(30026) }
addprograma(titulo,base_url + urlsbase,16,thumbnail,totalit,information)
else:
information = { "Title": name,"plot":translate(30026) }
thumbnail = ''
addprograma(titulo,base_url + urlsbase,15,thumbnail,totalit,information)
xbmcplugin.setContent(int(sys.argv[1]), 'tvshows')
setview('show-view')
else:
sys.exit(0)
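# Hedged sketch (an illustration, not code from this add-on) of how the
# entry-point script typically routes the "mode" numbers used in the
# addprograma/addepisode/addDir calls in this module:
#
#	if mode == 16: list_episodes(name, url, plot)
#	elif mode == 17: get_show_episode_parts(name, url, iconimage)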
def list_episodes(name,url,plot):
program_name = name.split('|')
if len(program_name) > 1: titulo = program_name[1].replace('[/COLOR]','').replace('[/B]','')
else: titulo = name
prog_id=re.compile('http://www.rtp.pt/play/p(.+?)/').findall(url)
if not prog_id: prog_id=re.compile('listProgram=(\d+)&').findall(url)
page_num = re.compile('&page=(\d+)&').findall(url)
if not page_num: current_page = '1'
else: current_page = page_num[0]
if ('recent' not in url) and ('popular' not in url) and ('procura?' not in url):
url='http://www.rtp.pt/play/bg_l_ep/?listDate=&listQuery=&listProgram='+prog_id[0]+'&listcategory=&listchannel=&listtype=recent&page='+current_page+'&type=all'
else:pass
print url
try:
source = abrir_url(url)
except: source=''; msgok(translate(30001),translate(30018))
if source:
match_geral = re.findall('<div class="lazy(.*?)</i></span>',source,re.DOTALL)
if match_geral:
totalit = len(match_geral)
for match in match_geral:
data = re.compile('<span class="small clearfix text-light">(.+?)</span>').findall(match)
lnk = re.compile('href="(.+?)" ').findall(match)
titulo_array = re.compile('title="(.+?)" ').findall(match)
if titulo_array:
if 'itemprop' not in titulo_array[0]:
titulo = title_clean_up(titulo_array[0])
img_tmp = re.compile('itemprop="image" src=".+?src=(.+?)&.+?"').findall(match)
if img_tmp: img = img_base_url + img_tmp[0]
else: img = ''
if data and lnk:
information = { "Title": titulo,"plot":plot,"aired":format_data(data[0]) }
addepisode('[B]' + titulo + '[COLOR blue] (' + title_clean_up(data[0]) +')' + '[/B][/COLOR]',base_url + lnk[0],17,img,totalit,information)
try:
next_url = 'http://www.rtp.pt/play/bg_l_ep/?listDate=&listQuery=&listProgram='+prog_id[0]+'&listcategory=&listchannel=&listtype=recent&page='+str(int(current_page)+1)+'&type=all'
try: source_next = abrir_url(next_url)
except: source_next = ''
if source_next:
if re.findall('itemscope itemtype="http://schema.org/TVSeries"',source_next):
addDir('[B][COLOR blue]'+translate(30028)+'|[/B][/COLOR]'+titulo,next_url,16,os.path.join(artfolder,'next.png'),1,pasta=True,informacion=information)
except: pass
xbmcplugin.setContent(int(sys.argv[1]), 'episodes')
setview('episodes-view')
def list_emissoes(urltmp):
try:
page_source = abrir_url(urltmp)
except:
page_source = ''
msgok(translate(30001),translate(30018))
if page_source:
program_list=re.findall('<section>(.+?)</section>',page_source,re.DOTALL)
if program_list:
match = re.findall('href="(.+?)".*?itemprop="name">(.+?)</b',program_list[1],re.DOTALL)
if match:
totalit = len(match)
for urlsbase,titulo in match:
if selfAddon.getSetting('icon_plot') == 'true':
try:
source = abrir_url(base_url + urlsbase)
sinopse=re.findall('id="promo">.+?\n.+?<p>(.*?)</p>', source, re.DOTALL)
if sinopse: plot = clean_html(title_clean_up(sinopse[0]))
information={ "Title": title_clean_up(titulo),"plot":plot }
try: thumbnail=img_base_url + re.compile('src=(.+?)&').findall(source)[0]
except: thumbnail=''
except: information={ "Title": title_clean_up(titulo),"plot":translate(30026) };thumbnail=''
else: information={ "Title": title_clean_up(titulo),"plot":translate(30026) };thumbnail=''
addepisode(title_clean_up(titulo),base_url + urlsbase,17,thumbnail,totalit,information)
xbmcplugin.setContent(int(sys.argv[1]), 'episodes')
setview('episodes-view')
else: msgok(translate(30001),translate(30032));sys.exit(0)
def pesquisa_emissoes():
if not xbmcvfs.exists(os.path.join(datapath,'searchemiss.txt')):
keyb = xbmc.Keyboard('', translate(30031))
keyb.doModal()
if (keyb.isConfirmed()):
search = keyb.getText()
encode=urllib.quote(search)
urltmp = base_url + '/play/pesquisa?c_t=&q=' + encode
save(os.path.join(datapath,'searchemiss.txt'),urltmp)
list_emissoes(urltmp)
else:
text = readfile(os.path.join(datapath,'searchemiss.txt'))
list_emissoes(text)
def pesquisa_programas():
if not xbmcvfs.exists(os.path.join(datapath,'searchprog.txt')):
keyb = xbmc.Keyboard('', translate(30031))
keyb.doModal()
if (keyb.isConfirmed()):
search = keyb.getText()
encode=urllib.quote(search)
urltmp = base_url + '/play/pesquisa?c_t=&q=' + encode
save(os.path.join(datapath,'searchprog.txt'),urltmp)
list_show_search(urltmp)
else:
text = readfile(os.path.join(datapath,'searchprog.txt'))
list_show_search(text)
def list_show_search(url):
try:
page_source = abrir_url(url)
except:
page_source = ''
msgok(translate(30001),translate(30018))
if page_source:
program_list=re.findall('<section>(.+?)</section>',page_source,re.DOTALL)
if program_list:
match = re.findall('href="(.+?)".*?itemprop="name">(.+?)</b',program_list[0],re.DOTALL)
if match:
totalit = len(match)
for urlsbase,titulo in match:
if selfAddon.getSetting('icon_plot') == 'true':
try:
source = abrir_url(base_url + urlsbase)
sinopse=re.findall('id="promo">.+?\n.+?<p>(.*?)</p>', source, re.DOTALL)
if sinopse: plot = clean_html(title_clean_up(sinopse[0]))
information={ "Title": title_clean_up(titulo),"plot":plot }
try: thumbnail=img_base_url + re.compile('src=(.+?)&').findall(source)[0]
except: thumbnail=''
except: information={ "Title": title_clean_up(titulo),"plot":translate(30026) };thumbnail=''
else: information={ "Title": title_clean_up(titulo),"plot":translate(30026) };thumbnail=''
addprograma(title_clean_up(titulo),base_url + urlsbase,16,thumbnail,totalit,information)
xbmcplugin.setContent(int(sys.argv[1]), 'tvshows')
setview('show-view')
else: msgok(translate(30001),translate(30032));sys.exit(0)
def get_show_episode_parts(name,url,iconimage):
try:
source = abrir_url(url)
except: source = ''
if source:
url_video_list = []
video_list = []
match = re.compile('href="(.+?)" title="Parte.+?" rel="nofollow"').findall(source)
print match
#match = re.compile("<a.+?href='(.+?)'><b>Parte</b>(.+?)</a>").findall(source)
if not match: url_video_list.append(url)
else:
for urlsbase in match:
url_video_list.append(base_url + urlsbase)
number_of_parts = len(url_video_list)
dp = xbmcgui.DialogProgress()
dp.create(translate(30001),translate(30033))
dp.update(0)
i=0
for part in url_video_list:
if dp.iscanceled(): dp.close()
i += 1
video_url = rtp_resolver(part)
if video_url: video_list.append(video_url)
else:pass
dp.update(int((float(i)/number_of_parts)*100), translate(30033))
try:
dp.update(100, translate(30033))
dp.close()
except: pass
playlist = xbmc.PlayList(1)
playlist.clear()
for video in video_list:
liz=xbmcgui.ListItem(name, iconImage="DefaultVideo.png", thumbnailImage=iconimage)
liz.setInfo('Video', {})
liz.setProperty('mimetype', 'video')
playlist.add(video, liz)
		# play via the tracking RTPPlayer only; starting a plain xbmc.Player
		# here as well would immediately restart the playlist
		player = RTPPlayer(videoarray=video_list,mainurl=url)
		player.play(playlist)
while player._playbackLock:
player._trackPosition()
xbmc.sleep(1000)
else:msgok(translate(30001),translate(30018));sys.exit(0)
| gpl-2.0 | 1,384,147,035,772,757,500 | 39.08547 | 181 | 0.667271 | false |
carolFrohlich/nipype | examples/rsfmri_vol_surface_preprocessing.py | 2 | 42451 | #!/usr/bin/env python
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
====================================
rsfMRI: ANTS, FS, FSL, SPM, aCompCor
====================================
A preprocessing workflow for Siemens resting state data.
This workflow makes use of:
- ANTS
- FreeSurfer
- FSL
- SPM
- CompCor
For example::
python rsfmri_preprocessing.py -d /data/12345-34-1.dcm -f /data/Resting.nii
-s subj001 -o output -p PBS --plugin_args "dict(qsub_args='-q many')"
or
python rsfmri_vol_surface_preprocessing.py -f SUB_1024011/E?/func/rest.nii
-t OASIS-30_Atropos_template_in_MNI152_2mm.nii.gz --TR 2 -s SUB_1024011
--subjects_dir fsdata --slice_times 0 17 1 18 2 19 3 20 4 21 5 22 6 23
7 24 8 25 9 26 10 27 11 28 12 29 13 30 14 31 15 32 16 -o .
This workflow takes resting timeseries and a Siemens dicom file corresponding
to it and preprocesses it to produce timeseries coordinates or grayordinates.
This workflow also requires 2mm subcortical atlas and templates that are
available from:
http://mindboggle.info/data.html
specifically the 2mm versions of:
- `Joint Fusion Atlas <http://mindboggle.info/data/atlases/jointfusion/OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_MNI152_2mm_v2.nii.gz>`_
- `MNI template <http://mindboggle.info/data/templates/ants/OASIS-30_Atropos_template_in_MNI152_2mm.nii.gz>`_
"""
from __future__ import division, unicode_literals
from builtins import open, range, str
import os
from nipype.interfaces.base import CommandLine
CommandLine.set_default_terminal_output('allatonce')
from dicom import read_file
from nipype.interfaces import (spm, fsl, Function, ants, freesurfer)
from nipype.interfaces.c3 import C3dAffineTool
fsl.FSLCommand.set_default_output_type('NIFTI')
from nipype import Workflow, Node, MapNode
from nipype.interfaces import matlab as mlab
mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodisplay")
# If SPM is not in your MATLAB path you should add it here
# mlab.MatlabCommand.set_default_paths('/software/matlab/spm12')
from nipype.algorithms.rapidart import ArtifactDetect
from nipype.algorithms.misc import TSNR
from nipype.interfaces.utility import Rename, Merge, IdentityInterface
from nipype.utils.filemanip import filename_to_list
from nipype.interfaces.io import DataSink, FreeSurferSource
import numpy as np
import scipy as sp
import nibabel as nb
imports = ['import os',
'import nibabel as nb',
'import numpy as np',
'import scipy as sp',
'from nipype.utils.filemanip import filename_to_list, list_to_filename, split_filename',
'from scipy.special import legendre'
]
def get_info(dicom_files):
    """Given a Siemens dicom file return metadata
    Returns
    -------
    RepetitionTime
    Slice Acquisition Times
    Spacing between slices
    """
    from dcmstack.extract import default_extractor
meta = default_extractor(read_file(filename_to_list(dicom_files)[0],
stop_before_pixels=True,
force=True))
return (meta['RepetitionTime'] / 1000., meta['CsaImage.MosaicRefAcqTimes'],
meta['SpacingBetweenSlices'])
def median(in_files):
"""Computes an average of the median of each realigned timeseries
Parameters
----------
in_files: one or more realigned Nifti 4D time series
Returns
-------
out_file: a 3D Nifti file
"""
import numpy as np
import nibabel as nb
average = None
for idx, filename in enumerate(filename_to_list(in_files)):
img = nb.load(filename)
data = np.median(img.get_data(), axis=3)
if average is None:
average = data
else:
average = average + data
median_img = nb.Nifti1Image(average / float(idx + 1), img.affine,
img.header)
filename = os.path.join(os.getcwd(), 'median.nii.gz')
median_img.to_filename(filename)
return filename
def bandpass_filter(files, lowpass_freq, highpass_freq, fs):
"""Bandpass filter the input files
Parameters
----------
files: list of 4d nifti files
lowpass_freq: cutoff frequency for the low pass filter (in Hz)
highpass_freq: cutoff frequency for the high pass filter (in Hz)
fs: sampling rate (in Hz)
"""
from nipype.utils.filemanip import split_filename, list_to_filename
import numpy as np
import nibabel as nb
out_files = []
for filename in filename_to_list(files):
path, name, ext = split_filename(filename)
out_file = os.path.join(os.getcwd(), name + '_bp' + ext)
img = nb.load(filename)
timepoints = img.shape[-1]
F = np.zeros((timepoints))
        lowidx = int(timepoints / 2) + 1
        if lowpass_freq > 0:
            lowidx = int(np.round(lowpass_freq / fs * timepoints))
        highidx = 0
        if highpass_freq > 0:
            highidx = int(np.round(highpass_freq / fs * timepoints))
F[highidx:lowidx] = 1
F = ((F + F[::-1]) > 0).astype(int)
data = img.get_data()
if np.all(F == 1):
filtered_data = data
else:
filtered_data = np.real(np.fft.ifftn(np.fft.fftn(data) * F))
img_out = nb.Nifti1Image(filtered_data, img.affine, img.header)
img_out.to_filename(out_file)
out_files.append(out_file)
return list_to_filename(out_files)
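def _demo_bandpass_mask():
    """Illustrative sketch only: builds the symmetric frequency-domain mask
    used by bandpass_filter above, for an assumed 100-volume series at TR=2s
    with a 0.01-0.1 Hz passband.
    """
    import numpy as np
    timepoints, fs = 100, 1. / 2
    F = np.zeros((timepoints))
    lowidx = int(np.round(0.1 / fs * timepoints))
    highidx = int(np.round(0.01 / fs * timepoints))
    F[highidx:lowidx] = 1
    return ((F + F[::-1]) > 0).astype(int)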
def motion_regressors(motion_params, order=0, derivatives=1):
"""Compute motion regressors upto given order and derivative
motion + d(motion)/dt + d2(motion)/dt2 (linear + quadratic)
"""
import numpy as np
out_files = []
for idx, filename in enumerate(filename_to_list(motion_params)):
params = np.genfromtxt(filename)
out_params = params
for d in range(1, derivatives + 1):
cparams = np.vstack((np.repeat(params[0, :][None, :], d, axis=0),
params))
out_params = np.hstack((out_params, np.diff(cparams, d, axis=0)))
out_params2 = out_params
for i in range(2, order + 1):
out_params2 = np.hstack((out_params2, np.power(out_params, i)))
filename = os.path.join(os.getcwd(), "motion_regressor%02d.txt" % idx)
np.savetxt(filename, out_params2, fmt=b"%.10f")
out_files.append(filename)
return out_files
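def _demo_motion_regressors():
    """Illustrative sketch only: writes a small synthetic 6-parameter motion
    file and expands it to quadratic terms plus first derivatives with
    motion_regressors above (the file name is arbitrary).
    """
    import os
    import numpy as np
    fname = os.path.join(os.getcwd(), 'demo_motion.par')
    np.savetxt(fname, np.random.randn(5, 6))
    return motion_regressors(fname, order=2, derivatives=1)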
def build_filter1(motion_params, comp_norm, outliers, detrend_poly=None):
"""Builds a regressor set comprisong motion parameters, composite norm and
outliers
The outliers are added as a single time point column for each outlier
Parameters
----------
motion_params: a text file containing motion parameters and its derivatives
comp_norm: a text file containing the composite norm
outliers: a text file containing 0-based outlier indices
detrend_poly: number of polynomials to add to detrend
Returns
-------
components_file: a text file containing all the regressors
"""
import numpy as np
import nibabel as nb
from scipy.special import legendre
out_files = []
for idx, filename in enumerate(filename_to_list(motion_params)):
params = np.genfromtxt(filename)
norm_val = np.genfromtxt(filename_to_list(comp_norm)[idx])
out_params = np.hstack((params, norm_val[:, None]))
try:
outlier_val = np.genfromtxt(filename_to_list(outliers)[idx])
except IOError:
outlier_val = np.empty((0))
for index in np.atleast_1d(outlier_val):
outlier_vector = np.zeros((out_params.shape[0], 1))
outlier_vector[index] = 1
out_params = np.hstack((out_params, outlier_vector))
if detrend_poly:
timepoints = out_params.shape[0]
X = np.empty((timepoints, 0))
for i in range(detrend_poly):
X = np.hstack((X, legendre(
i + 1)(np.linspace(-1, 1, timepoints))[:, None]))
out_params = np.hstack((out_params, X))
filename = os.path.join(os.getcwd(), "filter_regressor%02d.txt" % idx)
np.savetxt(filename, out_params, fmt=b"%.10f")
out_files.append(filename)
return out_files
def extract_noise_components(realigned_file, mask_file, num_components=5,
extra_regressors=None):
"""Derive components most reflective of physiological noise
Parameters
----------
realigned_file: a 4D Nifti file containing realigned volumes
mask_file: a 3D Nifti file containing white matter + ventricular masks
num_components: number of components to use for noise decomposition
extra_regressors: additional regressors to add
Returns
-------
components_file: a text file containing the noise components
"""
from scipy.linalg.decomp_svd import svd
import numpy as np
import nibabel as nb
import os
imgseries = nb.load(realigned_file)
components = None
for filename in filename_to_list(mask_file):
mask = nb.load(filename).get_data()
if len(np.nonzero(mask > 0)[0]) == 0:
continue
voxel_timecourses = imgseries.get_data()[mask > 0]
voxel_timecourses[np.isnan(np.sum(voxel_timecourses, axis=1)), :] = 0
# remove mean and normalize by variance
# voxel_timecourses.shape == [nvoxels, time]
X = voxel_timecourses.T
stdX = np.std(X, axis=0)
stdX[stdX == 0] = 1.
stdX[np.isnan(stdX)] = 1.
stdX[np.isinf(stdX)] = 1.
X = (X - np.mean(X, axis=0)) / stdX
u, _, _ = svd(X, full_matrices=False)
if components is None:
components = u[:, :num_components]
else:
components = np.hstack((components, u[:, :num_components]))
if extra_regressors:
regressors = np.genfromtxt(extra_regressors)
components = np.hstack((components, regressors))
components_file = os.path.join(os.getcwd(), 'noise_components.txt')
np.savetxt(components_file, components, fmt=b"%.10f")
return components_file
def rename(in_files, suffix=None):
from nipype.utils.filemanip import (filename_to_list, split_filename,
list_to_filename)
out_files = []
for idx, filename in enumerate(filename_to_list(in_files)):
_, name, ext = split_filename(filename)
if suffix is None:
out_files.append(name + ('_%03d' % idx) + ext)
else:
out_files.append(name + suffix + ext)
return list_to_filename(out_files)
def get_aparc_aseg(files):
"""Return the aparc+aseg.mgz file"""
for name in files:
if 'aparc+aseg.mgz' in name:
return name
raise ValueError('aparc+aseg.mgz not found')
def extract_subrois(timeseries_file, label_file, indices):
"""Extract voxel time courses for each subcortical roi index
Parameters
----------
timeseries_file: a 4D Nifti file
label_file: a 3D file containing rois in the same space/size of the 4D file
indices: a list of indices for ROIs to extract.
Returns
-------
out_file: a text file containing time courses for each voxel of each roi
The first four columns are: freesurfer index, i, j, k positions in the
label file
"""
from nipype.utils.filemanip import split_filename
import nibabel as nb
import os
img = nb.load(timeseries_file)
data = img.get_data()
roiimg = nb.load(label_file)
rois = roiimg.get_data()
prefix = split_filename(timeseries_file)[1]
out_ts_file = os.path.join(os.getcwd(), '%s_subcortical_ts.txt' % prefix)
with open(out_ts_file, 'wt') as fp:
for fsindex in indices:
ijk = np.nonzero(rois == fsindex)
ts = data[ijk]
for i0, row in enumerate(ts):
fp.write('%d,%d,%d,%d,' % (fsindex, ijk[0][i0],
ijk[1][i0], ijk[2][i0]) +
','.join(['%.10f' % val for val in row]) + '\n')
return out_ts_file
def combine_hemi(left, right):
"""Combine left and right hemisphere time series into a single text file
"""
import os
import numpy as np
lh_data = nb.load(left).get_data()
rh_data = nb.load(right).get_data()
indices = np.vstack((1000000 + np.arange(0, lh_data.shape[0])[:, None],
2000000 + np.arange(0, rh_data.shape[0])[:, None]))
all_data = np.hstack((indices, np.vstack((lh_data.squeeze(),
rh_data.squeeze()))))
filename = left.split('.')[1] + '_combined.txt'
np.savetxt(filename, all_data,
fmt=','.join(['%d'] + ['%.10f'] * (all_data.shape[1] - 1)))
return os.path.abspath(filename)
def create_reg_workflow(name='registration'):
"""Create a FEAT preprocessing workflow together with freesurfer
Parameters
----------
name : name of workflow (default: 'registration')
Inputs::
inputspec.source_files : files (filename or list of filenames to register)
inputspec.mean_image : reference image to use
inputspec.anatomical_image : anatomical image to coregister to
inputspec.target_image : registration target
Outputs::
outputspec.func2anat_transform : FLIRT transform
outputspec.anat2target_transform : FLIRT+FNIRT transform
outputspec.transformed_files : transformed files in target space
outputspec.transformed_mean : mean image in target space
"""
register = Workflow(name=name)
inputnode = Node(interface=IdentityInterface(fields=['source_files',
'mean_image',
'subject_id',
'subjects_dir',
'target_image']),
name='inputspec')
outputnode = Node(interface=IdentityInterface(fields=['func2anat_transform',
'out_reg_file',
'anat2target_transform',
'transforms',
'transformed_mean',
'segmentation_files',
'anat2target',
'aparc'
]),
name='outputspec')
# Get the subject's freesurfer source directory
fssource = Node(FreeSurferSource(),
name='fssource')
fssource.run_without_submitting = True
register.connect(inputnode, 'subject_id', fssource, 'subject_id')
register.connect(inputnode, 'subjects_dir', fssource, 'subjects_dir')
convert = Node(freesurfer.MRIConvert(out_type='nii'),
name="convert")
register.connect(fssource, 'T1', convert, 'in_file')
# Coregister the median to the surface
bbregister = Node(freesurfer.BBRegister(),
name='bbregister')
bbregister.inputs.init = 'fsl'
bbregister.inputs.contrast_type = 't2'
bbregister.inputs.out_fsl_file = True
bbregister.inputs.epi_mask = True
register.connect(inputnode, 'subject_id', bbregister, 'subject_id')
register.connect(inputnode, 'mean_image', bbregister, 'source_file')
register.connect(inputnode, 'subjects_dir', bbregister, 'subjects_dir')
"""
Estimate the tissue classes from the anatomical image. But use spm's segment
as FSL appears to be breaking.
"""
stripper = Node(fsl.BET(), name='stripper')
register.connect(convert, 'out_file', stripper, 'in_file')
fast = Node(fsl.FAST(), name='fast')
register.connect(stripper, 'out_file', fast, 'in_files')
"""
Binarize the segmentation
"""
binarize = MapNode(fsl.ImageMaths(op_string='-nan -thr 0.9 -ero -bin'),
iterfield=['in_file'],
name='binarize')
register.connect(fast, 'partial_volume_files', binarize, 'in_file')
"""
Apply inverse transform to take segmentations to functional space
"""
applyxfm = MapNode(freesurfer.ApplyVolTransform(inverse=True,
interp='nearest'),
iterfield=['target_file'],
name='inverse_transform')
register.connect(inputnode, 'subjects_dir', applyxfm, 'subjects_dir')
register.connect(bbregister, 'out_reg_file', applyxfm, 'reg_file')
register.connect(binarize, 'out_file', applyxfm, 'target_file')
register.connect(inputnode, 'mean_image', applyxfm, 'source_file')
"""
Apply inverse transform to aparc file
"""
aparcxfm = Node(freesurfer.ApplyVolTransform(inverse=True,
interp='nearest'),
name='aparc_inverse_transform')
register.connect(inputnode, 'subjects_dir', aparcxfm, 'subjects_dir')
register.connect(bbregister, 'out_reg_file', aparcxfm, 'reg_file')
register.connect(fssource, ('aparc_aseg', get_aparc_aseg),
aparcxfm, 'target_file')
register.connect(inputnode, 'mean_image', aparcxfm, 'source_file')
"""
Convert the BBRegister transformation to ANTS ITK format
"""
convert2itk = Node(C3dAffineTool(), name='convert2itk')
convert2itk.inputs.fsl2ras = True
convert2itk.inputs.itk_transform = True
register.connect(bbregister, 'out_fsl_file', convert2itk, 'transform_file')
register.connect(inputnode, 'mean_image', convert2itk, 'source_file')
register.connect(stripper, 'out_file', convert2itk, 'reference_file')
"""
Compute registration between the subject's structural and MNI template
This is currently set to perform a very quick registration. However, the
registration can be made significantly more accurate for cortical
structures by increasing the number of iterations
All parameters are set using the example from:
#https://github.com/stnava/ANTs/blob/master/Scripts/newAntsExample.sh
"""
reg = Node(ants.Registration(), name='antsRegister')
reg.inputs.output_transform_prefix = "output_"
reg.inputs.transforms = ['Rigid', 'Affine', 'SyN']
reg.inputs.transform_parameters = [(0.1,), (0.1,), (0.2, 3.0, 0.0)]
reg.inputs.number_of_iterations = [[10000, 11110, 11110]] * 2 + [[100, 30, 20]]
reg.inputs.dimension = 3
reg.inputs.write_composite_transform = True
reg.inputs.collapse_output_transforms = True
reg.inputs.initial_moving_transform_com = True
reg.inputs.metric = ['Mattes'] * 2 + [['Mattes', 'CC']]
reg.inputs.metric_weight = [1] * 2 + [[0.5, 0.5]]
reg.inputs.radius_or_number_of_bins = [32] * 2 + [[32, 4]]
reg.inputs.sampling_strategy = ['Regular'] * 2 + [[None, None]]
reg.inputs.sampling_percentage = [0.3] * 2 + [[None, None]]
reg.inputs.convergence_threshold = [1.e-8] * 2 + [-0.01]
reg.inputs.convergence_window_size = [20] * 2 + [5]
reg.inputs.smoothing_sigmas = [[4, 2, 1]] * 2 + [[1, 0.5, 0]]
reg.inputs.sigma_units = ['vox'] * 3
reg.inputs.shrink_factors = [[3, 2, 1]] * 2 + [[4, 2, 1]]
reg.inputs.use_estimate_learning_rate_once = [True] * 3
reg.inputs.use_histogram_matching = [False] * 2 + [True]
reg.inputs.winsorize_lower_quantile = 0.005
reg.inputs.winsorize_upper_quantile = 0.995
reg.inputs.float = True
reg.inputs.output_warped_image = 'output_warped_image.nii.gz'
reg.inputs.num_threads = 4
reg.plugin_args = {'qsub_args': '-l nodes=1:ppn=4'}
register.connect(stripper, 'out_file', reg, 'moving_image')
register.connect(inputnode, 'target_image', reg, 'fixed_image')
"""
Concatenate the affine and ants transforms into a list
"""
merge = Node(Merge(2), iterfield=['in2'], name='mergexfm')
register.connect(convert2itk, 'itk_transform', merge, 'in2')
register.connect(reg, 'composite_transform', merge, 'in1')
"""
Transform the mean image. First to anatomical and then to target
"""
warpmean = Node(ants.ApplyTransforms(), name='warpmean')
warpmean.inputs.input_image_type = 3
warpmean.inputs.interpolation = 'Linear'
warpmean.inputs.invert_transform_flags = [False, False]
warpmean.inputs.terminal_output = 'file'
warpmean.inputs.args = '--float'
warpmean.inputs.num_threads = 4
register.connect(inputnode, 'target_image', warpmean, 'reference_image')
register.connect(inputnode, 'mean_image', warpmean, 'input_image')
register.connect(merge, 'out', warpmean, 'transforms')
"""
Assign all the output files
"""
register.connect(reg, 'warped_image', outputnode, 'anat2target')
register.connect(warpmean, 'output_image', outputnode, 'transformed_mean')
register.connect(applyxfm, 'transformed_file',
outputnode, 'segmentation_files')
register.connect(aparcxfm, 'transformed_file',
outputnode, 'aparc')
register.connect(bbregister, 'out_fsl_file',
outputnode, 'func2anat_transform')
register.connect(bbregister, 'out_reg_file',
outputnode, 'out_reg_file')
register.connect(reg, 'composite_transform',
outputnode, 'anat2target_transform')
register.connect(merge, 'out', outputnode, 'transforms')
return register
"""
Creates the main preprocessing workflow
"""
def create_workflow(files,
target_file,
subject_id,
TR,
slice_times,
norm_threshold=1,
num_components=5,
vol_fwhm=None,
surf_fwhm=None,
lowpass_freq=-1,
highpass_freq=-1,
subjects_dir=None,
sink_directory=os.getcwd(),
target_subject=['fsaverage3', 'fsaverage4'],
name='resting'):
wf = Workflow(name=name)
# Rename files in case they are named identically
name_unique = MapNode(Rename(format_string='rest_%(run)02d'),
iterfield=['in_file', 'run'],
name='rename')
name_unique.inputs.keep_ext = True
name_unique.inputs.run = list(range(1, len(files) + 1))
name_unique.inputs.in_file = files
realign = Node(interface=spm.Realign(), name="realign")
realign.inputs.jobtype = 'estwrite'
num_slices = len(slice_times)
slice_timing = Node(interface=spm.SliceTiming(), name="slice_timing")
slice_timing.inputs.num_slices = num_slices
slice_timing.inputs.time_repetition = TR
slice_timing.inputs.time_acquisition = TR - TR / float(num_slices)
slice_timing.inputs.slice_order = (np.argsort(slice_times) + 1).tolist()
slice_timing.inputs.ref_slice = int(num_slices / 2)
# Comute TSNR on realigned data regressing polynomials upto order 2
tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
wf.connect(slice_timing, 'timecorrected_files', tsnr, 'in_file')
# Compute the median image across runs
calc_median = Node(Function(input_names=['in_files'],
output_names=['median_file'],
function=median,
imports=imports),
name='median')
wf.connect(tsnr, 'detrended_file', calc_median, 'in_files')
"""Segment and Register
"""
registration = create_reg_workflow(name='registration')
wf.connect(calc_median, 'median_file', registration, 'inputspec.mean_image')
registration.inputs.inputspec.subject_id = subject_id
registration.inputs.inputspec.subjects_dir = subjects_dir
registration.inputs.inputspec.target_image = target_file
"""Use :class:`nipype.algorithms.rapidart` to determine which of the
images in the functional series are outliers based on deviations in
intensity or movement.
"""
art = Node(interface=ArtifactDetect(), name="art")
art.inputs.use_differences = [True, True]
art.inputs.use_norm = True
art.inputs.norm_threshold = norm_threshold
art.inputs.zintensity_threshold = 9
art.inputs.mask_type = 'spm_global'
art.inputs.parameter_source = 'SPM'
"""Here we are connecting all the nodes together. Notice that we add the merge node only if you choose
to use 4D. Also `get_vox_dims` function is passed along the input volume of normalise to set the optimal
voxel sizes.
"""
wf.connect([(name_unique, realign, [('out_file', 'in_files')]),
(realign, slice_timing, [('realigned_files', 'in_files')]),
(slice_timing, art, [('timecorrected_files', 'realigned_files')]),
(realign, art, [('realignment_parameters', 'realignment_parameters')]),
])
def selectindex(files, idx):
import numpy as np
from nipype.utils.filemanip import filename_to_list, list_to_filename
return list_to_filename(np.array(filename_to_list(files))[idx].tolist())
mask = Node(fsl.BET(), name='getmask')
mask.inputs.mask = True
wf.connect(calc_median, 'median_file', mask, 'in_file')
# get segmentation in normalized functional space
def merge_files(in1, in2):
out_files = filename_to_list(in1)
out_files.extend(filename_to_list(in2))
return out_files
# filter some noise
# Compute motion regressors
motreg = Node(Function(input_names=['motion_params', 'order',
'derivatives'],
output_names=['out_files'],
function=motion_regressors,
imports=imports),
name='getmotionregress')
wf.connect(realign, 'realignment_parameters', motreg, 'motion_params')
# Create a filter to remove motion and art confounds
createfilter1 = Node(Function(input_names=['motion_params', 'comp_norm',
'outliers', 'detrend_poly'],
output_names=['out_files'],
function=build_filter1,
imports=imports),
name='makemotionbasedfilter')
createfilter1.inputs.detrend_poly = 2
wf.connect(motreg, 'out_files', createfilter1, 'motion_params')
wf.connect(art, 'norm_files', createfilter1, 'comp_norm')
wf.connect(art, 'outlier_files', createfilter1, 'outliers')
filter1 = MapNode(fsl.GLM(out_f_name='F_mcart.nii',
out_pf_name='pF_mcart.nii',
demean=True),
iterfield=['in_file', 'design', 'out_res_name'],
name='filtermotion')
wf.connect(slice_timing, 'timecorrected_files', filter1, 'in_file')
wf.connect(slice_timing, ('timecorrected_files', rename, '_filtermotart'),
filter1, 'out_res_name')
wf.connect(createfilter1, 'out_files', filter1, 'design')
createfilter2 = MapNode(Function(input_names=['realigned_file', 'mask_file',
'num_components',
'extra_regressors'],
output_names=['out_files'],
function=extract_noise_components,
imports=imports),
iterfield=['realigned_file', 'extra_regressors'],
name='makecompcorrfilter')
createfilter2.inputs.num_components = num_components
wf.connect(createfilter1, 'out_files', createfilter2, 'extra_regressors')
wf.connect(filter1, 'out_res', createfilter2, 'realigned_file')
wf.connect(registration, ('outputspec.segmentation_files', selectindex, [0, 2]),
createfilter2, 'mask_file')
filter2 = MapNode(fsl.GLM(out_f_name='F.nii',
out_pf_name='pF.nii',
demean=True),
iterfield=['in_file', 'design', 'out_res_name'],
name='filter_noise_nosmooth')
wf.connect(filter1, 'out_res', filter2, 'in_file')
wf.connect(filter1, ('out_res', rename, '_cleaned'),
filter2, 'out_res_name')
wf.connect(createfilter2, 'out_files', filter2, 'design')
wf.connect(mask, 'mask_file', filter2, 'mask')
bandpass = Node(Function(input_names=['files', 'lowpass_freq',
'highpass_freq', 'fs'],
output_names=['out_files'],
function=bandpass_filter,
imports=imports),
name='bandpass_unsmooth')
bandpass.inputs.fs = 1. / TR
bandpass.inputs.highpass_freq = highpass_freq
bandpass.inputs.lowpass_freq = lowpass_freq
wf.connect(filter2, 'out_res', bandpass, 'files')
"""Smooth the functional data using
:class:`nipype.interfaces.spm.Smooth`.
"""
smooth = Node(interface=spm.Smooth(), name="smooth")
smooth.inputs.fwhm = vol_fwhm
wf.connect(bandpass, 'out_files', smooth, 'in_files')
collector = Node(Merge(2), name='collect_streams')
wf.connect(smooth, 'smoothed_files', collector, 'in1')
wf.connect(bandpass, 'out_files', collector, 'in2')
"""
Transform the remaining images. First to anatomical and then to target
"""
warpall = MapNode(ants.ApplyTransforms(), iterfield=['input_image'],
name='warpall')
warpall.inputs.input_image_type = 3
warpall.inputs.interpolation = 'Linear'
warpall.inputs.invert_transform_flags = [False, False]
warpall.inputs.terminal_output = 'file'
warpall.inputs.reference_image = target_file
warpall.inputs.args = '--float'
warpall.inputs.num_threads = 1
# transform to target
wf.connect(collector, 'out', warpall, 'input_image')
wf.connect(registration, 'outputspec.transforms', warpall, 'transforms')
mask_target = Node(fsl.ImageMaths(op_string='-bin'), name='target_mask')
wf.connect(registration, 'outputspec.anat2target', mask_target, 'in_file')
maskts = MapNode(fsl.ApplyMask(), iterfield=['in_file'], name='ts_masker')
wf.connect(warpall, 'output_image', maskts, 'in_file')
wf.connect(mask_target, 'out_file', maskts, 'mask_file')
# map to surface
# extract aparc+aseg ROIs
# extract subcortical ROIs
# extract target space ROIs
# combine subcortical and cortical rois into a single cifti file
#######
# Convert aparc to subject functional space
# Sample the average time series in aparc ROIs
sampleaparc = MapNode(freesurfer.SegStats(default_color_table=True),
iterfield=['in_file', 'summary_file',
'avgwf_txt_file'],
name='aparc_ts')
sampleaparc.inputs.segment_id = ([8] + list(range(10, 14)) + [17, 18, 26, 47] +
list(range(49, 55)) + [58] + list(range(1001, 1036)) +
list(range(2001, 2036)))
wf.connect(registration, 'outputspec.aparc',
sampleaparc, 'segmentation_file')
wf.connect(collector, 'out', sampleaparc, 'in_file')
def get_names(files, suffix):
"""Generate appropriate names for output files
"""
from nipype.utils.filemanip import (split_filename, filename_to_list,
list_to_filename)
out_names = []
for filename in files:
_, name, _ = split_filename(filename)
out_names.append(name + suffix)
return list_to_filename(out_names)
wf.connect(collector, ('out', get_names, '_avgwf.txt'),
sampleaparc, 'avgwf_txt_file')
wf.connect(collector, ('out', get_names, '_summary.stats'),
sampleaparc, 'summary_file')
# Sample the time series onto the surface of the target surface. Performs
# sampling into left and right hemisphere
target = Node(IdentityInterface(fields=['target_subject']), name='target')
target.iterables = ('target_subject', filename_to_list(target_subject))
samplerlh = MapNode(freesurfer.SampleToSurface(),
iterfield=['source_file'],
name='sampler_lh')
samplerlh.inputs.sampling_method = "average"
samplerlh.inputs.sampling_range = (0.1, 0.9, 0.1)
samplerlh.inputs.sampling_units = "frac"
samplerlh.inputs.interp_method = "trilinear"
samplerlh.inputs.smooth_surf = surf_fwhm
# samplerlh.inputs.cortex_mask = True
samplerlh.inputs.out_type = 'niigz'
samplerlh.inputs.subjects_dir = subjects_dir
samplerrh = samplerlh.clone('sampler_rh')
samplerlh.inputs.hemi = 'lh'
wf.connect(collector, 'out', samplerlh, 'source_file')
wf.connect(registration, 'outputspec.out_reg_file', samplerlh, 'reg_file')
wf.connect(target, 'target_subject', samplerlh, 'target_subject')
samplerrh.set_input('hemi', 'rh')
wf.connect(collector, 'out', samplerrh, 'source_file')
wf.connect(registration, 'outputspec.out_reg_file', samplerrh, 'reg_file')
wf.connect(target, 'target_subject', samplerrh, 'target_subject')
# Combine left and right hemisphere to text file
combiner = MapNode(Function(input_names=['left', 'right'],
output_names=['out_file'],
function=combine_hemi,
imports=imports),
iterfield=['left', 'right'],
name="combiner")
wf.connect(samplerlh, 'out_file', combiner, 'left')
wf.connect(samplerrh, 'out_file', combiner, 'right')
# Sample the time series file for each subcortical roi
ts2txt = MapNode(Function(input_names=['timeseries_file', 'label_file',
'indices'],
output_names=['out_file'],
function=extract_subrois,
imports=imports),
iterfield=['timeseries_file'],
name='getsubcortts')
ts2txt.inputs.indices = [8] + list(range(10, 14)) + [17, 18, 26, 47] +\
list(range(49, 55)) + [58]
ts2txt.inputs.label_file = \
os.path.abspath(('OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_MNI152_'
'2mm_v2.nii.gz'))
wf.connect(maskts, 'out_file', ts2txt, 'timeseries_file')
######
substitutions = [('_target_subject_', ''),
('_filtermotart_cleaned_bp_trans_masked', ''),
('_filtermotart_cleaned_bp', '')
]
regex_subs = [('_ts_masker.*/sar', '/smooth/'),
('_ts_masker.*/ar', '/unsmooth/'),
('_combiner.*/sar', '/smooth/'),
('_combiner.*/ar', '/unsmooth/'),
('_aparc_ts.*/sar', '/smooth/'),
('_aparc_ts.*/ar', '/unsmooth/'),
('_getsubcortts.*/sar', '/smooth/'),
('_getsubcortts.*/ar', '/unsmooth/'),
('series/sar', 'series/smooth/'),
('series/ar', 'series/unsmooth/'),
('_inverse_transform./', ''),
]
# Save the relevant data into an output directory
datasink = Node(interface=DataSink(), name="datasink")
datasink.inputs.base_directory = sink_directory
datasink.inputs.container = subject_id
datasink.inputs.substitutions = substitutions
datasink.inputs.regexp_substitutions = regex_subs # (r'(/_.*(\d+/))', r'/run\2')
wf.connect(realign, 'realignment_parameters', datasink, 'resting.qa.motion')
wf.connect(art, 'norm_files', datasink, 'resting.qa.art.@norm')
wf.connect(art, 'intensity_files', datasink, 'resting.qa.art.@intensity')
wf.connect(art, 'outlier_files', datasink, 'resting.qa.art.@outlier_files')
wf.connect(registration, 'outputspec.segmentation_files', datasink, 'resting.mask_files')
wf.connect(registration, 'outputspec.anat2target', datasink, 'resting.qa.ants')
wf.connect(mask, 'mask_file', datasink, 'resting.mask_files.@brainmask')
wf.connect(mask_target, 'out_file', datasink, 'resting.mask_files.target')
wf.connect(filter1, 'out_f', datasink, 'resting.qa.compmaps.@mc_F')
wf.connect(filter1, 'out_pf', datasink, 'resting.qa.compmaps.@mc_pF')
wf.connect(filter2, 'out_f', datasink, 'resting.qa.compmaps')
wf.connect(filter2, 'out_pf', datasink, 'resting.qa.compmaps.@p')
wf.connect(bandpass, 'out_files', datasink, 'resting.timeseries.@bandpassed')
wf.connect(smooth, 'smoothed_files', datasink, 'resting.timeseries.@smoothed')
wf.connect(createfilter1, 'out_files',
datasink, 'resting.regress.@regressors')
wf.connect(createfilter2, 'out_files',
datasink, 'resting.regress.@compcorr')
wf.connect(maskts, 'out_file', datasink, 'resting.timeseries.target')
wf.connect(sampleaparc, 'summary_file',
datasink, 'resting.parcellations.aparc')
wf.connect(sampleaparc, 'avgwf_txt_file',
datasink, 'resting.parcellations.aparc.@avgwf')
wf.connect(ts2txt, 'out_file',
datasink, 'resting.parcellations.grayo.@subcortical')
datasink2 = Node(interface=DataSink(), name="datasink2")
datasink2.inputs.base_directory = sink_directory
datasink2.inputs.container = subject_id
datasink2.inputs.substitutions = substitutions
datasink2.inputs.regexp_substitutions = regex_subs # (r'(/_.*(\d+/))', r'/run\2')
wf.connect(combiner, 'out_file',
datasink2, 'resting.parcellations.grayo.@surface')
return wf
"""
Creates the full workflow including getting information from dicom files
"""
def create_resting_workflow(args, name=None):
TR = args.TR
slice_times = args.slice_times
if args.dicom_file:
TR, slice_times, slice_thickness = get_info(args.dicom_file)
slice_times = (np.array(slice_times) / 1000.).tolist()
if name is None:
name = 'resting_' + args.subject_id
kwargs = dict(files=[os.path.abspath(filename) for filename in args.files],
target_file=os.path.abspath(args.target_file),
subject_id=args.subject_id,
TR=TR,
slice_times=slice_times,
vol_fwhm=args.vol_fwhm,
surf_fwhm=args.surf_fwhm,
norm_threshold=2.,
subjects_dir=os.path.abspath(args.fsdir),
target_subject=args.target_surfs,
lowpass_freq=args.lowpass_freq,
highpass_freq=args.highpass_freq,
sink_directory=os.path.abspath(args.sink),
name=name)
wf = create_workflow(**kwargs)
return wf
if __name__ == "__main__":
from argparse import ArgumentParser, RawTextHelpFormatter
defstr = ' (default %(default)s)'
parser = ArgumentParser(description=__doc__,
formatter_class=RawTextHelpFormatter)
parser.add_argument("-d", "--dicom_file", dest="dicom_file",
help="an example dicom file from the resting series")
parser.add_argument("-f", "--files", dest="files", nargs="+",
help="4d nifti files for resting state",
required=True)
parser.add_argument("-t", "--target", dest="target_file",
help=("Target in MNI space. Best to use the MindBoggle "
"template - "
"OASIS-30_Atropos_template_in_MNI152_2mm.nii.gz"),
required=True)
parser.add_argument("-s", "--subject_id", dest="subject_id",
help="FreeSurfer subject id", required=True)
parser.add_argument("--subjects_dir", dest="fsdir",
help="FreeSurfer subject directory", required=True)
parser.add_argument("--target_surfaces", dest="target_surfs", nargs="+",
default=['fsaverage5'],
help="FreeSurfer target surfaces" + defstr)
parser.add_argument("--TR", dest="TR", default=None, type=float,
help="TR if dicom not provided in seconds")
parser.add_argument("--slice_times", dest="slice_times", nargs="+",
type=float, help="Slice onset times in seconds")
parser.add_argument('--vol_fwhm', default=6., dest='vol_fwhm',
type=float, help="Spatial FWHM" + defstr)
parser.add_argument('--surf_fwhm', default=15., dest='surf_fwhm',
type=float, help="Spatial FWHM" + defstr)
parser.add_argument("-l", "--lowpass_freq", dest="lowpass_freq",
default=0.1, type=float,
help="Low pass frequency (Hz)" + defstr)
parser.add_argument("-u", "--highpass_freq", dest="highpass_freq",
default=0.01, type=float,
help="High pass frequency (Hz)" + defstr)
parser.add_argument("-o", "--output_dir", dest="sink",
help="Output directory base", required=True)
parser.add_argument("-w", "--work_dir", dest="work_dir",
help="Output directory base")
parser.add_argument("-p", "--plugin", dest="plugin",
default='Linear',
help="Plugin to use")
parser.add_argument("--plugin_args", dest="plugin_args",
help="Plugin arguments")
args = parser.parse_args()
wf = create_resting_workflow(args)
if args.work_dir:
work_dir = os.path.abspath(args.work_dir)
else:
work_dir = os.getcwd()
wf.base_dir = work_dir
if args.plugin_args:
wf.run(args.plugin, plugin_args=eval(args.plugin_args))
else:
wf.run(args.plugin)
| bsd-3-clause | -405,855,577,794,183,200 | 40.41561 | 139 | 0.594639 | false |
lnielsen/invenio | invenio/legacy/bibcirculation/adminlib.py | 1 | 240137 | ## Administrator interface for Bibcirculation
##
## This file is part of Invenio.
## Copyright (C) 2008, 2009, 2010, 2011, 2012, 2013, 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
## """Invenio Bibcirculation Administrator Interface."""
from __future__ import division
"""
Invenio Bibcirculation Administrator.
The functions are grouped into logical
categories ('User Pages', 'Loans, Returns and Loan requests',
'ILLs', 'Libraries', 'Vendors', ...).
This ordering should be maintained and, when necessary, improved
for readability as additional methods are added.
When applicable, methods should be renamed, refactored and
appropriate documentation added.
"""
__revision__ = "$Id$"
__lastupdated__ = """$Date$"""
import datetime, time, types
# Other Invenio imports
from invenio.config import \
CFG_SITE_LANG, \
CFG_SITE_URL, \
CFG_SITE_SECURE_URL, \
CFG_CERN_SITE
import invenio.modules.access.engine as acce
from invenio.legacy.webpage import page
from invenio.legacy.webuser import getUid, page_not_authorized
from invenio.legacy.webstat.api import register_customevent
from invenio.ext.logging import register_exception
from invenio.ext.email import send_email
from invenio.legacy.search_engine import perform_request_search, record_exists
from invenio.utils.url import create_html_link, create_url, redirect_to_url
from invenio.base.i18n import gettext_set_language
from invenio.config import \
CFG_BIBCIRCULATION_ITEM_STATUS_ON_LOAN, \
CFG_BIBCIRCULATION_ITEM_STATUS_ON_ORDER, \
CFG_BIBCIRCULATION_ITEM_STATUS_ON_SHELF, \
CFG_BIBCIRCULATION_ITEM_STATUS_IN_PROCESS, \
CFG_BIBCIRCULATION_ITEM_STATUS_UNDER_REVIEW, \
CFG_BIBCIRCULATION_LOAN_STATUS_ON_LOAN, \
CFG_BIBCIRCULATION_LOAN_STATUS_RETURNED, \
CFG_BIBCIRCULATION_REQUEST_STATUS_WAITING, \
CFG_BIBCIRCULATION_REQUEST_STATUS_PENDING, \
CFG_BIBCIRCULATION_REQUEST_STATUS_DONE, \
CFG_BIBCIRCULATION_REQUEST_STATUS_CANCELLED, \
CFG_BIBCIRCULATION_ILL_STATUS_NEW, \
CFG_BIBCIRCULATION_ILL_STATUS_ON_LOAN, \
CFG_BIBCIRCULATION_LIBRARY_TYPE_MAIN, \
CFG_BIBCIRCULATION_ACQ_STATUS_NEW, \
CFG_BIBCIRCULATION_ACQ_STATUS_RECEIVED, \
CFG_BIBCIRCULATION_PROPOSAL_STATUS_ON_ORDER, \
CFG_BIBCIRCULATION_PROPOSAL_STATUS_PUT_ASIDE, \
CFG_BIBCIRCULATION_PROPOSAL_STATUS_RECEIVED
# Bibcirculation imports
from invenio.legacy.bibcirculation.config import \
CFG_BIBCIRCULATION_TEMPLATES, CFG_BIBCIRCULATION_LIBRARIAN_EMAIL, \
CFG_BIBCIRCULATION_LOANS_EMAIL, CFG_BIBCIRCULATION_ILLS_EMAIL, \
CFG_BIBCIRCULATION_PROPOSAL_TYPE, CFG_BIBCIRCULATION_ACQ_STATUS
from invenio.legacy.bibcirculation.utils import book_title_from_MARC, \
update_status_if_expired, \
renew_loan_for_X_days, \
print_pending_hold_requests_information, \
print_new_loan_information, \
validate_date_format, \
generate_email_body, \
book_information_from_MARC, \
search_user, \
tag_all_requests_as_done, \
update_user_info_from_ldap, \
update_request_data, \
update_requests_statuses, \
has_date_format, \
generate_tmp_barcode, \
looks_like_dictionary
import invenio.legacy.bibcirculation.db_layer as db
import invenio.legacy.template
bc_templates = invenio.legacy.template.load('bibcirculation')
def is_adminuser(req):
"""check if user is a registered administrator. """
return acce.acc_authorize_action(req, "runbibcirculation")
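# Typical guard used by every handler in this module (a sketch of the
# existing pattern, not new behaviour):
#
#   (auth_code, auth_message) = is_adminuser(req)
#   if auth_code != 0:
#       return mustloginpage(req, auth_message)
#
# auth_code is 0 only when the user is authorized for the
# 'runbibcirculation' action.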
def mustloginpage(req, message):
"""show a page asking the user to login."""
navtrail_previous_links = '<a class="navtrail" href="%s/admin/">' \
'Admin Area</a> > ' \
'<a class="navtrail" href="%s/admin/bibcirculation/">' \
'BibCirculation Admin</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL)
return page_not_authorized(req=req, text=message,
navtrail=navtrail_previous_links)
def load_template(template):
"""
Load a letter/notification template from
bibcirculation_config.py.
@type template: string.
@param template: template that will be used.
@return: template(string)
"""
if template == "overdue_letter":
output = CFG_BIBCIRCULATION_TEMPLATES['OVERDUE']
elif template == "reminder":
output = CFG_BIBCIRCULATION_TEMPLATES['REMINDER']
elif template == "notification":
output = CFG_BIBCIRCULATION_TEMPLATES['NOTIFICATION']
elif template == "ill_received":
output = CFG_BIBCIRCULATION_TEMPLATES['ILL_RECEIVED']
elif template == "ill_recall1":
output = CFG_BIBCIRCULATION_TEMPLATES['ILL_RECALL1']
elif template == "ill_recall2":
output = CFG_BIBCIRCULATION_TEMPLATES['ILL_RECALL2']
elif template == "ill_recall3":
output = CFG_BIBCIRCULATION_TEMPLATES['ILL_RECALL3']
elif template == "claim_return":
output = CFG_BIBCIRCULATION_TEMPLATES['SEND_RECALL']
elif template == "proposal_notification":
output = CFG_BIBCIRCULATION_TEMPLATES['PROPOSAL_NOTIFICATION']
elif template == "proposal_acceptance":
output = CFG_BIBCIRCULATION_TEMPLATES['PROPOSAL_ACCEPTANCE_NOTIFICATION']
elif template == "proposal_refusal":
output = CFG_BIBCIRCULATION_TEMPLATES['PROPOSAL_REFUSAL_NOTIFICATION']
elif template == "purchase_notification":
output = CFG_BIBCIRCULATION_TEMPLATES['PURCHASE_NOTIFICATION']
elif template == "purchase_received_tid":
output = CFG_BIBCIRCULATION_TEMPLATES['PURCHASE_RECEIVED_TID']
elif template == "purchase_received_cash":
output = CFG_BIBCIRCULATION_TEMPLATES['PURCHASE_RECEIVED_CASH']
else:
output = CFG_BIBCIRCULATION_TEMPLATES['EMPTY']
return output
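# Illustrative mapping (no new behaviour): load_template('overdue_letter')
# returns CFG_BIBCIRCULATION_TEMPLATES['OVERDUE'], while any key not listed
# above falls back to CFG_BIBCIRCULATION_TEMPLATES['EMPTY'].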
def index(req, ln=CFG_SITE_LANG):
"""
main function to show pages for bibcirculationadmin
"""
navtrail_previous_links = '<a class="navtrail"' \
' href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
body = bc_templates.tmpl_index(ln=ln)
return page(title=_("BibCirculation Admin"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
###
### Loans, Loan Requests, Loan Returns related templates.
###
def loan_on_desk_step1(req, key, string, ln=CFG_SITE_LANG):
"""
Step 1/4 of loan procedure.
Search a user/borrower and return a list with all the possible results.
@type key: string.
@param key: attribute that will be considered during the search. Can be 'name',
'email' or 'ccid/id'.
@type string: string.
@param string: keyword used during the search.
@return: list of potential borrowers.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
infos = []
_ = gettext_set_language(ln)
if key and not string:
infos.append(_('Empty string. Please, try again.'))
body = bc_templates.tmpl_loan_on_desk_step1(result=None, key=key,
string=string, infos=infos,
ln=ln)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
return page(title=_("Loan on desk"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
result = search_user(key, string)
borrowers_list = []
if len(result) == 0 and key:
if CFG_CERN_SITE:
infos.append(_("0 borrowers found.") + ' ' +_("Search by CCID."))
else:
new_borrower_link = create_html_link(CFG_SITE_SECURE_URL +
'/admin2/bibcirculation/add_new_borrower_step1',
{'ln': ln}, _("Register new borrower."))
message = _("0 borrowers found.") + ' ' + new_borrower_link
infos.append(message)
elif len(result) == 1:
return loan_on_desk_step2(req, result[0][0], ln)
else:
for user in result:
borrower_data = db.get_borrower_data_by_id(user[0])
borrowers_list.append(borrower_data)
body = bc_templates.tmpl_loan_on_desk_step1(result=borrowers_list,
key=key,
string=string,
infos=infos,
ln=ln)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
return page(title=_("Circulation management"),
uid=id_user,
req=req,
body=body,
language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
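# How step 1 branches (a sketch, assuming search_user() returns a list of
# (user_id, ...) tuples as used above):
#   - no match: an informative message is shown (a CCID hint on CERN sites);
#   - exactly one match: the flow jumps straight to loan_on_desk_step2();
#   - several matches: a selection list of borrowers is rendered.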
def loan_on_desk_step2(req, user_id, ln=CFG_SITE_LANG):
"""
Step 2/4 of loan procedure.
Display the user/borrower's information.
@type user_id: integer
@param user_id: identify the borrower. It is also the primary key of
the table crcBORROWER.
"""
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
_ = gettext_set_language(ln)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
infos = []
body = bc_templates.tmpl_loan_on_desk_step2(user_id=user_id,
infos=infos,
ln=ln)
return page(title=_("Circulation management"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def loan_on_desk_step3(req, user_id, list_of_barcodes, ln=CFG_SITE_LANG):
"""
Step 3/4 of loan procedure.
    Checks that the barcodes exist and that there are no requests on these records.
Lets the librarian change the due dates and add notes.
@type user_id: integer
@param user_id: identify the borrower. It is also the primary key of
the table crcBORROWER.
@type list_of_barcodes: list
    @param list_of_barcodes: list of strings with the barcodes
                             entered by the librarian with the barcode reader
"""
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
_ = gettext_set_language(ln)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
infos = []
list_of_books = []
# to avoid duplicates
aux = []
for bc in list_of_barcodes:
if bc not in aux:
aux.append(bc)
list_of_barcodes = aux
for value in list_of_barcodes:
recid = db.get_id_bibrec(value)
loan_id = db.is_item_on_loan(value)
item_description = db.get_item_description(value)
if recid is None:
infos.append(_('%(x_strong_tag_open)s%(x_barcode)s%(x_strong_tag_close)s Unknown barcode.') % {'x_barcode': value, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'} + ' ' + _('Please, try again.'))
body = bc_templates.tmpl_loan_on_desk_step2(user_id=user_id,
infos=infos,
ln=ln)
elif loan_id:
            infos.append('The item with the barcode %(x_strong_tag_open)s%(x_barcode)s%(x_strong_tag_close)s is on loan and cannot be checked out.' % {'x_barcode': value, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'})
body = bc_templates.tmpl_loan_on_desk_step2(user_id=user_id,
infos=infos,
ln=ln)
elif user_id is None:
infos.append(_('You must select one borrower.'))
body = bc_templates.tmpl_loan_on_desk_step1(result=None,
key='',
string='',
infos=infos,
ln=ln)
else:
queue = db.get_queue_request(recid, item_description)
(library_id, location) = db.get_lib_location(value)
tup = (recid, value, library_id, location)
list_of_books.append(tup)
book_details = db.get_item_info(value)
item_status = book_details[7]
if item_status != CFG_BIBCIRCULATION_ITEM_STATUS_ON_SHELF:
message = _("%(x_strong_tag_open)sWARNING:%(x_strong_tag_close)s Note that item %(x_strong_tag_open)s%(x_barcode)s%(x_strong_tag_close)s status is %(x_strong_tag_open)s%(x_status)s%(x_strong_tag_close)s") % {'x_barcode': value, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>', 'x_status': item_status}
infos.append(message)
if CFG_CERN_SITE:
library_type = db.get_library_type(library_id)
if library_type != CFG_BIBCIRCULATION_LIBRARY_TYPE_MAIN:
library_name = db.get_library_name(library_id)
message = _("%(x_strong_tag_open)sWARNING:%(x_strong_tag_close)s Note that item %(x_strong_tag_open)s%(x_barcode)s%(x_strong_tag_close)s location is %(x_strong_tag_open)s%(x_location)s%(x_strong_tag_close)s") % {'x_barcode': value, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>', 'x_location': library_name}
infos.append(message)
if len(queue) != 0 and queue[0][0] != user_id:
message = _("Another user is waiting for the book: %(x_strong_tag_open)s%(x_title)s%(x_strong_tag_close)s. \n\n If you want continue with this loan choose %(x_strong_tag_open)s[Continue]%(x_strong_tag_close)s.") % {'x_title': book_title_from_MARC(recid), 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'}
infos.append(message)
body = bc_templates.tmpl_loan_on_desk_step3(user_id=user_id,
list_of_books=list_of_books,
infos=infos, ln=ln)
if list_of_barcodes == []:
infos.append(_('Empty barcode.') + ' ' + _('Please, try again.'))
body = bc_templates.tmpl_loan_on_desk_step2(user_id=user_id,
infos=infos,
ln=ln)
if infos == []:
# shortcut to simplify loan process
due_dates = []
for bc in list_of_barcodes:
due_dates.append(renew_loan_for_X_days(bc))
return loan_on_desk_step4(req, list_of_barcodes, user_id,
due_dates, None, ln)
else:
return page(title=_("Circulation management"),
uid=id_user,
req=req,
body=body,
metaheaderadd = "<link rel=\"stylesheet\" href=\"%s/vendors/jquery-ui/themes/redmond/jquery-ui.min.css\" type=\"text/css\" />" % CFG_SITE_SECURE_URL,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
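# Note on step 3: when no warnings were collected (infos == []) the function
# short-circuits to loan_on_desk_step4(), computing each due date with
# renew_loan_for_X_days(barcode), so the librarian only sees the intermediate
# page when something needs attention.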
def loan_on_desk_step4(req, list_of_barcodes, user_id,
due_date, note, ln=CFG_SITE_LANG):
"""
Step 4/4 of loan procedure.
    Checks that the items are not on loan and that the dates are
    correctly formatted, then creates the loans.
@type user_id: integer
@param user_id: identify the borrower. It is also the primary key of
the table crcBORROWER.
@type list_of_barcodes: list
    @param list_of_barcodes: list of strings with the barcodes
                             entered by the librarian with the barcode reader
@type due_date: list.
@param due_date: list of due dates.
@type note: string.
@param note: note about the new loan.
@return: page with the list 'Last Loans'
"""
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
_ = gettext_set_language(ln)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
infos = []
#loaned_on = datetime.date.today()
    # Check if one of the given items is on loan.
on_loan = []
for barcode in list_of_barcodes:
is_on_loan = db.is_item_on_loan(barcode)
if is_on_loan:
on_loan.append(barcode)
if len(on_loan) != 0:
message = _("The items with barcode %(x_strong_tag_open)s%(x_barcode)s%(x_strong_tag_close)s are already on loan.") % {'x_barcode': on_loan, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'}
infos.append(message)
body = bc_templates.tmpl_loan_on_desk_step1(result=None, key='',
string='', infos=infos,
ln=ln)
return page(title=_("Loan on desk"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
    # validate the due dates given by the admin
for date in due_date:
if validate_date_format(date) is False:
infos = []
message = _("The given due date %(x_strong_tag_open)s%(x_date)s%(x_strong_tag_close)s is not a valid date or date format") % {'x_date': date, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'}
infos.append(message)
list_of_books = []
for bc in list_of_barcodes:
recid = db.get_id_bibrec(bc)
(library_id, location) = db.get_lib_location(bc)
tup = (recid, bc, library_id, location)
list_of_books.append(tup)
body = bc_templates.tmpl_loan_on_desk_step3(user_id=user_id,
list_of_books=list_of_books,
infos=infos, ln=ln)
return page(title=_("Circulation management"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
#if borrower_id == None:
# db.new_borrower(ccid, name, email, phone, address, mailbox, '')
# borrower_id = db.get_borrower_id_by_email(email)
for i in range(len(list_of_barcodes)):
note_format = {}
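        # The note, when given, is stored as a dict keyed by a timestamp,
        # e.g. {'2014-01-01 12:00:00': 'lost receipt'} (values are made up);
        # register_new_loan() below serialises its note as a plain
        # '[<ctime>] <note>\n' string instead.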
if note:
note_format[time.strftime("%Y-%m-%d %H:%M:%S")] = str(note)
barcode = list_of_barcodes[i]
recid = db.get_id_bibrec(barcode)
db.new_loan(user_id, recid, barcode, due_date[i],
CFG_BIBCIRCULATION_LOAN_STATUS_ON_LOAN,
'normal', note_format)
        # Duplicate requests on items belonging to a single record have been disabled.
db.tag_requests_as_done(user_id, barcode)
# tag_all_requests_as_done(barcode, user_id)
db.update_item_status(CFG_BIBCIRCULATION_ITEM_STATUS_ON_LOAN, barcode)
update_requests_statuses(barcode)
infos.append(_("A loan for the item %(x_strong_tag_open)s%(x_title)s%(x_strong_tag_close)s, with barcode %(x_strong_tag_open)s%(x_barcode)s%(x_strong_tag_close)s, has been registered with success.") % {'x_title': book_title_from_MARC(recid), 'x_barcode': barcode, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'})
infos.append(_("You could enter the barcode for this user's next loan, if any."))
body = bc_templates.tmpl_loan_on_desk_step2(user_id=user_id,
infos=infos, ln=ln)
return page(title=_("Circulation management"),
uid=id_user,
req=req,
body=body,
metaheaderadd = "<link rel=\"stylesheet\" href=\"%s/vendors/jquery-ui/themes/redmond/jquery-ui.min.css\" type=\"text/css\" />" % CFG_SITE_SECURE_URL,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
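# Worked example for step 4 (illustrative only; barcodes, ids and dates are
# made up):
#
#   loan_on_desk_step4(req, ['CM-B001', 'CM-B002'], 42,
#                      ['2014-09-01', '2014-09-01'], 'desk note')
#
# registers one loan per barcode for borrower 42, flags both items as
# CFG_BIBCIRCULATION_ITEM_STATUS_ON_LOAN and tags the matching pending
# requests as done.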
def loan_on_desk_confirm(req, barcode=None, borrower_id=None, ln=CFG_SITE_LANG):
"""
    *** Obsolete and unmaintained function ***
Confirm the return of an item.
@type barcode: string.
@param barcode: identify the item. It is the primary key of the table
crcITEM.
@type borrower_id: integer.
@param borrower_id: identify the borrower. It is also the primary key of
the table crcBORROWER.
"""
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
result = db.loan_on_desk_confirm(barcode, borrower_id)
body = bc_templates.tmpl_loan_on_desk_confirm(result=result,
barcode=barcode,
borrower_id=borrower_id,
ln=ln)
return page(title=_("Loan on desk confirm"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def register_new_loan(req, barcode, borrower_id,
request_id, new_note, print_data, ln=CFG_SITE_LANG):
"""
Register a new loan. This function is from the "Create Loan" pages.
@type barcode: string.
@param barcode: identify the item. It is the primary key of the table
crcITEM.
@type borrower_id: integer.
@param borrower_id: identify the borrower. It is also the primary key of
the table crcBORROWER.
@type request_id: integer.
@param request_id: identify the hold request. It is also the primary key
of the table crcLOANREQUEST.
@type new_note: string.
@param new_note: associate a note to this loan.
@type print_data: string.
@param print_data: print the information about this loan.
@return: new loan
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
has_recid = db.get_id_bibrec(barcode)
loan_id = db.is_item_on_loan(barcode)
recid = db.get_request_recid(request_id)
req_barcode = db.get_requested_barcode(request_id)
req_description = db.get_item_description(req_barcode)
# Get all the items belonging to the record whose
# description is the same.
list_of_barcodes = db.get_barcodes(recid, req_description)
infos = []
if print_data == 'true':
return print_new_loan_information(req, ln)
else:
if has_recid is None:
message = _('%(x_strong_tag_open)s%(x_barcode)s%(x_strong_tag_close)s Unknown barcode.') % {'x_barcode': barcode, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'} + ' ' + _('Please, try again.')
infos.append(message)
borrower = db.get_borrower_details(borrower_id)
title = _("Create Loan")
body = bc_templates.tmpl_create_loan(request_id=request_id,
recid=recid,
borrower=borrower,
infos=infos,
ln=ln)
elif loan_id:
infos.append(_('The item with the barcode %(x_strong_tag_open)s%(x_barcode)s%(x_strong_tag_close)s is on loan.') % {'x_barcode': barcode, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'})
borrower = db.get_borrower_details(borrower_id)
title = _("Create Loan")
body = bc_templates.tmpl_create_loan(request_id=request_id,
recid=recid,
borrower=borrower,
infos=infos,
ln=ln)
elif barcode not in list_of_barcodes:
            infos.append(_('The given barcode "%(x_barcode)s" does not correspond to the requested item.') % {'x_barcode': barcode})
borrower = db.get_borrower_details(borrower_id)
title = _("Create Loan")
body = bc_templates.tmpl_create_loan(request_id=request_id,
recid=recid,
borrower=borrower,
infos=infos,
ln=ln)
else:
recid = db.get_id_bibrec(barcode)
#loaned_on = datetime.date.today()
due_date = renew_loan_for_X_days(barcode)
if new_note:
note_format = '[' + time.ctime() + '] ' + new_note + '\n'
else:
note_format = ''
last_id = db.new_loan(borrower_id, recid, barcode,
due_date, CFG_BIBCIRCULATION_LOAN_STATUS_ON_LOAN,
'normal', note_format)
# register event in webstat
try:
register_customevent("loanrequest", [request_id, last_id])
except:
register_exception(suffix="Do the webstat tables exists? Try with 'webstatadmin --load-config'")
tag_all_requests_as_done(barcode, borrower_id)
db.update_item_status(CFG_BIBCIRCULATION_ITEM_STATUS_ON_LOAN, barcode)
db.update_loan_request_status(CFG_BIBCIRCULATION_REQUEST_STATUS_DONE,
request_id)
db.update_request_barcode(barcode, request_id)
update_requests_statuses(barcode)
result = db.get_all_loans(20)
infos.append(_('A new loan has been registered with success.'))
title = _("Current loans")
body = bc_templates.tmpl_all_loans(result=result,
infos=infos,
ln=ln)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
return page(title=title,
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
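# Unlike the on-desk flow, register_new_loan() is reached from the
# "Create Loan" page, so it also validates that the scanned barcode belongs
# to the requested record/description before creating the loan and closing
# the hold request.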
def create_loan(req, request_id, recid, borrower_id, ln=CFG_SITE_LANG):
"""
Create a new loan from a hold request.
@type request_id: integer.
@param request_id: identify the hold request. It is also the primary key
of the table crcLOANREQUEST.
@type recid: integer.
@param recid: identify the record. It is also the primary key of
the table bibrec.
@type borrower_id: integer.
@param borrower_id: identify the borrower. It is also the primary key of
the table crcBORROWER.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
borrower = db.get_borrower_details(borrower_id)
infos = []
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_create_loan(request_id=request_id,
recid=recid,
borrower=borrower,
infos=infos,
ln=ln)
return page(title=_("Create Loan"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def make_new_loan_from_request(req, check_id, barcode, ln=CFG_SITE_LANG):
"""
Turns a request into a loan.
@type check_id: integer.
@param check_id: identify the hold request. It is also the primary key
of the table crcLOANREQUEST.
@type barcode: string.
@param barcode: identify the item. It is the primary key of the table
crcITEM.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
infos = []
recid = db.get_request_recid(check_id)
borrower_id = db.get_request_borrower_id(check_id)
borrower_info = db.get_borrower_details(borrower_id)
due_date = renew_loan_for_X_days(barcode)
if db.is_item_on_loan(barcode):
infos.append('The item with the barcode %(x_strong_tag_open)s%(x_barcode)s%(x_strong_tag_close)s is on loan.' % {'x_barcode': barcode, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'})
return redirect_to_url(req,
'%s/admin2/bibcirculation/all_loans?ln=%s&msg=ok' % (CFG_SITE_SECURE_URL, ln))
else:
db.new_loan(borrower_id, recid, barcode, due_date,
CFG_BIBCIRCULATION_LOAN_STATUS_ON_LOAN, 'normal', '')
infos.append(_('A new loan has been registered with success.'))
#try:
# register_customevent("baskets", ["display", "", user_str])
#except:
# register_exception(suffix="Do the webstat tables exists? Try with 'webstatadmin --load-config'")
tag_all_requests_as_done(barcode, borrower_id)
db.update_item_status(CFG_BIBCIRCULATION_ITEM_STATUS_ON_LOAN, barcode)
update_requests_statuses(barcode)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">' \
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
body = bc_templates.tmpl_register_new_loan(borrower_info=borrower_info,
infos=infos,
recid=recid,
ln=ln)
return page(title=_("New Loan"),
uid=id_user,
req=req,
body=body,
language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
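# A sketch of the request-to-loan shortcut above: given a hold request id and
# a barcode scanned at the desk, the borrower and record are resolved from
# the request itself, the due date is computed with
# renew_loan_for_X_days(barcode), and the loan is created unless the item is
# already out.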
def loan_return(req, ln=CFG_SITE_LANG):
"""
    Page where it is possible to register the return of an item.
"""
_ = gettext_set_language(ln)
infos = []
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">' \
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
body = bc_templates.tmpl_loan_return(infos=infos, ln=ln)
return page(title=_("Loan return"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def loan_return_confirm(req, barcode, ln=CFG_SITE_LANG):
"""
    Performs the return of a loan and displays a confirmation page.
    If the book has pending requests, it is possible to select one
    and create a loan from it (make_new_loan_from_request).
@type barcode: string.
@param barcode: identify the item. It is the primary key of the table
crcITEM.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
infos = []
_ = gettext_set_language(ln)
recid = db.get_id_bibrec(barcode)
loan_id = db.is_item_on_loan(barcode)
if recid is None:
infos.append(_('%(x_strong_tag_open)s%(x_barcode)s%(x_strong_tag_close)s Unknown barcode.') % {'x_barcode': barcode, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'} + ' ' + _('Please, try again.'))
body = bc_templates.tmpl_loan_return(infos=infos, ln=ln)
elif loan_id is None:
message = _("The item the with barcode %(x_strong_tag_open)s%(x_barcode)s%(x_strong_tag_close)s is not on loan. Please, try again.") % {'x_barcode': barcode, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'}
infos.append(message)
body = bc_templates.tmpl_loan_return(infos=infos, ln=ln)
else:
library_id = db.get_item_info(barcode)[1]
if CFG_CERN_SITE:
library_type = db.get_library_type(library_id)
if library_type != CFG_BIBCIRCULATION_LIBRARY_TYPE_MAIN:
library_name = db.get_library_name(library_id)
message = _("%(x_strong_tag_open)sWARNING:%(x_strong_tag_close)s Note that item %(x_strong_tag_open)s%(x_barcode)s%(x_strong_tag_close)s location is %(x_strong_tag_open)s%(x_location)s%(x_strong_tag_close)s") % {'x_barcode': barcode, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>', 'x_location': library_name}
infos.append(message)
borrower_id = db.get_borrower_id(barcode)
borrower_name = db.get_borrower_name(borrower_id)
db.update_item_status(CFG_BIBCIRCULATION_ITEM_STATUS_ON_SHELF, barcode)
db.return_loan(barcode)
update_requests_statuses(barcode)
description = db.get_item_description(barcode)
result = db.get_pending_loan_request(recid, description)
body = bc_templates.tmpl_loan_return_confirm(
infos=infos,
borrower_name=borrower_name,
borrower_id=borrower_id,
recid=recid,
barcode=barcode,
return_date=datetime.date.today(),
result=result,
ln=ln)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
return page(title=_("Loan return"),
uid=id_user,
req=req,
body=body,
language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
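# On a successful return the item is put back to
# CFG_BIBCIRCULATION_ITEM_STATUS_ON_SHELF and update_requests_statuses()
# re-evaluates the queue; pending requests for the same description are then
# listed on the confirmation page so a new loan can be created from one of
# them.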
def claim_book_return(req, borrower_id, recid, loan_id,
template, ln=CFG_SITE_LANG):
"""
Claim the return of an item.
borrower_id: identify the borrower. It is also the primary key of
the table crcBORROWER.
recid: identify the record. It is also the primary key of
the table bibrec.
template: letter template.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
email_body = generate_email_body(load_template(template), loan_id)
email = db.get_borrower_email(borrower_id)
subject = book_title_from_MARC(int(recid))
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_borrower_notification(email=email,
subject=subject,
email_body=email_body,
borrower_id=borrower_id,
from_address=CFG_BIBCIRCULATION_LOANS_EMAIL,
ln=ln)
return page(title=_("Claim return"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def change_due_date_step1(req, barcode, borrower_id, ln=CFG_SITE_LANG):
"""
Change the due date of a loan, step1.
    barcode: identify the item. It is the primary key of the table
             crcITEM.
borrower_id: identify the borrower. It is also the primary key of
the table crcBORROWER.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
loan_id = db.get_current_loan_id(barcode)
loan_details = db.get_loan_infos(loan_id)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_change_due_date_step1(loan_details=loan_details,
loan_id=loan_id,
borrower_id=borrower_id,
ln=ln)
return page(title=_("Change due date"),
uid=id_user,
req=req,
body=body, language=ln,
#metaheaderadd = '<link rel="stylesheet" '\
# 'href="%s/img/jquery-ui/themes/redmond/ui.theme.css" '\
# 'type="text/css" />' % CFG_SITE_SECURE_URL,
metaheaderadd = '<link rel="stylesheet" href="%s/vendors/jquery-ui/themes/redmond/jquery-ui.css" '\
'type="text/css" />' % CFG_SITE_SECURE_URL,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def change_due_date_step2(req, new_due_date, loan_id, borrower_id,
ln=CFG_SITE_LANG):
"""
    Change the due date of a loan, step2.
    new_due_date: new due date of the loan.
    loan_id: identify a loan. It is the primary key of the table
             crcLOAN.
    borrower_id: identify the borrower. It is also the primary key of
                 the table crcBORROWER.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
db.update_due_date(loan_id, new_due_date)
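    # Re-evaluate the loan status in case the new due date is already in
    # the past.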
update_status_if_expired(loan_id)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_change_due_date_step2(new_due_date=new_due_date,
borrower_id=borrower_id,
ln=ln)
return page(title=_("Change due date"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def place_new_request_step1(req, barcode, recid, key, string, ln=CFG_SITE_LANG):
"""
Place a new request from the item's page, step1.
barcode: identify the item. It is the primary key of the table
crcITEM.
recid: identify the record. It is also the primary key of
the table bibrec.
key: search field.
string: search pattern.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
recid = db.get_id_bibrec(barcode)
infos = []
if key and not string:
infos.append(_('Empty string.') + ' ' + _('Please, try again.'))
body = bc_templates.tmpl_place_new_request_step1(result=None,
key=key,
string=string,
barcode=barcode,
recid=recid,
infos=infos,
ln=ln)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
return page(title=_("New request"),
uid=id_user,
req=req,
body=body,
language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
result = search_user(key, string)
borrowers_list = []
if len(result) == 0 and key:
if CFG_CERN_SITE:
infos.append(_("0 borrowers found.") + ' ' +_("Search by CCID."))
else:
new_borrower_link = create_html_link(CFG_SITE_SECURE_URL +
'/admin2/bibcirculation/add_new_borrower_step1',
{'ln': ln}, _("Register new borrower."))
message = _("0 borrowers found.") + ' ' + new_borrower_link
infos.append(message)
else:
for user in result:
borrower_data = db.get_borrower_data_by_id(user[0])
borrowers_list.append(borrower_data)
if len(result) == 1:
return place_new_request_step2(req, barcode, recid,
borrowers_list[0], ln)
else:
body = bc_templates.tmpl_place_new_request_step1(result=borrowers_list,
key=key,
string=string,
barcode=barcode,
recid=recid,
infos=infos,
ln=ln)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
return page(title=_("New request"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def place_new_request_step2(req, barcode, recid, user_info, ln=CFG_SITE_LANG):
"""
Place a new request from the item's page, step2.
@type barcode: string.
@param barcode: identify the item. It is the primary key of the table
crcITEM.
@type recid: integer.
@param recid: identify the record. It is also the primary key of
the table bibrec.
@type user_info: list.
@param user_info: information of the user/borrower who was selected.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
infos = []
body = bc_templates.tmpl_place_new_request_step2(barcode=barcode,
recid=recid,
user_info=user_info,
infos=infos,
ln=ln)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
return page(title=_("New request"),
uid=id_user,
req=req,
body=body,
metaheaderadd = "<link rel=\"stylesheet\" href=\"%s/vendors/jquery-ui/themes/redmond/jquery-ui.min.css\" type=\"text/css\" />" % CFG_SITE_SECURE_URL,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def place_new_request_step3(req, barcode, recid, user_info,
period_from, period_to, ln=CFG_SITE_LANG):
"""
Place a new request from the item's page, step3.
@type barcode: string.
@param barcode: identify the item. It is the primary key of the table
crcITEM.
@type recid: integer.
@param recid: identify the record. It is also the primary key of
the table bibrec.
@return: new request.
"""
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
(_id, ccid, name, email, phone, address, mailbox) = user_info
# validate the period of interest given by the admin
if validate_date_format(period_from) is False:
infos = []
infos.append(_("The period of interest %(x_strong_tag_open)sFrom: %(x_date)s%(x_strong_tag_close)s is not a valid date or date format") % {'x_date': period_from, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'})
body = bc_templates.tmpl_place_new_request_step2(barcode=barcode,
recid=recid,
user_info=user_info,
infos=infos,
ln=ln)
return page(title=_("New request"),
uid=id_user,
req=req,
body=body,
language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
    elif validate_date_format(period_to) is False:
        infos = []
        infos.append(_("The period of interest %(x_strong_tag_open)sTo: %(x_date)s%(x_strong_tag_close)s is not a valid date or date format") % {'x_date': period_to, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'})
        body = bc_templates.tmpl_place_new_request_step2(barcode=barcode,
                                                         recid=recid,
                                                         user_info=user_info,
                                                         infos=infos,
                                                         ln=ln)
        return page(title=_("New request"),
                    uid=id_user,
                    req=req,
                    body=body,
                    language=ln,
                    navtrail=navtrail_previous_links,
                    lastupdated=__lastupdated__)
# Register request
borrower_id = db.get_borrower_id_by_email(email)
    if borrower_id is None:
db.new_borrower(ccid, name, email, phone, address, mailbox, '')
borrower_id = db.get_borrower_id_by_email(email)
req_id = db.new_hold_request(borrower_id, recid, barcode,
period_from, period_to,
CFG_BIBCIRCULATION_REQUEST_STATUS_WAITING)
pending_request = update_requests_statuses(barcode)
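    # update_requests_statuses() appears to return the id of the request
    # now marked as pending for this barcode; notify the library and the
    # borrower only when the new request is that one (first in the queue).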
if req_id == pending_request:
(title, year, author,
isbn, publisher) = book_information_from_MARC(int(recid))
details = db.get_loan_request_details(req_id)
if details:
library = details[3]
location = details[4]
request_date = details[7]
else:
location = ''
library = ''
request_date = ''
link_to_holdings_details = CFG_SITE_URL + \
'/record/%s/holdings' % str(recid)
subject = _('New request')
message = load_template('notification')
message = message % (name, ccid, email, address, mailbox, title,
author, publisher, year, isbn, location, library,
link_to_holdings_details, request_date)
send_email(fromaddr = CFG_BIBCIRCULATION_LIBRARIAN_EMAIL,
toaddr = CFG_BIBCIRCULATION_LOANS_EMAIL,
subject = subject,
content = message,
header = '',
footer = '',
attempt_times=1,
attempt_sleeptime=10
)
send_email(fromaddr = CFG_BIBCIRCULATION_LIBRARIAN_EMAIL,
toaddr = email,
subject = subject,
content = message,
header = '',
footer = '',
attempt_times=1,
attempt_sleeptime=10
)
body = bc_templates.tmpl_place_new_request_step3(ln=ln)
return page(title=_("New request"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def place_new_loan_step1(req, barcode, recid, key, string, ln=CFG_SITE_LANG):
"""
Place a new loan from the item's page, step1.
@type barcode: string.
@param barcode: identify the item. It is the primary key of the table
crcITEM.
@type recid: integer.
@param recid: identify the record. It is also the primary key of
the table bibrec.
@type key: string.
@param key: search field.
@type string: string.
@param string: search pattern.
@return: list of users/borrowers.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
recid = db.get_id_bibrec(barcode)
infos = []
if key and not string:
infos.append(_('Empty string.') + ' ' + _('Please, try again.'))
body = bc_templates.tmpl_place_new_loan_step1(result=None,
key=key,
string=string,
barcode=barcode,
recid=recid,
infos=infos,
ln=ln)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
return page(title=_("New loan"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
result = search_user(key, string)
borrowers_list = []
if len(result) == 0 and key:
if CFG_CERN_SITE:
infos.append(_("0 borrowers found.") + ' ' +_("Search by CCID."))
else:
new_borrower_link = create_html_link(CFG_SITE_SECURE_URL +
'/admin2/bibcirculation/add_new_borrower_step1',
{'ln': ln}, _("Register new borrower."))
message = _("0 borrowers found.") + ' ' + new_borrower_link
infos.append(message)
else:
for user in result:
borrower_data = db.get_borrower_data_by_id(user[0])
borrowers_list.append(borrower_data)
body = bc_templates.tmpl_place_new_loan_step1(result=borrowers_list,
key=key,
string=string,
barcode=barcode,
recid=recid,
infos=infos,
ln=ln)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
return page(title=_("New loan"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def place_new_loan_step2(req, barcode, recid, user_info, ln=CFG_SITE_LANG):
"""
Place a new loan from the item's page, step2.
@type barcode: string.
@param barcode: identify the item. It is the primary key of the table
crcITEM.
@type recid: integer.
@param recid: identify the record. It is also the primary key of
the table bibrec.
@type user_info: list.
@param user_info: information of the user/borrower who was selected.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
body = bc_templates.tmpl_place_new_loan_step2(barcode=barcode,
recid=recid,
user_info=user_info,
ln=ln)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
return page(title=_("New loan"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def place_new_loan_step3(req, barcode, recid, ccid, name, email, phone,
address, mailbox, due_date, notes, ln=CFG_SITE_LANG):
"""
Place a new loan from the item's page, step3.
@type barcode: string.
@param barcode: identify the item. It is the primary key of the table
crcITEM.
@type recid: integer.
@param recid: identify the record. It is also the primary key of
the table bibrec.
@type name: string.
@type email: string.
@type phone: string.
@type address: string.
    @type mailbox: string.
@type due_date: string.
@type notes: string.
@return: new loan.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
infos = []
if notes:
notes_format = '[' + time.ctime() + '] ' + notes + '\n'
else:
notes_format = ''
#loaned_on = datetime.date.today()
borrower_id = db.get_borrower_id_by_email(email)
borrower_info = db.get_borrower_data(borrower_id)
if db.is_on_loan(barcode):
infos.append(_("Item with barcode %(x_strong_tag_open)s%(x_barcode)s%(x_strong_tag_close)s is already on loan.") % {'x_barcode': barcode, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'})
copies = db.get_item_copies_details(recid)
requests = db.get_item_requests(recid)
loans = db.get_item_loans(recid)
purchases = db.get_item_purchases(CFG_BIBCIRCULATION_ACQ_STATUS_NEW, recid)
req_hist_overview = db.get_item_requests_historical_overview(recid)
loans_hist_overview = db.get_item_loans_historical_overview(recid)
purchases_hist_overview = db.get_item_purchases(CFG_BIBCIRCULATION_ACQ_STATUS_RECEIVED, recid)
title = _("Item details")
body = bc_templates.tmpl_get_item_details(
recid=recid, copies=copies,
requests=requests, loans=loans,
purchases=purchases,
req_hist_overview=req_hist_overview,
loans_hist_overview=loans_hist_overview,
purchases_hist_overview=purchases_hist_overview,
infos=infos, ln=ln)
elif borrower_id != 0:
db.new_loan(borrower_id, recid, barcode,
due_date, CFG_BIBCIRCULATION_LOAN_STATUS_ON_LOAN,
'normal', notes_format)
tag_all_requests_as_done(barcode, borrower_id)
db.update_item_status(CFG_BIBCIRCULATION_ITEM_STATUS_ON_LOAN, barcode)
update_requests_statuses(barcode)
title = _("New loan")
body = bc_templates.tmpl_register_new_loan(borrower_info=borrower_info,
infos=infos,
recid=recid, ln=ln)
else:
db.new_borrower(ccid, name, email, phone, address, mailbox, '')
borrower_id = db.get_borrower_id_by_email(email)
db.new_loan(borrower_id, recid, barcode,
due_date, CFG_BIBCIRCULATION_LOAN_STATUS_ON_LOAN,
'normal', notes_format)
tag_all_requests_as_done(barcode, borrower_id)
db.update_item_status(CFG_BIBCIRCULATION_ITEM_STATUS_ON_LOAN, barcode)
update_requests_statuses(barcode)
title = _("New loan")
body = bc_templates.tmpl_register_new_loan(borrower_info=borrower_info,
infos=infos,
recid=recid,
ln=ln)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">'\
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
return page(title=title,
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def create_new_request_step1(req, borrower_id, p="", f="", search=None,
ln=CFG_SITE_LANG):
"""
Create a new request from the borrower's page, step1.
borrower_id: identify the borrower. It is also the primary key of
the table crcBORROWER.
p: search pattern.
    f: search field.
search: search an item.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
infos = []
    if borrower_id is not None:
borrower = db.get_borrower_details(borrower_id)
else:
message = _('Empty borrower ID.')
return borrower_search(req, message, False, ln)
if search and p == '':
infos.append(_('Empty string.') + ' ' + _('Please, try again.'))
result = ''
elif search and f == 'barcode':
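        # Strip quotes and whitespace that may surround a scanned or
        # pasted barcode before looking it up.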
p = p.strip('\'" \t')
has_recid = db.get_id_bibrec(p)
if has_recid is None:
infos.append(_('The barcode %(x_strong_tag_open)s%(x_barcode)s%(x_strong_tag_close)s does not exist on BibCirculation database.') % {'x_barcode': p, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'})
result = ''
else:
result = has_recid
elif search:
result = perform_request_search(cc="Books", sc="1", p=p, f=f)
else:
result = ''
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
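    # A barcode lookup above yields a single record id (an integer),
    # while a regular search yields a list of hits; branch accordingly.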
    if isinstance(result, (types.IntType, types.LongType)):
recid = result
holdings_information = db.get_holdings_information(recid)
user_info = db.get_borrower_details(borrower_id)
body = bc_templates.tmpl_create_new_request_step2(user_info=user_info,
holdings_information=holdings_information,
recid=recid, ln=ln)
else:
body = bc_templates.tmpl_create_new_request_step1(borrower=borrower,
infos=infos,
result=result,
p=p,
f=f,
ln=ln)
return page(title=_("New request"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def create_new_request_step2(req, recid, borrower_id, ln=CFG_SITE_LANG):
"""
Create a new request from the borrower's page, step2.
recid: identify the record. It is also the primary key of
the table bibrec.
borrower_id: identify the borrower. It is also the primary key of
the table crcBORROWER.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
holdings_information = db.get_holdings_information(recid)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
user_info = db.get_borrower_details(borrower_id)
body = bc_templates.tmpl_create_new_request_step2(user_info=user_info,
holdings_information=holdings_information,
recid=recid, ln=ln)
return page(title=_("New request"),
uid=id_user,
req=req,
body=body,
language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def create_new_request_step3(req, borrower_id, barcode, recid,
ln=CFG_SITE_LANG):
"""
Create a new request from the borrower's page, step3.
borrower_id: identify the borrower. It is also the primary key of
the table crcBORROWER.
barcode: identify the item. It is the primary key of the table
crcITEM.
recid: identify the record. It is also the primary key of
the table bibrec.
"""
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
item_info = db.get_item_info(barcode)
if item_info[6] == 'Reference':
body = bc_templates.tmpl_book_not_for_loan(ln=ln)
else:
body = bc_templates.tmpl_create_new_request_step3(
borrower_id=borrower_id,
barcode=barcode,
recid=recid,
ln=ln)
return page(title=_("New request"),
uid=id_user,
req=req,
body=body,
metaheaderadd = "<link rel=\"stylesheet\" href=\"%s/vendors/jquery-ui/themes/redmond/jquery-ui.min.css\" type=\"text/css\" />" % CFG_SITE_SECURE_URL,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def create_new_request_step4(req, period_from, period_to, barcode,
borrower_id, recid, ln=CFG_SITE_LANG):
"""
Create a new request from the borrower's page, step4.
    period_from: beginning of the period of interest.
period_to: end of the period of interest.
barcode: identify the item. It is the primary key of the table
crcITEM.
borrower_id: identify the borrower. It is also the primary key of
the table crcBORROWER.
recid: identify the record. It is also the primary key of
the table bibrec.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
db.new_hold_request(borrower_id, recid, barcode,
period_from, period_to,
CFG_BIBCIRCULATION_REQUEST_STATUS_WAITING)
update_requests_statuses(barcode)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_create_new_request_step4(ln=ln)
return page(title=_("New request"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def create_new_loan_step1(req, borrower_id, ln=CFG_SITE_LANG):
"""
Create a new loan from the borrower's page, step1.
borrower_id: identify the borrower. It is also the primary key of
the table crcBORROWER.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
infos = []
borrower = db.get_borrower_details(borrower_id)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_create_new_loan_step1(borrower=borrower,
infos=infos,
ln=ln)
return page(title=_("New loan"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def create_new_loan_step2(req, borrower_id, barcode, notes, ln=CFG_SITE_LANG):
"""
Create a new loan from the borrower's page, step2.
borrower_id: identify the borrower. It is also the primary key of
the table crcBORROWER.
barcode: identify the item. It is the primary key of the table
crcITEM.
notes: notes about the new loan.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
#borrower_info = db.get_borrower_data(borrower_id)
has_recid = db.get_id_bibrec(barcode)
loan_id = db.is_item_on_loan(barcode)
if notes:
notes_format = '[' + time.ctime() + '] ' + notes + '\n'
else:
notes_format = ''
infos = []
if has_recid is None:
infos.append(_('%(x_strong_tag_open)s%(x_barcode)s%(x_strong_tag_close)s Unknown barcode.') % {'x_barcode': barcode, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'} + ' ' + _('Please, try again.'))
borrower = db.get_borrower_details(borrower_id)
title = _("New loan")
body = bc_templates.tmpl_create_new_loan_step1(borrower=borrower,
infos=infos,
ln=ln)
elif loan_id:
infos.append(_('The item with the barcode %(x_strong_tag_open)s%(x_barcode)s%(x_strong_tag_close)s is on loan.') % {'x_barcode': barcode, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'})
borrower = db.get_borrower_details(borrower_id)
title = _("New loan")
body = bc_templates.tmpl_create_new_loan_step1(borrower=borrower,
infos=infos,
ln=ln)
else:
#loaned_on = datetime.date.today()
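        # The default due date is computed the same way as a renewal
        # (presumably today plus the item's loan period).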
due_date = renew_loan_for_X_days(barcode)
db.new_loan(borrower_id, has_recid, barcode,
due_date, CFG_BIBCIRCULATION_LOAN_STATUS_ON_LOAN,
'normal', notes_format)
tag_all_requests_as_done(barcode, borrower_id)
db.update_item_status(CFG_BIBCIRCULATION_ITEM_STATUS_ON_LOAN, barcode)
update_requests_statuses(barcode)
result = db.get_all_loans(20)
title = _("Current loans")
infos.append(_('A new loan has been registered with success.'))
body = bc_templates.tmpl_all_loans(result=result, infos=infos, ln=ln)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
return page(title=title,
uid=id_user,
req=req,
body=body,
language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def all_requests(req, request_id, ln=CFG_SITE_LANG):
"""
Display all requests.
@type request_id: integer.
@param request_id: identify the hold request. It is also the primary key
of the table crcLOANREQUEST.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
    if request_id:
        db.update_loan_request_status(CFG_BIBCIRCULATION_REQUEST_STATUS_CANCELLED,
                                      request_id)
    result = db.get_all_requests()
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_all_requests(result=result, ln=ln)
return page(title=_("List of hold requests"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def all_loans(req, msg=None, ln=CFG_SITE_LANG):
"""
    Display all loans.
    @type msg: string.
    @param msg: when 'ok', show a confirmation that a new loan was
                registered.
    @return: list with all loans (current loans).
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
infos = []
if msg == 'ok':
infos.append(_('A new loan has been registered with success.'))
result = db.get_all_loans(20)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">' \
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
body = bc_templates.tmpl_all_loans(result=result, infos=infos, ln=ln)
return page(title=_("Current loans"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def all_expired_loans(req, ln=CFG_SITE_LANG):
"""
    Display all expired (overdue) loans.
    @return: list with all expired loans (overdue loans).
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
result = db.get_all_expired_loans()
infos = []
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">' \
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
body = bc_templates.tmpl_all_expired_loans(result=result,
infos=infos,
ln=ln)
return page(title=_('Overdue loans'),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def get_pending_requests(req, request_id, print_data, ln=CFG_SITE_LANG):
"""
    Return all loan requests that are pending. If request_id is not None,
    cancel the request first and then return all pending loan requests.
@type request_id: integer.
@param request_id: identify the hold request. It is also the primary key
of the table crcLOANREQUEST.
@type print_data: string.
@param print_data: print requests information.
@return: list of pending requests (on shelf with hold).
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
if print_data == 'true':
return print_pending_hold_requests_information(req, ln)
elif request_id:
# Cancel a request too.
db.update_loan_request_status(CFG_BIBCIRCULATION_REQUEST_STATUS_CANCELLED,
request_id)
barcode = db.get_request_barcode(request_id)
update_requests_statuses(barcode)
result = db.get_loan_request_by_status(CFG_BIBCIRCULATION_REQUEST_STATUS_PENDING)
else:
result = db.get_loan_request_by_status(CFG_BIBCIRCULATION_REQUEST_STATUS_PENDING)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_get_pending_requests(result=result, ln=ln)
return page(title=_("Items on shelf with holds"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def get_waiting_requests(req, request_id, print_data, ln=CFG_SITE_LANG):
"""
Get all loans requests that are waiting.
@type request_id: integer.
@param request_id: identify the hold request. It is also the primary key
of the table crcLOANREQUEST.
@type print_data: string.
@param print_data: print requests information.
@return: list of waiting requests (on loan with hold).
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
if print_data == 'true':
return print_pending_hold_requests_information(req, ln)
elif request_id:
db.update_loan_request_status(CFG_BIBCIRCULATION_REQUEST_STATUS_CANCELLED,
request_id)
result = db.get_loan_request_by_status(CFG_BIBCIRCULATION_REQUEST_STATUS_WAITING)
aux = ()
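    # Keep only the waiting requests whose item currently has at least
    # one copy on loan.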
for request in result:
if db.get_nb_copies_on_loan(request[1]):
            aux += (request,)
result = aux
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_get_waiting_requests(result=result, ln=ln)
return page(title=_("Items on loan with holds"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def get_expired_loans_with_waiting_requests(req, request_id, ln=CFG_SITE_LANG):
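    """
    Display all overdue loans that have waiting requests.
    @type request_id: integer.
    @param request_id: identify the hold request to cancel, if any.
    """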
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
    if request_id:
        db.update_loan_request_status(CFG_BIBCIRCULATION_REQUEST_STATUS_CANCELLED,
                                      request_id)
    result = db.get_expired_loans_with_waiting_requests()
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">'\
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
body = bc_templates.tmpl_get_expired_loans_with_waiting_requests(result=result,
ln=ln)
return page(title=_("Overdue loans with holds"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def get_loans_notes(req, loan_id, delete_key,
library_notes, back, ln=CFG_SITE_LANG):
"""
Get loan's note(s).
@type loan_id: integer.
    @param loan_id: identify a loan. It is the primary key of the table
                    crcLOAN.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
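    # Loan notes are stored as the string representation of a dictionary
    # keyed by timestamp; eval() turns that string back into a dict.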
if delete_key and loan_id:
if looks_like_dictionary(db.get_loan_notes(loan_id)):
loans_notes = eval(db.get_loan_notes(loan_id))
if delete_key in loans_notes.keys():
del loans_notes[delete_key]
db.update_loan_notes(loan_id, loans_notes)
elif library_notes:
if db.get_loan_notes(loan_id):
if looks_like_dictionary(db.get_loan_notes(loan_id)):
loans_notes = eval(db.get_loan_notes(loan_id))
else:
loans_notes = {}
else:
loans_notes = {}
note_time = time.strftime("%Y-%m-%d %H:%M:%S")
if note_time not in loans_notes.keys():
loans_notes[note_time] = str(library_notes)
db.update_loan_notes(loan_id, loans_notes)
loans_notes = db.get_loan_notes(loan_id)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">' \
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
referer = req.headers_in.get('referer')
body = bc_templates.tmpl_get_loans_notes(loans_notes=loans_notes,
loan_id=loan_id,
referer=referer, back=back,
ln=ln)
return page(title=_("Loan notes"),
uid=id_user,
req=req,
body=body,
language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def get_item_loans_notes(req, loan_id, add_notes, new_note, ln=CFG_SITE_LANG):
"""
    Get loan's notes.
    @param loan_id: identify a loan. It is the primary key of the table
                    crcLOAN.
    @param add_notes: display the textarea where a new note will be
                      written.
    @param new_note: note that will be added to the other library notes.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
if new_note:
date = '[' + time.ctime() + '] '
new_line = '\n'
new_note = date + new_note + new_line
db.add_new_loan_note(new_note, loan_id)
loans_notes = db.get_loan_notes(loan_id)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_get_loans_notes(loans_notes=loans_notes,
loan_id=loan_id,
add_notes=add_notes,
ln=ln)
return page(title=_("Loan notes"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
###
### Items and their copies related templates.
###
def get_item_details(req, recid, ln=CFG_SITE_LANG):
"""
Display the details of an item.
@type recid: integer.
@param recid: identify the record. It is also the primary key of
the table bibrec.
@return: item details.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
    infos = []
    if recid is None:
infos.append(_("Record id not valid"))
copies = db.get_item_copies_details(recid)
requests = db.get_item_requests(recid)
loans = db.get_item_loans(recid)
purchases = db.get_item_purchases(CFG_BIBCIRCULATION_ACQ_STATUS_NEW, recid)
req_hist_overview = db.get_item_requests_historical_overview(recid)
loans_hist_overview = db.get_item_loans_historical_overview(recid)
purchases_hist_overview = db.get_item_purchases(CFG_BIBCIRCULATION_ACQ_STATUS_RECEIVED, recid)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_get_item_details(recid=recid,
copies=copies,
requests=requests,
loans=loans,
purchases=purchases,
req_hist_overview=req_hist_overview,
loans_hist_overview=loans_hist_overview,
purchases_hist_overview=purchases_hist_overview,
infos=infos,
ln=ln)
return page(title=_("Item details"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def get_item_requests_details(req, recid, request_id, ln=CFG_SITE_LANG):
"""
Display all requests for a specific item.
@type recid: integer.
@param recid: identify the record. It is also the primary key of
the table bibrec.
@type request_id: integer.
@param request_id: identify the hold request. It is also the primary key
of the table crcLOANREQUEST.
@return: Item requests details.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
if request_id:
db.cancel_request(request_id)
update_request_data(request_id)
result = db.get_item_requests(recid)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_get_item_requests_details(result=result,
ln=ln)
return page(title=_("Hold requests") + \
" - %s" % (book_title_from_MARC(recid)),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def get_item_loans_details(req, recid, barcode, loan_id, force,
ln=CFG_SITE_LANG):
"""
Show all the details about all current loans related with a record.
@type recid: integer.
@param recid: identify the record. It is also the primary key of
the table bibrec.
@type barcode: string.
@param barcode: identify the item. It is the primary key of the table
crcITEM.
@type loan_id: integer.
    @param loan_id: identify a loan. It is the primary key of the table
                    crcLOAN.
@type force: string.
@param force: force the renew of a loan, when usually this is not possible.
@return: item loans details.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
infos = []
if loan_id and barcode and force == 'true':
new_due_date = renew_loan_for_X_days(barcode)
#db.update_due_date(loan_id, new_due_date)
db.renew_loan(loan_id, new_due_date)
update_status_if_expired(loan_id)
infos.append(_("Loan renewed with success."))
elif barcode:
recid = db.get_id_bibrec(barcode)
item_description = db.get_item_description(barcode)
queue = db.get_queue_request(recid, item_description)
new_due_date = renew_loan_for_X_days(barcode)
force_renew_link = create_html_link(CFG_SITE_SECURE_URL +
'/admin2/bibcirculation/get_item_loans_details',
{'barcode': barcode, 'loan_id': loan_id, 'force': 'true',
'recid': recid, 'ln': ln}, (_("Yes")))
no_renew_link = create_html_link(CFG_SITE_SECURE_URL +
'/admin2/bibcirculation/get_item_loans_details',
{'recid': recid, 'ln': ln},
(_("No")))
if len(queue) != 0:
title = book_title_from_MARC(recid)
message = _("Another user is waiting for this book %(x_strong_tag_open)s%(x_title)s%(x_strong_tag_close)s.") % {'x_title': title, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'}
message += '\n\n'
message += _("Do you want renew this loan anyway?")
message += '\n\n'
message += "[%s] [%s]" % (force_renew_link, no_renew_link)
infos.append(message)
else:
db.renew_loan(loan_id, new_due_date)
#db.update_due_date(loan_id, new_due_date)
update_status_if_expired(loan_id)
infos.append(_("Loan renewed with success."))
result = db.get_item_loans(recid)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_get_item_loans_details(result=result,
recid=recid,
infos=infos,
ln=ln)
return page(title=_("Loans details") + \
" - %s" % (book_title_from_MARC(int(recid))),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def get_item_req_historical_overview(req, recid, ln=CFG_SITE_LANG):
"""
Display the requests historical overview of an item.
@type recid: integer.
@param recid: identify the record. It is also the primary key of
the table bibrec.
@return: Item requests - historical overview.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
req_hist_overview = db.get_item_requests_historical_overview(recid)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_get_item_req_historical_overview(
req_hist_overview=req_hist_overview,
ln=ln)
return page(title=_("Requests") + " - " + _("historical overview"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def get_item_loans_historical_overview(req, recid, ln=CFG_SITE_LANG):
"""
Display the loans historical overview of an item.
@type recid: integer.
@param recid: identify the record. It is also the primary key of
the table bibrec.
@return: Item loans - historical overview.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
loans_hist_overview = db.get_item_loans_historical_overview(recid)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_get_item_loans_historical_overview(
loans_hist_overview=loans_hist_overview,
ln=ln)
return page(title=_("Loans") + " - " + _("historical overview"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def add_new_copy_step1(req, ln=CFG_SITE_LANG):
"""
Add a new copy.
"""
navtrail_previous_links = '<a class="navtrail"' \
' href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
body = bc_templates.tmpl_add_new_copy_step1(ln)
return page(title=_("Add new copy") + " - I",
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def add_new_copy_step2(req, p, f, ln=CFG_SITE_LANG):
"""
Add a new copy.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
result = perform_request_search(cc="Books", sc="1", p=p, f=f)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_add_new_copy_step2(result=result, ln=ln)
return page(title=_("Add new copy") + " - II",
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def add_new_copy_step3(req, recid, barcode, ln=CFG_SITE_LANG):
"""
Add a new copy.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
infos = []
result = db.get_item_copies_details(recid)
libraries = db.get_internal_libraries()
navtrail_previous_links = '<a class="navtrail"' \
' href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
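    # A barcode passed in is kept only if it identifies an existing copy
    # (it is then shown as the original copy's barcode); otherwise it is
    # discarded and a temporary barcode is generated.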
if barcode is not None:
if not db.barcode_in_use(barcode):
barcode = None
tmp_barcode = generate_tmp_barcode()
body = bc_templates.tmpl_add_new_copy_step3(recid=recid,
result=result,
libraries=libraries,
original_copy_barcode=barcode,
tmp_barcode=tmp_barcode,
infos=infos,
ln=ln)
return page(title=_("Add new copy") + " - III",
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def add_new_copy_step4(req, barcode, library, location, collection, description,
loan_period, status, expected_arrival_date, recid,
ln=CFG_SITE_LANG):
"""
Add a new copy.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
navtrail_previous_links = '<a class="navtrail"' \
' href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
infos = []
result = db.get_item_copies_details(recid)
libraries = db.get_internal_libraries()
if db.barcode_in_use(barcode):
infos.append(_("The given barcode <strong>%(x_name)s</strong> is already in use.", x_name=barcode))
title = _("Add new copy") + " - III"
body = bc_templates.tmpl_add_new_copy_step3(recid=recid,
result=result,
libraries=libraries,
original_copy_barcode=None,
tmp_barcode=None,
infos=infos,
ln=ln)
elif not barcode:
infos.append(_("The given barcode is empty."))
title = _("Add new copy") + " - III"
body = bc_templates.tmpl_add_new_copy_step3(recid=recid,
result=result,
libraries=libraries,
original_copy_barcode=None,
tmp_barcode=None,
infos=infos,
ln=ln)
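    # Temporary barcodes (prefix 'tmp') are only accepted for copies that
    # are not yet circulating.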
elif barcode[:3] == 'tmp' \
and status in [CFG_BIBCIRCULATION_ITEM_STATUS_ON_SHELF,
CFG_BIBCIRCULATION_ITEM_STATUS_ON_LOAN,
CFG_BIBCIRCULATION_ITEM_STATUS_IN_PROCESS]:
infos.append(_("The status selected does not accept tamporary barcodes."))
title = _("Add new copy") + " - III"
tmp_barcode = generate_tmp_barcode()
body = bc_templates.tmpl_add_new_copy_step3(recid=recid,
result=result,
libraries=libraries,
original_copy_barcode=None,
tmp_barcode=tmp_barcode,
infos=infos,
ln=ln)
else:
library_name = db.get_library_name(library)
tup_infos = (barcode, library, library_name, location, collection,
description, loan_period, status, expected_arrival_date,
recid)
title = _("Add new copy") + " - IV"
body = bc_templates.tmpl_add_new_copy_step4(tup_infos=tup_infos, ln=ln)
return page(title=title,
uid=id_user,
req=req,
body=body,
metaheaderadd='<link rel="stylesheet" href="%s/vendors/jquery-ui/themes/themes/jquery-ui.min.css" '\
'type="text/css" />' % CFG_SITE_SECURE_URL,
language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def add_new_copy_step5(req, barcode, library, location, collection, description,
loan_period, status, expected_arrival_date, recid,
ln=CFG_SITE_LANG):
"""
Add a new copy.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
infos = []
if not db.barcode_in_use(barcode):
db.add_new_copy(barcode, recid, library, collection, location, description.strip() or '-',
loan_period, status, expected_arrival_date)
update_requests_statuses(barcode)
else:
infos.append(_("The given barcode <strong>%(x_name)s</strong> is already in use.", x_name=barcode))
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_add_new_copy_step5(infos=infos, recid=recid, ln=ln)
return page(title=_("Add new copy") + " - V",
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def delete_copy_step1(req, barcode, ln):
#id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
infos = []
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
barcode = barcode.strip('\'" \t')
recid = db.get_id_bibrec(barcode)
if recid:
#recid = recid[0]
infos.append(_("Do you really want to delete this copy of the book?"))
copies = db.get_item_copies_details(recid)
title = _("Delete copy")
body = bc_templates.tmpl_delete_copy_step1(barcode_to_delete=barcode,
recid=recid,
result=copies,
infos=infos,
ln=ln)
else:
message = _("""The barcode <strong>%(x_name)s</strong> was not found""", x_name=(barcode))
infos.append(message)
title = _("Item search")
body = bc_templates.tmpl_item_search(infos=infos, ln=ln)
return page(title=title,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def delete_copy_step2(req, barcode, ln):
#id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
infos = []
barcode = barcode.strip('\'" \t')
recid = db.get_id_bibrec(barcode)
if recid:
#recid = recid[0]
        if db.delete_copy(barcode) == 1:
message = _("The copy with barcode <strong>%(x_name)s</strong> has been deleted.", x_name=barcode)
else:
message = _('It was NOT possible to delete the copy with barcode <strong>%(x_name)s</strong>', x_name=barcode)
infos.append(message)
copies = db.get_item_copies_details(recid)
requests = db.get_item_requests(recid)
loans = db.get_item_loans(recid)
purchases = db.get_item_purchases(CFG_BIBCIRCULATION_ACQ_STATUS_NEW, recid)
req_hist_overview = db.get_item_requests_historical_overview(recid)
loans_hist_overview = db.get_item_loans_historical_overview(recid)
purchases_hist_overview = db.get_item_purchases(CFG_BIBCIRCULATION_ACQ_STATUS_RECEIVED, recid)
title = _("Item details")
body = bc_templates.tmpl_get_item_details(
recid=recid, copies=copies,
requests=requests, loans=loans,
purchases=purchases,
req_hist_overview=req_hist_overview,
loans_hist_overview=loans_hist_overview,
purchases_hist_overview=purchases_hist_overview,
infos=infos, ln=ln)
else:
message = _("The barcode <strong>%(x_name)s</strong> was not found", x_name=barcode)
infos.append(message)
title = _("Item search")
body = bc_templates.tmpl_item_search(infos=infos, ln=ln)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
return page(title=title,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def update_item_info_step1(req, ln=CFG_SITE_LANG):
"""
Update the item's information.
"""
navtrail_previous_links = '<a class="navtrail"' \
' href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
body = bc_templates.tmpl_update_item_info_step1(ln=ln)
return page(title=_("Update item information"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def update_item_info_step2(req, p, f, ln=CFG_SITE_LANG):
"""
Update the item's information.
"""
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
result = perform_request_search(cc="Books", sc="1", p=p, f=f)
body = bc_templates.tmpl_update_item_info_step2(result=result, ln=ln)
return page(title="Update item information",
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def update_item_info_step3(req, recid, ln=CFG_SITE_LANG):
"""
Update the item's information.
"""
navtrail_previous_links = '<a class="navtrail"' \
' href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
result = db.get_item_copies_details(recid)
body = bc_templates.tmpl_update_item_info_step3(recid=recid, result=result,
ln=ln)
return page(title=_("Update item information"),
uid=id_user,
req=req,
body=body,
language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def update_item_info_step4(req, barcode, ln=CFG_SITE_LANG):
"""
Update the item's information.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
recid = db.get_id_bibrec(barcode)
result = db.get_item_info(barcode)
libraries = db.get_internal_libraries()
libraries += db.get_hidden_libraries()
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
    if recid is None:
        infos = []
        infos.append(_("Barcode <strong>%(x_name)s</strong> not found",
                       x_name=barcode))
        return item_search(req, infos, ln)
body = bc_templates.tmpl_update_item_info_step4(recid=recid,
result=result,
libraries=libraries,
ln=ln)
return page(title=_("Update item information"),
uid=id_user,
req=req,
body=body,
language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def update_item_info_step5(req, barcode, old_barcode, library, location,
collection, description, loan_period, status,
expected_arrival_date, recid, ln=CFG_SITE_LANG):
"""
Update the item's information.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
library_name = db.get_library_name(library)
tup_infos = (barcode, old_barcode, library, library_name, location,
collection, description, loan_period, status,
expected_arrival_date, recid)
navtrail_previous_links = '<a class="navtrail"' \
' href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_update_item_info_step5(tup_infos=tup_infos, ln=ln)
return page(title=_("Update item information"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def update_item_info_step6(req, tup_infos, ln=CFG_SITE_LANG):
"""
Update the item's information.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
infos = []
# tuple containing information for the update process.
(barcode, old_barcode, library_id, location, collection,
description, loan_period, status, expected_arrival_date, recid) = tup_infos
is_on_loan = db.is_on_loan(old_barcode)
#is_requested = db.is_requested(old_barcode)
# if item on loan and new status is CFG_BIBCIRCULATION_ITEM_STATUS_ON_SHELF,
# item has to be returned.
if is_on_loan and status == CFG_BIBCIRCULATION_ITEM_STATUS_ON_SHELF:
db.update_item_status(CFG_BIBCIRCULATION_ITEM_STATUS_ON_SHELF, old_barcode)
db.return_loan(old_barcode)
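    # Refuse to switch a copy to 'on loan' from this form when it is not
    # actually on loan; keep the copy's current status instead.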
if not is_on_loan and status == CFG_BIBCIRCULATION_ITEM_STATUS_ON_LOAN:
status = db.get_copy_details(barcode)[7]
infos.append(_("Item <strong>[%(x_name)s]</strong> updated, but the <strong>status was not modified</strong>.",x_name=old_barcode))
# update item information.
db.update_item_info(old_barcode, library_id, collection, location, description.strip(),
loan_period, status, expected_arrival_date)
update_requests_statuses(old_barcode)
navtrail_previous_links = '<a class="navtrail"' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
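    # If the barcode was edited, try to rename it; the rename is refused
    # when the new barcode is already assigned to another copy.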
if barcode != old_barcode:
if db.barcode_in_use(barcode):
infos.append(_("Item <strong>[%(x_name)s]</strong> updated, but the <strong>barcode was not modified</strong> because it is already in use.", x_name=old_barcode))
else:
if db.update_barcode(old_barcode, barcode):
infos.append(_("Item <strong>[%(x_name)s]</strong> updated to <strong>[%(x_new)s]</strong> with success.",
x_name=old_barcode, x_new=barcode))
else:
infos.append(_("Item <strong>[%(x_name)s]</strong> updated, but the <strong>barcode was not modified</strong> because it was not found (!?).", x_name=old_barcode))
copies = db.get_item_copies_details(recid)
requests = db.get_item_requests(recid)
loans = db.get_item_loans(recid)
purchases = db.get_item_purchases(CFG_BIBCIRCULATION_ACQ_STATUS_NEW, recid)
req_hist_overview = db.get_item_requests_historical_overview(recid)
loans_hist_overview = db.get_item_loans_historical_overview(recid)
purchases_hist_overview = db.get_item_purchases(CFG_BIBCIRCULATION_ACQ_STATUS_RECEIVED, recid)
body = bc_templates.tmpl_get_item_details(recid=recid,
copies=copies,
requests=requests,
loans=loans,
purchases=purchases,
req_hist_overview=req_hist_overview,
loans_hist_overview=loans_hist_overview,
purchases_hist_overview=purchases_hist_overview,
infos=infos,
ln=ln)
return page(title=_("Update item information"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
else:
return redirect_to_url(req, CFG_SITE_SECURE_URL +
"/record/edit/#state=edit&recid=" + str(recid))
def item_search(req, infos=None, ln=CFG_SITE_LANG):
    """
    Display a form where it is possible to search for an item.
    """
    if infos is None:
        infos = []
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">' \
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
_ = gettext_set_language(ln)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
body = bc_templates.tmpl_item_search(infos=infos, ln=ln)
return page(title=_("Item search"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def item_search_result(req, p, f, ln=CFG_SITE_LANG):
"""
Search an item and return a list with all the possible results. To retrieve
the information desired, we use the method 'perform_request_search' (from
search_engine.py). In the case of BibCirculation, we are just looking for
books (items) inside the collection 'Books'.
@type p: string
@param p: search pattern
@type f: string
@param f: search field
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
infos = []
if p == '':
infos.append(_('Empty string.') + ' ' + _('Please, try again.'))
return item_search(req, infos, ln)
if f == 'barcode':
p = p.strip('\'" \t')
recid = db.get_id_bibrec(p)
if recid is None:
infos.append(_('The barcode %(x_strong_tag_open)s%(x_barcode)s%(x_strong_tag_close)s does not exist on BibCirculation database.') % {'x_barcode': p, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'})
body = bc_templates.tmpl_item_search(infos=infos, ln=ln)
else:
return get_item_details(req, recid, ln=ln)
elif f == 'recid':
p = p.strip('\'" \t')
recid = p
if not record_exists(recid):
infos.append(_("Requested record does not seem to exist."))
body = bc_templates.tmpl_item_search(infos=infos, ln=ln)
else:
return get_item_details(req, recid, ln=ln)
else:
result = perform_request_search(cc="Books", sc="1", p=p, f=f)
body = bc_templates.tmpl_item_search_result(result=result, ln=ln)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">' \
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
return page(title=_("Item search result"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
###
### "Borrower" related templates
###
def get_borrower_details(req, borrower_id, update, ln=CFG_SITE_LANG):
"""
Display the details of a borrower.
@type borrower_id: integer.
@param borrower_id: identify the borrower. It is also the primary key of
the table crcBORROWER.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
if update and CFG_CERN_SITE:
update_user_info_from_ldap(borrower_id)
borrower = db.get_borrower_details(borrower_id)
    if borrower is None:
info = _('Borrower not found.') + ' ' + _('Please, try again.')
return borrower_search(req, info, False, ln)
else:
requests = db.get_borrower_request_details(borrower_id)
loans = db.get_borrower_loan_details(borrower_id)
notes = db.get_borrower_notes(borrower_id)
ill = db.get_ill_requests_details(borrower_id)
proposals = db.get_proposal_requests_details(borrower_id)
req_hist = db.bor_requests_historical_overview(borrower_id)
loans_hist = db.bor_loans_historical_overview(borrower_id)
ill_hist = db.bor_ill_historical_overview(borrower_id)
proposal_hist = db.bor_proposal_historical_overview(borrower_id)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_borrower_details(borrower=borrower,
requests=requests,
loans=loans,
notes=notes,
ill=ill,
proposals=proposals,
req_hist=req_hist,
loans_hist=loans_hist,
ill_hist=ill_hist,
proposal_hist=proposal_hist,
ln=ln)
return page(title=_("Borrower details"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def add_new_borrower_step1(req, ln=CFG_SITE_LANG):
"""
Add new borrower. Step 1
"""
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
body = bc_templates.tmpl_add_new_borrower_step1(ln=ln)
return page(title=_("Add new borrower") + " - I",
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def add_new_borrower_step2(req, name, email, phone, address, mailbox,
notes, ln=CFG_SITE_LANG):
"""
Add new borrower. Step 2.
@type name: string.
@type email: string.
@type phone: string.
@type address: string.
@type mailbox: string.
@type notes: string.
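
    Notes handling (sketch; values illustrative): a non-empty 'notes' string
    is stored as a dict keyed by timestamp, e.g.
    {'2013-05-17 10:42:00': 'Library card replaced.'}, so it can later be
    parsed back with looks_like_dictionary()/eval().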
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
infos = []
if name == '':
infos.append(_("Please, insert a name"))
if email == '':
infos.append(_("Please, insert a valid email address"))
else:
borrower_id = db.get_borrower_id_by_email(email)
if borrower_id is not None:
infos.append(_("There is already a borrower using the following email:")
+ " <strong>%s</strong>" % (email))
tup_infos = (name, email, phone, address, mailbox, notes)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
if len(infos) > 0:
body = bc_templates.tmpl_add_new_borrower_step1(tup_infos=tup_infos,
infos=infos, ln=ln)
title = _("Add new borrower") + " - I"
else:
if notes != '':
borrower_notes = {}
note_time = time.strftime("%Y-%m-%d %H:%M:%S")
borrower_notes[note_time] = notes
else:
borrower_notes = ''
borrower_id = db.new_borrower(None, name, email, phone,
address, mailbox, borrower_notes)
return redirect_to_url(req,
'%s/admin2/bibcirculation/get_borrower_details?ln=%s&borrower_id=%s' \
% (CFG_SITE_SECURE_URL, ln, borrower_id))
#body = bc_templates.tmpl_add_new_borrower_step2(tup_infos=tup_infos,
# infos=infos, ln=ln)
#title = _("Add new borrower") + " - II"
return page(title=title,
uid=id_user,
req=req,
body=body,
language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def add_new_borrower_step3(req, tup_infos, ln=CFG_SITE_LANG):
"""
Add new borrower. Step 3.
@type tup_infos: tuple.
@param tup_infos: tuple containing borrower information.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
if tup_infos[5] != '':
borrower_notes = {}
note_time = time.strftime("%Y-%m-%d %H:%M:%S")
borrower_notes[note_time] = str(tup_infos[5])
else:
borrower_notes = ''
db.new_borrower(None, tup_infos[0], tup_infos[1], tup_infos[2],
tup_infos[3], tup_infos[4], str(borrower_notes))
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_add_new_borrower_step3(ln=ln)
return page(title=_("Add new borrower") + " - III",
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def update_borrower_info_step1(req, borrower_id, ln=CFG_SITE_LANG):
"""
Update the borrower's information.
@param borrower_id: identify the borrower. It is also the primary key of
the table crcBORROWER.
"""
navtrail_previous_links = '<a class="navtrail"' \
' href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
borrower_details = db.get_borrower_details(borrower_id)
tup_infos = (borrower_details[0], borrower_details[2], borrower_details[3],
borrower_details[4], borrower_details[5], borrower_details[6])
body = bc_templates.tmpl_update_borrower_info_step1(tup_infos=tup_infos,
ln=ln)
return page(title=_("Update borrower information"),
uid=id_user,
req=req,
body=body,
language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def update_borrower_info_step2(req, borrower_id, name, email, phone, address,
mailbox, ln=CFG_SITE_LANG):
"""
Update the borrower's information.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
infos = []
if name == '':
infos.append(_("Please, insert a name"))
if email == '':
infos.append(_("Please, insert a valid email address"))
else:
borrower_email_id = db.get_borrower_id_by_email(email)
if borrower_email_id is not None and borrower_id != borrower_email_id:
infos.append(_("There is already a borrower using the following email:")
+ " <strong>%s</strong>" % (email))
tup_infos = (borrower_id, name, email, phone, address, mailbox)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
if len(infos) > 0:
body = bc_templates.tmpl_update_borrower_info_step1(tup_infos=tup_infos,
infos=infos, ln=ln)
else:
db.update_borrower_info(borrower_id, name, email,
phone, address, mailbox)
return redirect_to_url(req,
'%s/admin2/bibcirculation/get_borrower_details?ln=%s&borrower_id=%s' \
% (CFG_SITE_SECURE_URL, ln, borrower_id))
return page(title=_("Update borrower information"),
uid=id_user,
req=req,
body=body,
language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def get_borrower_requests_details(req, borrower_id, request_id,
ln=CFG_SITE_LANG):
"""
Display loans details of a borrower.
@type borrower_id: integer.
@param borrower_id: identify the borrower. It is also the primary key of
the table crcBORROWER.
@type request_id: integer.
@param request_id: identify the hold request to be cancelled
@return: borrower requests details.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
if request_id:
db.cancel_request(request_id)
update_request_data(request_id)
result = db.get_borrower_request_details(borrower_id)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
name = db.get_borrower_name(borrower_id)
title = _("Hold requests details") + " - %s" % (name)
body = bc_templates.tmpl_borrower_request_details(result=result,
borrower_id=borrower_id,
ln=ln)
return page(title=title,
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def get_borrower_loans_details(req, recid, barcode, borrower_id,
renewal, force, loan_id, ln=CFG_SITE_LANG):
"""
Show borrower's loans details.
@type recid: integer.
@param recid: identify the record. It is also the primary key of
the table bibrec.
@type barcode: string.
@param barcode: identify the item. It is the primary key of the table
crcITEM.
@type borrower_id: integer.
@param borrower_id: identify the borrower. It is also the primary key of
the table crcBORROWER.
@type renewal: string.
@param renewal: renew all loans.
@type force: string.
    @param force: force the renewal of a loan even when it would normally
                  not be allowed.
@type loan_id: integer.
    @param loan_id: identify a loan. It is the primary key of the table
crcLOAN.
@return: borrower loans details.
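
    Dispatch overview (summary of the branches below):
        barcode + loan_id + recid   -> renew one loan, unless other users
                                       are waiting in the queue
        barcode + loan_id + force   -> renew even with a non-empty queue
        borrower_id + renewal=true  -> renew all loans of the borrower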
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
infos = []
force_renew_link = create_html_link(CFG_SITE_SECURE_URL +
'/admin2/bibcirculation/get_borrower_loans_details',
{'barcode': barcode, 'borrower_id': borrower_id,
'loan_id': loan_id, 'force': 'true', 'ln': ln},
(_("Yes")))
no_renew_link = create_html_link(CFG_SITE_SECURE_URL +
'/admin2/bibcirculation/get_borrower_loans_details',
{'borrower_id': borrower_id, 'ln': ln},
(_("No")))
if barcode and loan_id and recid:
item_description = db.get_item_description(barcode)
queue = db.get_queue_request(recid, item_description)
new_due_date = renew_loan_for_X_days(barcode)
if len(queue) != 0:
title = book_title_from_MARC(recid)
message = _("Another user is waiting for this book %(x_strong_tag_open)s%(x_title)s%(x_strong_tag_close)s.") % {'x_title': title, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'}
message += '\n\n'
message += _("Do you want renew this loan anyway?")
message += '\n\n'
message += "[%s] [%s]" % (force_renew_link, no_renew_link)
infos.append(message)
else:
#db.update_due_date(loan_id, new_due_date)
db.renew_loan(loan_id, new_due_date)
#update_status_if_expired(loan_id)
infos.append(_("Loan renewed with success."))
elif loan_id and barcode and force == 'true':
new_due_date = renew_loan_for_X_days(barcode)
db.renew_loan(loan_id, new_due_date)
update_status_if_expired(loan_id)
infos.append(_("Loan renewed with success."))
    elif borrower_id and renewal == 'true':
list_of_loans = db.get_recid_borrower_loans(borrower_id)
for (loan_id, recid, barcode) in list_of_loans:
item_description = db.get_item_description(barcode)
queue = db.get_queue_request(recid, item_description)
new_due_date = renew_loan_for_X_days(barcode)
force_renewall_link = create_html_link(CFG_SITE_SECURE_URL +
'/admin2/bibcirculation/get_borrower_loans_details',
{'barcode': barcode, 'borrower_id': borrower_id,
'loan_id': loan_id, 'force': 'true', 'ln': ln},
(_("Yes")))
if len(queue) != 0:
title = book_title_from_MARC(recid)
message = _("Another user is waiting for this book %(x_strong_tag_open)s%(x_title)s%(x_strong_tag_close)s.") % {'x_title': title, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'}
message += '\n\n'
message += _("Do you want renew this loan anyway?")
message += '\n\n'
message += "[%s] [%s]" % (force_renewall_link, no_renew_link)
infos.append(message)
else:
db.renew_loan(loan_id, new_due_date)
update_status_if_expired(loan_id)
if infos == []:
infos.append(_("All loans renewed with success."))
borrower_loans = db.get_borrower_loan_details(borrower_id)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_borrower_loans_details(
borrower_loans=borrower_loans,
borrower_id=borrower_id,
infos=infos, ln=ln)
return page(title=_("Loans details") + \
" - %s" %(db.get_borrower_name(borrower_id)),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def bor_loans_historical_overview(req, borrower_id, ln=CFG_SITE_LANG):
"""
Display the loans historical overview of a borrower.
@type borrower_id: integer.
@param borrower_id: identify the borrower. It is also the primary key of
the table crcBORROWER.
@return: borrower loans - historical overview.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
loans_hist_overview = db.bor_loans_historical_overview(borrower_id)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_bor_loans_historical_overview(
loans_hist_overview = loans_hist_overview,
ln=ln)
return page(title=_("Loans") + " - " + _("historical overview"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def bor_requests_historical_overview(req, borrower_id, ln=CFG_SITE_LANG):
"""
Display the requests historical overview of a borrower.
@type borrower_id: integer.
@param borrower_id: identify the borrower. It is also the primary key of
the table crcBORROWER.
@return: borrower requests - historical overview.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
req_hist_overview = db.bor_requests_historical_overview(borrower_id)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_bor_requests_historical_overview(
req_hist_overview = req_hist_overview,
ln=ln)
return page(title=_("Requests") + " - " + _("historical overview"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def get_borrower_ill_details(req, borrower_id, request_type='', ln=CFG_SITE_LANG):
"""
Display ILL details of a borrower.
@type borrower_id: integer.
@param borrower_id: identify the borrower. It is also the primary key of
the table crcBORROWER.
@type ill_id: integer.
@param ill_id: identify the ILL request. It is also the primary key
of the table crcILLREQUEST.
@return: borrower ILL details.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
if request_type == 'proposal-book':
result = db.get_proposal_requests_details(borrower_id)
else:
result = db.get_ill_requests_details(borrower_id)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
name = db.get_borrower_name(borrower_id)
title = _("ILL details") + "- %s" % (name)
body = bc_templates.tmpl_borrower_ill_details(result=result,
borrower_id=borrower_id,
ln=ln)
return page(title=title,
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def bor_ill_historical_overview(req, borrower_id, request_type='', ln=CFG_SITE_LANG):
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
if request_type == 'proposal-book':
result = db.bor_proposal_historical_overview(borrower_id)
else:
result = db.bor_ill_historical_overview(borrower_id)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
name = db.get_borrower_name(borrower_id)
title = _("ILL historical overview") + " - %s" % (name)
body = bc_templates.tmpl_borrower_ill_details(result=result,
borrower_id=borrower_id,
ln=ln)
return page(title=title,
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def borrower_notification(req, borrower_id, template, message, load_msg_template,
subject, send_message, from_address, ln=CFG_SITE_LANG):
"""
Send an email to a borrower or simply load and display an editable email
template.
@type borrower_id: integer.
@param borrower_id: identify the borrower. It is also the primary key of
the table crcBORROWER.
@type borrower_email: string.
@param borrower_email: The librarian can change the email manually.
In that case, this value will be taken instead
                           of the one in the borrower details.
@type template: string.
@param template: The name of the notification template to be loaded.
If the @param load_msg_template holds True, the
template is not loaded.
@type message: string.
@param message: Message to be sent if the flag @param send_message is set.
@type subject: string.
@param subject: Subject of the message.
@type from_address: string.
@param from_address: From address in the message sent.
@return: Display the email template or send an email to a borrower.
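
    Flow (summary of the branches below; 'overdue_letter' is a hypothetical
    template name):
        load_msg_template='False' -> 'template' already holds the email body
        send_message              -> 'message' is sent via send_email()
        otherwise                 -> load_template('overdue_letter') is
                                     loaded and displayed for editing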
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
email = db.get_borrower_email(borrower_id)
if load_msg_template == 'False' and template is not None:
# Do not load the template. It is the email body itself.
body = bc_templates.tmpl_borrower_notification(email=email,
subject=subject,
email_body=template,
borrower_id=borrower_id,
from_address=from_address,
ln=ln)
elif send_message:
send_email(fromaddr = from_address,
toaddr = email,
subject = subject,
content = message,
header = '',
footer = '',
attempt_times = 1,
attempt_sleeptime = 10
)
body = bc_templates.tmpl_send_notification(ln=ln)
else:
show_template = load_template(template)
body = bc_templates.tmpl_borrower_notification(email=email,
subject=subject,
email_body=show_template,
borrower_id=borrower_id,
from_address=from_address,
ln=ln)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">' \
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
return page(title="User Notification",
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def get_borrower_notes(req, borrower_id, delete_key, library_notes,
ln=CFG_SITE_LANG):
"""
Retrieve the notes of a borrower.
@type borrower_id: integer.
@param borrower_id: identify the borrower. It is also the primary key of
the table crcBORROWER.
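
    Note timestamps double as delete keys (illustrative values):
        get_borrower_notes(req, 42, delete_key='2013-05-17 10:42:00',
                           library_notes=None)
        # removes the note that was added at that time.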
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
if delete_key and borrower_id:
if looks_like_dictionary(db.get_borrower_notes(borrower_id)):
borrower_notes = eval(db.get_borrower_notes(borrower_id))
if delete_key in borrower_notes.keys():
del borrower_notes[delete_key]
db.update_borrower_notes(borrower_id, borrower_notes)
elif library_notes:
if db.get_borrower_notes(borrower_id):
if looks_like_dictionary(db.get_borrower_notes(borrower_id)):
borrower_notes = eval(db.get_borrower_notes(borrower_id))
else:
borrower_notes = {}
else:
borrower_notes = {}
note_time = time.strftime("%Y-%m-%d %H:%M:%S")
if note_time not in borrower_notes.keys():
borrower_notes[note_time] = str(library_notes)
db.update_borrower_notes(borrower_id, borrower_notes)
borrower_notes = db.get_borrower_notes(borrower_id)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">' \
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
body = bc_templates.tmpl_borrower_notes(borrower_notes=borrower_notes,
borrower_id=borrower_id,
ln=ln)
return page(title=_("Borrower notes"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def borrower_search(req, empty_barcode, redirect_to_new_request=False,
ln=CFG_SITE_LANG):
"""
    Page (for administrators) where it is possible to search
    for a borrower (listed in the crcBORROWER table) using his/her name,
email, phone or id.
If redirect_to_new_request is False, the returned page will be "Borrower details"
If redirect_to_new_request is True, the returned page will be "New Request"
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
infos = []
if empty_barcode:
infos.append(empty_barcode)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">' \
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
body = bc_templates.tmpl_borrower_search(infos=infos,
redirect_to_new_request=redirect_to_new_request,
ln=ln)
if redirect_to_new_request:
title = _("New Request")
else:
title = _("Borrower Search")
return page(title=title,
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def borrower_search_result(req, column, string, redirect_to_new_request=False,
ln=CFG_SITE_LANG):
"""
Search a borrower and return a list with all the possible results.
@type column: string
@param column: identify the column, of the table crcBORROWER, that will be
considered during the search. Can be 'name', 'email' or 'id'.
@type string: string
@param string: string used for the search process.
    If redirect_to_new_request is False, the returned page will be "Borrower details"
    If redirect_to_new_request is True, the returned page will be "New Request"
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
if string == '':
message = _('Empty string.') + ' ' + _('Please, try again.')
return borrower_search(req, message, redirect_to_new_request, ln)
else:
result = search_user(column, string)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">' \
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
if len(result) == 1:
if redirect_to_new_request:
return create_new_request_step1(req, result[0][0])
else:
return get_borrower_details(req, result[0][0], False, ln)
#return create_new_request_step1(req, borrower_id, p, f, search, ln)
else:
body = bc_templates.tmpl_borrower_search_result(result=result,
redirect_to_new_request=redirect_to_new_request,
ln=ln)
return page(title=_("Borrower search result"),
uid=id_user,
req=req,
body=body,
language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
###
### ILL/Purchase/Acquisition related functions.
### Naming of the methods is not intuitive and should be improved,
### with appropriate documentation added where required.
### The methods could also be refactored.
###
def register_ill_from_proposal(req, ill_request_id, bor_id=None, ln=CFG_SITE_LANG):
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
book_info = db.get_ill_book_info(ill_request_id)
infos = []
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
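    # book_info is stored as a str()'d dict (assumed example:
    # "{'recid': 123456}"), hence the looks_like_dictionary()/eval()
    # round-trip below before 'recid' is read.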
if looks_like_dictionary(book_info):
book_info = eval(book_info)
if not bor_id:
bid = db.get_ill_borrower(ill_request_id)
else:
bid = bor_id
if 'recid' in book_info and bid:
recid = book_info['recid']
if not db.has_loan_request(bid, recid, ill=1):
db.tag_requests_as_done(bid, recid=recid)
library_notes = {}
library_notes[time.strftime("%Y-%m-%d %H:%M:%S")] = \
_("This ILL has been created from a proposal.")
db.register_ill_from_proposal(ill_request_id,
bid, library_notes)
infos.append(_('An ILL has been created for the user.'))
else:
infos.append(_('An active ILL already exists for this user on this record.'))
else:
            infos.append(_('Could not create an ILL from the proposal.'))
else:
        infos.append(_('Could not create an ILL from the proposal.'))
ill_req = db.get_ill_requests(CFG_BIBCIRCULATION_ILL_STATUS_NEW)
body = bc_templates.tmpl_list_ill(ill_req, infos=infos, ln=ln)
return page(title=_("ILL requests"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
#return redirect_to_url(req,
# '%s/admin2/bibcirculation/list_proposal?status=%s' % \
# (CFG_SITE_SECURE_URL, CFG_BIBCIRCULATION_PROPOSAL_STATUS_PUT_ASIDE))
def register_ill_request_with_no_recid_step1(req, borrower_id,
ln=CFG_SITE_LANG):
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
infos = []
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_register_ill_request_with_no_recid_step1(
infos=infos,
borrower_id=borrower_id,
admin=True, ln=ln)
return page(title=_("Register ILL request"),
uid=id_user,
req=req,
metaheaderadd = "<link rel=\"stylesheet\" href=\"%s/vendors/jquery-ui/themes/redmond/jquery-ui.min.css\" type=\"text/css\" />" % CFG_SITE_SECURE_URL,
body=body,
language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def register_ill_request_with_no_recid_step2(req, title, authors, place,
publisher, year, edition, isbn, budget_code,
period_of_interest_from, period_of_interest_to,
additional_comments, only_edition, key, string,
borrower_id, ln=CFG_SITE_LANG):
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
infos = []
book_info = (title, authors, place, publisher, year, edition, isbn)
request_details = (budget_code, period_of_interest_from,
period_of_interest_to, additional_comments, only_edition)
if borrower_id in (None, '', 'None'):
body = None
if not key:
borrowers_list = None
elif not string:
infos.append(_('Empty string.') + ' ' + _('Please, try again.'))
borrowers_list = None
else:
if validate_date_format(period_of_interest_from) is False:
infos = []
infos.append(_("The period of interest %(x_strong_tag_open)sFrom: %(x_date)s%(x_strong_tag_close)s is not a valid date or date format") % {'x_date': period_of_interest_from, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'})
body = bc_templates.tmpl_register_ill_request_with_no_recid_step1(
infos=infos,
borrower_id=None,
admin=True,
ln=ln)
elif validate_date_format(period_of_interest_to) is False:
infos = []
infos.append(_("The period of interest %(x_strong_tag_open)sTo: %(x_date)s%(x_strong_tag_close)s is not a valid date or date format") % {'x_date': period_of_interest_to, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'})
body = bc_templates.tmpl_register_ill_request_with_no_recid_step1(
infos=infos,
ln=ln)
else:
result = search_user(key, string)
borrowers_list = []
if len(result) == 0:
infos.append(_("0 borrowers found."))
else:
for user in result:
borrower_data = db.get_borrower_data_by_id(user[0])
borrowers_list.append(borrower_data)
        if body is None:
body = bc_templates.tmpl_register_ill_request_with_no_recid_step2(
book_info=book_info, request_details=request_details,
result=borrowers_list, key=key, string=string,
infos=infos, ln=ln)
else:
user_info = db.get_borrower_data_by_id(borrower_id)
return register_ill_request_with_no_recid_step3(req, title, authors,
                                                        place, publisher, year, edition,
isbn, user_info, budget_code,
period_of_interest_from,
period_of_interest_to,
additional_comments, only_edition,
ln)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">'\
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
return page(title=_("Register ILL request"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
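# Tuple layouts threaded through the no-recid ILL steps above (from the code):
#   book_info       = (title, authors, place, publisher, year, edition, isbn)
#   request_details = (budget_code, period_of_interest_from,
#                      period_of_interest_to, additional_comments, only_edition)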
def register_ill_request_with_no_recid_step3(req, title, authors, place,
publisher, year, edition, isbn,
user_info, budget_code,
period_of_interest_from,
period_of_interest_to,
additional_comments,
only_edition, ln=CFG_SITE_LANG):
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">'\
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
request_details = (budget_code, period_of_interest_from,
period_of_interest_to, additional_comments, only_edition)
book_info = (title, authors, place, publisher, year, edition, isbn)
if user_info is None:
return register_ill_request_with_no_recid_step2(req, title, authors,
place, publisher, year, edition, isbn, budget_code,
period_of_interest_from, period_of_interest_to,
additional_comments, only_edition, 'name', None,
None, ln)
else:
body = bc_templates.tmpl_register_ill_request_with_no_recid_step3(
book_info=book_info,
user_info=user_info,
request_details=request_details,
admin=True,
ln=ln)
return page(title=_("Register ILL request"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def register_ill_request_with_no_recid_step4(req, book_info, borrower_id,
request_details, ln):
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">'\
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
_ = gettext_set_language(ln)
(title, authors, place, publisher, year, edition, isbn) = book_info
#create_ill_record(book_info))
(budget_code, period_of_interest_from,
period_of_interest_to, library_notes, only_edition) = request_details
ill_request_notes = {}
if library_notes:
ill_request_notes[time.strftime("%Y-%m-%d %H:%M:%S")] = \
str(library_notes)
### budget_code ###
    if db.get_borrower_data_by_id(borrower_id) is None:
_ = gettext_set_language(ln)
infos = []
infos.append(_("<strong>Request not registered:</strong> wrong borrower id"))
body = bc_templates.tmpl_register_ill_request_with_no_recid_step2(
book_info=book_info,
request_details=request_details, result=[],
key='name', string=None, infos=infos, ln=ln)
return page(title=_("Register ILL request"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
else:
        book_info = {'title': title, 'authors': authors, 'place': place,
                     'publisher': publisher, 'year': year, 'edition': edition,
                     'isbn': isbn}
db.ill_register_request_on_desk(borrower_id, book_info,
period_of_interest_from,
period_of_interest_to,
CFG_BIBCIRCULATION_ILL_STATUS_NEW,
str(ill_request_notes),
only_edition, 'book', budget_code)
return list_ill_request(req, CFG_BIBCIRCULATION_ILL_STATUS_NEW, ln)
def register_ill_book_request(req, borrower_id, ln=CFG_SITE_LANG):
"""
    Display a form where it is possible to search for an item.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">'\
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
_ = gettext_set_language(ln)
infos = []
body = bc_templates.tmpl_register_ill_book_request(infos=infos,
borrower_id=borrower_id,
ln=ln)
return page(title=_("Register ILL Book request"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def register_ill_book_request_result(req, borrower_id, p, f, ln=CFG_SITE_LANG):
"""
Search an item and return a list with all the possible results. To retrieve
    the desired information, we use the method 'perform_request_search' (from
search_engine.py). In the case of BibCirculation, we are just looking for
books (items) inside the collection 'Books'.
@type p: string
@param p: search pattern
@type f: string
@param f: search field
@return: list of recids
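
    Example (illustrative values):
        register_ill_book_request_result(req, 42, p='quantum mechanics',
                                         f='title')
        # searches the 'Books' collection and, with no hits, falls back to
        # the manual ILL registration form of step 1.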
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
infos = []
if p == '':
infos.append(_('Empty string.') + ' ' + _('Please, try again.'))
body = bc_templates.tmpl_register_ill_book_request(infos=infos,
borrower_id=borrower_id,
ln=ln)
else:
if f == 'barcode':
p = p.strip('\'" \t')
recid = db.get_id_bibrec(p)
if recid is None:
infos.append(_('The barcode %(x_strong_tag_open)s%(x_barcode)s%(x_strong_tag_close)s does not exist on BibCirculation database.') % {'x_barcode': p, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'})
body = bc_templates.tmpl_register_ill_book_request(infos=infos,
borrower_id=borrower_id,
ln=ln)
else:
body = bc_templates.tmpl_register_ill_book_request_result(
result=[recid],
borrower_id=borrower_id,
ln=ln)
else:
result = perform_request_search(cc="Books", sc="1", p=p, f=f)
if len(result) == 0:
return register_ill_request_with_no_recid_step1(req,
borrower_id, ln)
else:
body = bc_templates.tmpl_register_ill_book_request_result(
result=result,
borrower_id=borrower_id,
ln=ln)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">'\
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
return page(title=_("Register ILL Book request"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def register_ill_article_request_step1(req, ln=CFG_SITE_LANG):
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
infos = []
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">' \
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
body = bc_templates.tmpl_register_ill_article_request_step1(infos=infos,
ln=ln)
return page(title=_("Register ILL Article request"),
uid=id_user,
req=req,
body=body,
metaheaderadd = "<link rel=\"stylesheet\" href=\"%s/vendors/jquery-ui/themes/redmond/jquery-ui.css\" type=\"text/css\" />"%(CFG_SITE_SECURE_URL),
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def register_ill_article_request_step2(req, periodical_title, article_title,
author, report_number, volume, issue,
pages, year, budget_code, issn,
period_of_interest_from,
period_of_interest_to,
additional_comments, key, string,
ln=CFG_SITE_LANG):
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
infos = []
if key and not string:
infos.append(_('Empty string.') + ' ' + _('Please, try again.'))
article_info = (periodical_title, article_title, author, report_number,
volume, issue, pages, year, issn)
request_details = (period_of_interest_from, period_of_interest_to,
budget_code, additional_comments)
body = bc_templates.tmpl_register_ill_article_request_step2(
article_info=article_info,
request_details=request_details,
result=None, key=key,
string=string, infos=infos,
ln=ln)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">'\
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
return page(title=_("Register ILL request"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
result = search_user(key, string)
borrowers_list = []
if len(result) == 0 and key:
if CFG_CERN_SITE:
infos.append(_("0 borrowers found.") + ' ' +_("Search by CCID."))
else:
new_borrower_link = create_html_link(CFG_SITE_SECURE_URL +
'/admin2/bibcirculation/add_new_borrower_step1',
{'ln': ln}, _("Register new borrower."))
message = _("0 borrowers found.") + ' ' + new_borrower_link
infos.append(message)
else:
for user in result:
borrower_data = db.get_borrower_data_by_id(user[0])
borrowers_list.append(borrower_data)
if validate_date_format(period_of_interest_from) is False:
infos = []
infos.append(_("The period of interest %(x_strong_tag_open)sFrom: %(x_date)s%(x_strong_tag_close)s is not a valid date or date format") % {'x_date': period_of_interest_from, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'})
body = bc_templates.tmpl_register_ill_article_request_step1(infos=infos,
ln=ln)
elif validate_date_format(period_of_interest_to) is False:
infos = []
infos.append(_("The period of interest %(x_strong_tag_open)sTo: %(x_date)s%(x_strong_tag_close)s is not a valid date or date format") % {'x_date': period_of_interest_to, 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'})
body = bc_templates.tmpl_register_ill_article_request_step1(infos=infos,
ln=ln)
else:
article_info = (periodical_title, article_title, author, report_number,
volume, issue, pages, year, issn)
request_details = (period_of_interest_from, period_of_interest_to,
budget_code, additional_comments)
body = bc_templates.tmpl_register_ill_article_request_step2(
article_info=article_info,
request_details=request_details,
result=borrowers_list,
key=key, string=string,
infos=infos, ln=ln)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">'\
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
return invenio.webpage.page(title=_("Register ILL request"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def register_ill_article_request_step3(req, periodical_title, title, authors,
report_number, volume, issue,
page_number, year, issn, user_info,
request_details, ln=CFG_SITE_LANG):
#id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
#info = (title, authors, "", "", year, "", issn)
#create_ill_record(info)
item_info = {'periodical_title': periodical_title, 'title': title,
'authors': authors, 'place': "", 'publisher': "",
                 'year': year, 'edition': "", 'issn': issn,
                 'volume': volume, 'issue': issue, 'page': page_number}
(period_of_interest_from, period_of_interest_to, budget_code,
library_notes) = request_details
only_edition = ""
if user_info is None:
return register_ill_article_request_step2(req, periodical_title, title,
authors, report_number, volume, issue,
page_number, year, budget_code, issn,
period_of_interest_from,
period_of_interest_to,
library_notes, 'name', None, ln)
else:
borrower_id = user_info[0]
ill_request_notes = {}
if library_notes:
ill_request_notes[time.strftime("%Y-%m-%d %H:%M:%S")] \
= str(library_notes)
db.ill_register_request_on_desk(borrower_id, item_info,
period_of_interest_from,
period_of_interest_to,
CFG_BIBCIRCULATION_ILL_STATUS_NEW,
str(ill_request_notes),
only_edition, 'article', budget_code)
return list_ill_request(req, CFG_BIBCIRCULATION_ILL_STATUS_NEW, ln)
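# item_info keys used for article ILLs in register_ill_article_request_step3
# above: periodical_title, title, authors, place, publisher, year, edition,
# issn, volume, issue, page -- fields not applicable to articles are stored
# as empty strings.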
def register_purchase_request_step1(req, request_type, recid, title, authors,
place, publisher, year, edition, this_edition_only,
isbn, standard_number,
budget_code, cash, period_of_interest_from,
period_of_interest_to, additional_comments,
ln=CFG_SITE_LANG):
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
infos = []
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">'\
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
if recid:
fields = (request_type, recid, budget_code, cash,
period_of_interest_from, period_of_interest_to,
additional_comments)
else:
fields = (request_type, title, authors, place, publisher, year, edition,
this_edition_only, isbn, standard_number, budget_code,
cash, period_of_interest_from, period_of_interest_to,
additional_comments)
body = bc_templates.tmpl_register_purchase_request_step1(infos=infos,
fields=fields, admin=True, ln=ln)
return page(title=_("Register purchase request"),
uid=id_user,
req=req,
body=body,
language=ln,
metaheaderadd='<link rel="stylesheet" ' \
'href="%s/vendors/jquery-ui/themes/redmond/jquery-ui.css" ' \
'type="text/css" />' % CFG_SITE_SECURE_URL,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def register_purchase_request_step2(req, request_type, recid, title, authors,
place, publisher, year, edition, this_edition_only,
isbn, standard_number,
budget_code, cash, period_of_interest_from,
period_of_interest_to, additional_comments,
p, f, ln=CFG_SITE_LANG):
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">'\
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
infos = []
if cash and budget_code == '':
budget_code = 'cash'
if recid:
fields = (request_type, recid, budget_code, cash,
period_of_interest_from, period_of_interest_to,
additional_comments)
else:
fields = (request_type, title, authors, place, publisher, year, edition,
this_edition_only, isbn, standard_number, budget_code,
cash, period_of_interest_from, period_of_interest_to,
additional_comments)
if budget_code == '' and not cash:
infos.append(_("Payment method information is mandatory. \
Please, type your budget code or tick the 'cash' checkbox."))
body = bc_templates.tmpl_register_purchase_request_step1(infos=infos,
fields=fields, admin=True, ln=ln)
else:
########################
########################
if p and not f:
infos.append(_('Empty string.') + ' ' + _('Please, try again.'))
body = bc_templates.tmpl_register_purchase_request_step2(
infos=infos, fields=fields,
result=None, p=p, f=f, ln=ln)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">'\
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
return page(title=_("Register ILL request"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
result = search_user(f, p)
borrowers_list = []
if len(result) == 0 and f:
if CFG_CERN_SITE:
infos.append(_("0 borrowers found.") + ' ' +_("Search by CCID."))
else:
new_borrower_link = create_html_link(CFG_SITE_SECURE_URL +
'/admin2/bibcirculation/add_new_borrower_step1',
{'ln': ln}, _("Register new borrower."))
message = _("0 borrowers found.") + ' ' + new_borrower_link
infos.append(message)
else:
for user in result:
borrower_data = db.get_borrower_data_by_id(user[0])
borrowers_list.append(borrower_data)
body = bc_templates.tmpl_register_purchase_request_step2(
infos=infos, fields=fields,
result=borrowers_list, p=p,
f=f, ln=ln)
########################
########################
return page(title=_("Register purchase request"),
uid=id_user,
req=req,
body=body,
language=ln,
metaheaderadd='<link rel="stylesheet" ' \
'href="%s/vendors/jquery-ui/themes/redmond/jquery-ui.css" ' \
'type="text/css" />' % CFG_SITE_SECURE_URL,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def register_purchase_request_step3(req, request_type, recid, title, authors,
place, publisher, year, edition, this_edition_only,
isbn, standard_number,
budget_code, cash, period_of_interest_from,
period_of_interest_to, additional_comments,
borrower_id, ln=CFG_SITE_LANG):
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">'\
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
infos = []
if recid:
fields = (request_type, recid, budget_code, cash,
period_of_interest_from, period_of_interest_to,
additional_comments)
else:
fields = (request_type, title, authors, place, publisher, year, edition,
this_edition_only, isbn, standard_number, budget_code,
cash, period_of_interest_from, period_of_interest_to,
additional_comments)
if budget_code == '' and not cash:
infos.append(_("Payment method information is mandatory. \
Please, type your budget code or tick the 'cash' checkbox."))
body = bc_templates.tmpl_register_purchase_request_step1(infos=infos,
fields=fields, admin=True, ln=ln)
else:
if recid:
item_info = "{'recid': " + str(recid) + "}"
title = book_title_from_MARC(recid)
else:
item_info = {'title': title, 'authors': authors, 'place': place,
'publisher': publisher, 'year' : year, 'edition': edition,
'isbn' : isbn, 'standard_number': standard_number}
ill_request_notes = {}
if additional_comments:
ill_request_notes[time.strftime("%Y-%m-%d %H:%M:%S")] \
= str(additional_comments)
if cash and budget_code == '':
budget_code = 'cash'
if borrower_id:
borrower_email = db.get_borrower_email(borrower_id)
else:
borrower_email = db.get_invenio_user_email(id_user)
borrower_id = db.get_borrower_id_by_email(borrower_email)
db.ill_register_request_on_desk(borrower_id, item_info,
period_of_interest_from,
period_of_interest_to,
CFG_BIBCIRCULATION_ACQ_STATUS_NEW,
str(ill_request_notes),
this_edition_only, request_type, budget_code)
msg_for_user = load_template('purchase_notification') % title
send_email(fromaddr = CFG_BIBCIRCULATION_ILLS_EMAIL,
toaddr = borrower_email,
subject = _("Your book purchase request"),
header = '', footer = '',
content = msg_for_user,
attempt_times=1,
attempt_sleeptime=10
)
return redirect_to_url(req,
'%s/admin2/bibcirculation/list_purchase?ln=%s&status=%s' % \
(CFG_SITE_SECURE_URL, ln,
CFG_BIBCIRCULATION_ACQ_STATUS_NEW))
return page(title=_("Register purchase request"),
uid=id_user,
req=req,
body=body,
language=ln,
metaheaderadd='<link rel="stylesheet" ' \
'href="%s/vendors/jquery-ui/themes/redmond/jquery-ui.css" ' \
'type="text/css" />' % CFG_SITE_SECURE_URL,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
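# Payment handling in register_purchase_request_step3 above: ticking 'cash'
# with an empty budget code stores the literal budget code 'cash'; the request
# is registered with status CFG_BIBCIRCULATION_ACQ_STATUS_NEW and the
# requester is notified by email via the 'purchase_notification' template.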
def ill_request_details_step1(req, delete_key, ill_request_id, new_status,
ln=CFG_SITE_LANG):
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
infos = []
if delete_key and ill_request_id:
if looks_like_dictionary(db.get_ill_request_notes(ill_request_id)):
library_notes = eval(db.get_ill_request_notes(ill_request_id))
if delete_key in library_notes.keys():
del library_notes[delete_key]
db.update_ill_request_notes(ill_request_id, library_notes)
if new_status:
db.update_ill_request_status(ill_request_id, new_status)
ill_request_borrower_details = \
db.get_ill_request_borrower_details(ill_request_id)
if ill_request_borrower_details is None \
or len(ill_request_borrower_details) == 0:
infos.append(_("Borrower request details not found."))
ill_request_details = db.get_ill_request_details(ill_request_id)
if ill_request_details is None or len(ill_request_details) == 0:
infos.append(_("Request not found."))
libraries = db.get_external_libraries()
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
title = _("ILL request details")
if infos == []:
body = bc_templates.tmpl_ill_request_details_step1(
ill_request_id=ill_request_id,
ill_request_details=ill_request_details,
libraries=libraries,
ill_request_borrower_details=ill_request_borrower_details,
ln=ln)
else:
body = bc_templates.tmpl_display_infos(infos, ln)
return page(title=title,
uid=id_user,
req=req,
metaheaderadd='<link rel="stylesheet" ' \
'href="%s/vendors/jquery-ui/themes/redmond/jquery-ui.css" ' \
'type="text/css" />' % CFG_SITE_SECURE_URL,
body=body,
language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def ill_request_details_step2(req, delete_key, ill_request_id, new_status,
library_id, request_date, expected_date,
arrival_date, due_date, return_date,
cost, _currency, barcode, library_notes,
book_info, article_info, ln=CFG_SITE_LANG):
#id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
if delete_key and ill_request_id:
if looks_like_dictionary(db.get_ill_request_notes(ill_request_id)):
library_previous_notes = eval(db.get_ill_request_notes(ill_request_id))
if delete_key in library_previous_notes.keys():
del library_previous_notes[delete_key]
db.update_ill_request_notes(ill_request_id, library_previous_notes)
if db.get_ill_request_notes(ill_request_id):
if looks_like_dictionary(db.get_ill_request_notes(ill_request_id)):
library_previous_notes = eval(db.get_ill_request_notes(ill_request_id))
else:
library_previous_notes = {}
else:
library_previous_notes = {}
if library_notes:
library_previous_notes[time.strftime("%Y-%m-%d %H:%M:%S")] = \
str(library_notes)
if new_status == CFG_BIBCIRCULATION_LOAN_STATUS_RETURNED:
borrower_id = db.get_ill_borrower(ill_request_id)
barcode = db.get_ill_barcode(ill_request_id)
db.update_ill_loan_status(borrower_id, barcode, return_date, 'ill')
db.update_ill_request(ill_request_id, library_id, request_date,
expected_date, arrival_date, due_date, return_date,
new_status, cost, barcode,
str(library_previous_notes))
request_type = db.get_ill_request_type(ill_request_id)
if request_type == 'book':
item_info = book_info
else:
item_info = article_info
db.update_ill_request_item_info(ill_request_id, item_info)
if new_status == CFG_BIBCIRCULATION_ILL_STATUS_ON_LOAN:
# Redirect to an email template when the ILL 'book' arrives
# (Not for articles.)
subject = _("ILL received: ")
book_info = db.get_ill_book_info(ill_request_id)
if looks_like_dictionary(book_info):
book_info = eval(book_info)
if 'recid' in book_info:
subject += "'" + book_title_from_MARC(int(book_info['recid'])) + "'"
bid = db.get_ill_borrower(ill_request_id)
msg = load_template("ill_received")
return redirect_to_url(req,
create_url(CFG_SITE_SECURE_URL +
'/admin2/bibcirculation/borrower_notification',
{'borrower_id': bid,
'subject': subject,
'load_msg_template': False,
'template': msg,
'from_address': CFG_BIBCIRCULATION_ILLS_EMAIL
}
)
)
return list_ill_request(req, new_status, ln)
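# Status side effects in ill_request_details_step2 above: RETURNED closes the
# ILL loan via update_ill_loan_status(); ON_LOAN redirects to the borrower
# notification form pre-filled with the 'ill_received' template so the
# requester can be told the book has arrived.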
def purchase_details_step1(req, delete_key, ill_request_id, new_status,
ln=CFG_SITE_LANG):
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
infos = []
if delete_key and ill_request_id:
if looks_like_dictionary(db.get_ill_request_notes(ill_request_id)):
library_notes = eval(db.get_ill_request_notes(ill_request_id))
if delete_key in library_notes.keys():
del library_notes[delete_key]
db.update_ill_request_notes(ill_request_id, library_notes)
if new_status:
db.update_ill_request_status(ill_request_id, new_status)
ill_request_borrower_details = \
db.get_purchase_request_borrower_details(ill_request_id)
if ill_request_borrower_details is None \
or len(ill_request_borrower_details) == 0:
infos.append(_("Borrower request details not found."))
ill_request_details = db.get_ill_request_details(ill_request_id)
if ill_request_details is None or len(ill_request_details) == 0:
infos.append(_("Request not found."))
vendors = db.get_all_vendors()
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
    title = _("Purchase details")
    if infos == []:
        body = bc_templates.tmpl_purchase_details_step1(
                                ill_request_id=ill_request_id,
                                ill_request_details=ill_request_details,
                                libraries=vendors,
                                ill_request_borrower_details=ill_request_borrower_details,
                                ln=ln)
    else:
        body = bc_templates.tmpl_display_infos(infos, ln)
return page(title=title,
uid=id_user,
req=req,
metaheaderadd = "<link rel=\"stylesheet\" href=\"%s/vendors/jquery-ui/themes/redmond/jquery-ui.css\" type=\"text/css\" />" % CFG_SITE_SECURE_URL,
body=body,
language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def purchase_details_step2(req, delete_key, ill_request_id, new_status,
library_id, request_date, expected_date,
arrival_date, due_date, return_date,
cost, budget_code, library_notes,
item_info, ln=CFG_SITE_LANG):
#id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
if delete_key and ill_request_id:
if looks_like_dictionary(db.get_ill_request_notes(ill_request_id)):
library_previous_notes = eval(db.get_ill_request_notes(ill_request_id))
if delete_key in library_previous_notes.keys():
del library_previous_notes[delete_key]
db.update_ill_request_notes(ill_request_id, library_previous_notes)
if db.get_ill_request_notes(ill_request_id):
if looks_like_dictionary(db.get_ill_request_notes(ill_request_id)):
library_previous_notes = eval(db.get_ill_request_notes(ill_request_id))
else:
library_previous_notes = {}
else:
library_previous_notes = {}
if library_notes:
library_previous_notes[time.strftime("%Y-%m-%d %H:%M:%S")] = \
str(library_notes)
if new_status == CFG_BIBCIRCULATION_LOAN_STATUS_RETURNED:
borrower_id = db.get_ill_borrower(ill_request_id)
db.update_purchase_request(ill_request_id, library_id, request_date,
expected_date, arrival_date, due_date, return_date,
new_status, cost, budget_code,
str(library_previous_notes))
request_type = db.get_ill_request_type(ill_request_id)
if request_type not in CFG_BIBCIRCULATION_PROPOSAL_TYPE:
db.update_ill_request_item_info(ill_request_id, item_info)
if new_status in (CFG_BIBCIRCULATION_PROPOSAL_STATUS_ON_ORDER,
CFG_BIBCIRCULATION_PROPOSAL_STATUS_PUT_ASIDE):
barcode = db.get_ill_barcode(ill_request_id)
if new_status == CFG_BIBCIRCULATION_PROPOSAL_STATUS_ON_ORDER:
db.update_item_status(CFG_BIBCIRCULATION_ITEM_STATUS_ON_ORDER, barcode)
subject = _("Book suggestion accepted: ")
template = "proposal_acceptance"
else:
db.update_item_status(CFG_BIBCIRCULATION_ITEM_STATUS_UNDER_REVIEW, barcode)
subject = _("Book suggestion refused: ")
template = "proposal_refusal"
book_info = db.get_ill_book_info(ill_request_id)
if looks_like_dictionary(book_info):
book_info = eval(book_info)
if 'recid' in book_info:
bid = db.get_ill_borrower(ill_request_id)
if db.has_loan_request(bid, book_info['recid']):
subject += "'" + book_title_from_MARC(int(book_info['recid'])) + "'"
return redirect_to_url(req,
create_url(CFG_SITE_SECURE_URL +
'/admin2/bibcirculation/borrower_notification',
{'borrower_id': bid,
'subject': subject,
'template': template,
'from_address': CFG_BIBCIRCULATION_ILLS_EMAIL
}
)
)
if new_status == CFG_BIBCIRCULATION_PROPOSAL_STATUS_RECEIVED:
barcode = db.get_ill_barcode(ill_request_id)
# Reset the item description to the default value.
db.set_item_description(barcode, '-')
#db.update_item_status(CFG_BIBCIRCULATION_ITEM_STATUS_IN_PROCESS, barcode)
borrower_id = db.get_ill_borrower(ill_request_id)
recid = db.get_id_bibrec(barcode)
if db.has_loan_request(borrower_id, recid):
            # If an ILL has already been created (after the book had been put
            # aside), there would be no waiting request by the proposer.
db.update_loan_request_status(CFG_BIBCIRCULATION_REQUEST_STATUS_WAITING,
barcode=barcode,
borrower_id=borrower_id)
return redirect_to_url(req,
'%s/admin2/bibcirculation/update_item_info_step4?barcode=%s' % \
(CFG_SITE_SECURE_URL, barcode))
if new_status == CFG_BIBCIRCULATION_ACQ_STATUS_RECEIVED:
subject = _("Purchase received: ")
book_info = db.get_ill_book_info(ill_request_id)
if looks_like_dictionary(book_info):
book_info = eval(book_info)
if 'recid' in book_info:
subject += "'" + book_title_from_MARC(int(book_info['recid'])) + "'"
bid = db.get_ill_borrower(ill_request_id)
if budget_code == 'cash':
msg = load_template("purchase_received_cash") % cost
else:
msg = load_template("purchase_received_tid") % cost
return redirect_to_url(req,
create_url(CFG_SITE_SECURE_URL +
'/admin2/bibcirculation/borrower_notification',
{'borrower_id': bid,
'subject': subject,
'load_msg_template': False,
'template': msg,
'from_address': CFG_BIBCIRCULATION_ILLS_EMAIL
}
)
)
if new_status in CFG_BIBCIRCULATION_ACQ_STATUS or \
new_status == CFG_BIBCIRCULATION_PROPOSAL_STATUS_ON_ORDER:
        # Items 'on order', whether acquired for the library or purchased on
        # behalf of the user, are displayed in the same list.
return redirect_to_url(req,
'%s/admin2/bibcirculation/list_purchase?ln=%s&status=%s' % \
(CFG_SITE_SECURE_URL, ln, new_status))
else:
return redirect_to_url(req,
'%s/admin2/bibcirculation/list_proposal?ln=%s&status=%s' % \
(CFG_SITE_SECURE_URL, ln, new_status))
def get_ill_library_notes(req, ill_id, delete_key, library_notes,
ln=CFG_SITE_LANG):
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
if delete_key and ill_id:
if looks_like_dictionary(db.get_ill_notes(ill_id)):
ill_notes = eval(db.get_ill_notes(ill_id))
if delete_key in ill_notes.keys():
del ill_notes[delete_key]
db.update_ill_notes(ill_id, ill_notes)
elif library_notes:
if db.get_ill_notes(ill_id):
if looks_like_dictionary(db.get_ill_notes(ill_id)):
ill_notes = eval(db.get_ill_notes(ill_id))
else:
ill_notes = {}
else:
ill_notes = {}
ill_notes[time.strftime("%Y-%m-%d %H:%M:%S")] = str(library_notes)
db.update_ill_notes(ill_id, ill_notes)
ill_notes = db.get_ill_notes(ill_id)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_ill_notes(ill_notes=ill_notes,
ill_id=ill_id,
ln=ln)
return page(title=_("ILL notes"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
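# Hedged illustration (added, not part of the original module): the ILL and
# library notes columns store a Python dict serialized with str(), keyed by a
# "%Y-%m-%d %H:%M:%S" timestamp; looks_like_dictionary() guards the eval()
# round-trip used in the handlers above. The helper below is hypothetical and
# is never called, so it does not change the module's behaviour.
def _example_notes_roundtrip():
    notes = {}
    notes[time.strftime("%Y-%m-%d %H:%M:%S")] = 'Sent a reminder to the library.'
    serialized = str(notes)        # what gets written to the notes column
    restored = eval(serialized)    # what the handlers read back after the guard
    return restored == notes       # True: the round-trip is lossless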
def list_ill_request(req, status, ln=CFG_SITE_LANG):
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
ill_req = db.get_ill_requests(status)
body = bc_templates.tmpl_list_ill(ill_req=ill_req, ln=ln)
return page(title=_("List of ILL requests"),
uid=id_user,
req=req,
body=body,
language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def list_purchase(req, status, recid=None, ln=CFG_SITE_LANG):
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
if recid:
# Purchases of a particular item to be displayed in the item info page.
purchase_reqs = db.get_item_purchases(status, recid)
else:
purchase_reqs = db.get_purchases(status)
body = bc_templates.tmpl_list_purchase(purchase_reqs, ln=ln)
return page(title=_("List of purchase requests"),
uid=id_user,
req=req,
body=body,
language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def list_proposal(req, status, ln=CFG_SITE_LANG):
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
if status == "requests-putaside":
requests = db.get_requests_on_put_aside_proposals()
body = bc_templates.tmpl_list_requests_on_put_aside_proposals(requests, ln=ln)
title=_("List of requests on put aside proposals")
else:
proposals = db.get_proposals(status)
body = bc_templates.tmpl_list_proposal(proposals, ln=ln)
title=_("List of proposals")
return page(title=title,
uid=id_user,
req=req,
body=body,
language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def ill_search(req, ln=CFG_SITE_LANG):
infos = []
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">'\
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
body = bc_templates.tmpl_ill_search(infos=infos, ln=ln)
return page(title=_("ILL search"),
uid=id_user,
req=req,
body=body,
language=ln,
metaheaderadd='<link rel="stylesheet" href="%s/vendors/jquery-ui/themes/redmond/jquery-ui.min.css" '\
'type="text/css" />' % CFG_SITE_SECURE_URL,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def ill_search_result(req, p, f, date_from, date_to, ln):
"""
Search an item and return a list with all the possible results. To retrieve
the information desired, we use the method 'perform_request_search' (from
search_engine.py). In the case of BibCirculation, we are just looking for
books (items) inside the collection 'Books'.
@type p: string
@param p: search pattern
@type f: string
@param f: search field
@return: list of recids
"""
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">'\
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
#id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
if not has_date_format(date_from):
date_from = '0000-00-00'
if not has_date_format(date_to):
date_to = '9999-12-31'
if f == 'title':
ill_req = db.search_ill_requests_title(p, date_from, date_to)
body = bc_templates.tmpl_list_ill(ill_req=ill_req, ln=ln)
elif f == 'ILL_request_ID':
ill_req = db.search_ill_requests_id(p, date_from, date_to)
body = bc_templates.tmpl_list_ill(ill_req=ill_req, ln=ln)
elif f == 'cost':
purchase_reqs = db.search_requests_cost(p, date_from, date_to)
body = bc_templates.tmpl_list_purchase(purchase_reqs=purchase_reqs, ln=ln)
elif f == 'notes':
purchase_reqs = db.search_requests_notes(p, date_from, date_to)
body = bc_templates.tmpl_list_purchase(purchase_reqs=purchase_reqs, ln=ln)
return page(title=_("List of ILL requests"),
req=req,
body=body,
language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
###
### "Library" related templates ###
###
def get_library_details(req, library_id, ln=CFG_SITE_LANG):
"""
Display the details of a library.
@type library_id: integer.
@param library_id: identify the library. It is also the primary key of
the table crcLIBRARY.
@return: library details.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
navtrail_previous_links = '<a class="navtrail" ' \
' href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
library_details = db.get_library_details(library_id)
if library_details is None:
_ = gettext_set_language(ln)
infos = []
infos.append(_('Library ID not found.'))
return search_library_step1(req, infos, ln)
library_items = db.get_library_items(library_id)
body = bc_templates.tmpl_library_details(library_details=library_details,
library_items=library_items,
ln=ln)
return page(title=_("Library details"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def merge_libraries_step1(req, library_id, f=None, p=None, ln=CFG_SITE_LANG):
"""
Step 1/3 of library merging procedure
@param library_id: ID of the library to be deleted
@param p: search pattern.
@param f: field
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
navtrail_previous_links = '<a class="navtrail" ' \
' href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
library_details = db.get_library_details(library_id)
library_items = db.get_library_items(library_id)
result = None
if f is not None:
if p in (None, '', '*'):
result = db.get_all_libraries() #list of (id, name)
elif f == 'name':
result = db.search_library_by_name(p)
elif f == 'email':
result = db.search_library_by_email(p)
body = bc_templates.tmpl_merge_libraries_step1(
library_details=library_details,
library_items=library_items,
result=result,
p=p,
ln=ln)
return page(title=_("Merge libraries"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def merge_libraries_step2(req, library_from, library_to, ln=CFG_SITE_LANG):
"""
Step 2/3 of library merging procedure
Confirm the libraries selected
@param library_from: ID of the library to be deleted
@param library_to: ID of the resulting library
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
navtrail_previous_links = '<a class="navtrail" ' \
' href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
library_from_details = db.get_library_details(library_from)
library_from_items = db.get_library_items(library_from)
library_to_details = db.get_library_details(library_to)
library_to_items = db.get_library_items(library_to)
body = bc_templates.tmpl_merge_libraries_step2(
library_from_details=library_from_details,
library_from_items=library_from_items,
library_to_details=library_to_details,
library_to_items=library_to_items,
ln=ln)
return page(title=_("Merge libraries"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def merge_libraries_step3(req, library_from, library_to, ln=CFG_SITE_LANG):
"""
Step 3/3 of library merging procedure
Perform the merge and display the details of the resulting library
@param library_from: ID of the library to be deleted
@param library_to: ID of the resulting library
"""
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
db.merge_libraries(library_from, library_to)
return get_library_details(req, library_to, ln)
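# Hedged overview (added comment): the merge wizard runs step1 (pick the
# target library, optionally filtering by name/email), step2 (confirm
# library_from, which is deleted, against library_to, which is kept) and
# step3, where db.merge_libraries() moves the holdings before the kept
# library's details are shown.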
def add_new_library_step1(req, ln=CFG_SITE_LANG):
"""
Add a new Library.
"""
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
body = bc_templates.tmpl_add_new_library_step1(ln=ln)
return page(title=_("Add new library"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def add_new_library_step2(req, name, email, phone, address,
lib_type, notes, ln=CFG_SITE_LANG):
"""
Add a new Library.
"""
tup_infos = (name, email, phone, address, lib_type, notes)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
_ = gettext_set_language(ln)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
body = bc_templates.tmpl_add_new_library_step2(tup_infos=tup_infos, ln=ln)
return page(title=_("Add new library"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def add_new_library_step3(req, name, email, phone, address,
lib_type, notes, ln=CFG_SITE_LANG):
"""
Add a new Library.
"""
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
db.add_new_library(name, email, phone, address, lib_type, notes)
body = bc_templates.tmpl_add_new_library_step3(ln=ln)
return page(title=_("Add new library"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def update_library_info_step1(req, ln=CFG_SITE_LANG):
"""
Update the library's information.
"""
infos = []
navtrail_previous_links = '<a class="navtrail"' \
' href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
body = bc_templates.tmpl_update_library_info_step1(infos=infos, ln=ln)
return page(title=_("Update library information"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def update_library_info_step2(req, column, string, ln=CFG_SITE_LANG):
"""
Update the library's information.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
if not string:
infos = []
infos.append(_("Empty string.") + ' ' + _('Please, try again.'))
body = bc_templates.tmpl_update_library_info_step1(infos=infos, ln=ln)
elif string == '*':
result = db.get_all_libraries()
body = bc_templates.tmpl_update_library_info_step2(result=result, ln=ln)
else:
if column == 'name':
result = db.search_library_by_name(string)
else:
result = db.search_library_by_email(string)
body = bc_templates.tmpl_update_library_info_step2(result=result, ln=ln)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
return page(title=_("Update library information"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def update_library_info_step3(req, library_id, ln=CFG_SITE_LANG):
"""
Update the library's information.
library_id - identify the library. It is also the primary key of
the table crcLIBRARY.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
navtrail_previous_links = '<a class="navtrail"' \
' href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
library_info = db.get_library_details(library_id)
body = bc_templates.tmpl_update_library_info_step3(
library_info=library_info,
ln=ln)
return page(title=_("Update library information"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def update_library_info_step4(req, name, email, phone, address, lib_type,
library_id, ln=CFG_SITE_LANG):
"""
Update the library's information.
"""
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
tup_infos = (library_id, name, email, phone, address, lib_type)
body = bc_templates.tmpl_update_library_info_step4(tup_infos=tup_infos,
ln=ln)
return page(title=_("Update library information"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def update_library_info_step5(req, name, email, phone, address, lib_type,
library_id, ln=CFG_SITE_LANG):
"""
Update the library's information.
"""
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
#(library_id, name, email, phone, address) = tup_infos
db.update_library_info(library_id, name, email, phone, address, lib_type)
body = bc_templates.tmpl_update_library_info_step5(ln=ln)
return page(title=_("Update library information"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
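# Hedged overview (added comment): the library-update wizard chains step1
# (search form) -> step2 (result list) -> step3 (edit form) -> step4
# (confirmation of the collected tuple) -> step5, where
# db.update_library_info() finally commits the change.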
def get_library_notes(req, library_id, delete_key,
library_notes, ln=CFG_SITE_LANG):
"""
    Retrieve notes related to a library.
library_id - identify the library. It is also the primary key of
the table crcLIBRARY.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
if delete_key and library_id:
if looks_like_dictionary(db.get_library_notes(library_id)):
lib_notes = eval(db.get_library_notes(library_id))
if delete_key in lib_notes.keys():
del lib_notes[delete_key]
db.update_library_notes(library_id, lib_notes)
elif library_notes:
if db.get_library_notes(library_id):
if looks_like_dictionary(db.get_library_notes(library_id)):
lib_notes = eval(db.get_library_notes(library_id))
else:
lib_notes = {}
else:
lib_notes = {}
lib_notes[time.strftime("%Y-%m-%d %H:%M:%S")] = str(library_notes)
db.update_library_notes(library_id, lib_notes)
lib_notes = db.get_library_notes(library_id)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">'\
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
body = bc_templates.tmpl_library_notes(library_notes=lib_notes,
library_id=library_id,
ln=ln)
return page(title=_("Library notes"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def search_library_step1(req, infos=[], ln=CFG_SITE_LANG):
"""
Display the form where we can search a library (by name or email).
"""
navtrail_previous_links = '<a class="navtrail"' \
' href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
body = bc_templates.tmpl_search_library_step1(infos=infos,
ln=ln)
return page(title=_("Search library"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def search_library_step2(req, column, string, ln=CFG_SITE_LANG):
"""
Search a library and return a list with all the possible results, using the
parameters received from the previous step.
column - identify the column, of the table crcLIBRARY, that will be
considered during the search. Can be 'name' or 'email'.
    string - string used for the search process.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
if not string:
infos = []
infos.append(_("Emptry string.") + ' ' + _('Please, try again.'))
body = bc_templates.tmpl_search_library_step1(infos=infos, ln=ln)
elif string == '*':
result = db.get_all_libraries()
body = bc_templates.tmpl_search_library_step2(result=result, ln=ln)
else:
if column == 'name':
result = db.search_library_by_name(string)
else:
result = db.search_library_by_email(string)
body = bc_templates.tmpl_search_library_step2(result=result, ln=ln)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a> > <a class="navtrail" ' \
'href="%s/admin2/bibcirculation/loan_on_desk_step1?ln=%s">'\
'Circulation Management' \
'</a> ' % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, ln)
return page(title=_("Search library"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
###
### "Vendor" related templates ###
###
def get_vendor_details(req, vendor_id, ln=CFG_SITE_LANG):
"""
Display the details of a vendor.
@type vendor_id: integer.
@param vendor_id: identify the vendor. It is also the primary key of
the table crcVENDOR.
@return: vendor details.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
vendor_details = db.get_vendor_details(vendor_id)
navtrail_previous_links = '<a class="navtrail" ' \
' href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_vendor_details(vendor_details=vendor_details,
ln=ln)
return page(title=_("Vendor details"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def add_new_vendor_step1(req, ln=CFG_SITE_LANG):
"""
Add a new Vendor.
"""
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
body = bc_templates.tmpl_add_new_vendor_step1(ln=ln)
return page(title=_("Add new vendor"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def add_new_vendor_step2(req, name, email, phone, address,
notes, ln=CFG_SITE_LANG):
"""
Add a new Vendor.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
tup_infos = (name, email, phone, address, notes)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_add_new_vendor_step2(tup_infos=tup_infos, ln=ln)
return page(title=_("Add new vendor"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def add_new_vendor_step3(req, name, email, phone, address,
notes, ln=CFG_SITE_LANG):
"""
Add a new Vendor.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
db.add_new_vendor(name, email, phone, address, notes)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_add_new_vendor_step3(ln=ln)
return page(title=_("Add new vendor"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def update_vendor_info_step1(req, ln=CFG_SITE_LANG):
"""
Update the vendor's information.
"""
infos = []
navtrail_previous_links = '<a class="navtrail"' \
' href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
_ = gettext_set_language(ln)
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
body = bc_templates.tmpl_update_vendor_info_step1(infos=infos, ln=ln)
return page(title=_("Update vendor information"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def update_vendor_info_step2(req, column, string, ln=CFG_SITE_LANG):
"""
Update the vendor's information.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
if not string:
infos = []
infos.append(_('Empty string.') + ' ' + _('Please, try again.'))
body = bc_templates.tmpl_update_vendor_info_step1(infos=infos, ln=ln)
elif string == '*':
result = db.get_all_vendors()
body = bc_templates.tmpl_update_vendor_info_step2(result=result, ln=ln)
else:
if column == 'name':
result = db.search_vendor_by_name(string)
else:
result = db.search_vendor_by_email(string)
body = bc_templates.tmpl_update_vendor_info_step2(result=result, ln=ln)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
return page(title=_("Update vendor information"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def update_vendor_info_step3(req, vendor_id, ln=CFG_SITE_LANG):
"""
    Update the vendor's information.
vendor_id - identify the vendor. It is also the primary key of
the table crcVENDOR.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
vendor_info = db.get_vendor_details(vendor_id)
navtrail_previous_links = '<a class="navtrail"' \
' href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_update_vendor_info_step3(vendor_info=vendor_info,
ln=ln)
return page(title=_("Update vendor information"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def update_vendor_info_step4(req, name, email, phone, address,
vendor_id, ln=CFG_SITE_LANG):
"""
Update the vendor's information.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
tup_infos = (vendor_id, name, email, phone, address)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_update_vendor_info_step4(tup_infos=tup_infos,
ln=ln)
return page(title=_("Update vendor information"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def update_vendor_info_step5(req, name, email, phone, address,
vendor_id, ln=CFG_SITE_LANG):
"""
    Update the vendor's information.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
db.update_vendor_info(vendor_id, name, email, phone, address)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_update_vendor_info_step5(ln=ln)
return page(title=_("Update vendor information"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def get_vendor_notes(req, vendor_id, add_notes, new_note, ln=CFG_SITE_LANG):
"""
    Retrieve notes related to a vendor.
    vendor_id - identify the vendor. It is also the primary key of
                the table crcVENDOR.
    @param add_notes: display the textarea where a new note will be written.
    @param new_note: note that will be added to the other vendor's notes.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
if new_note:
date = '[' + time.ctime() + '] '
new_line = '\n'
new_note = date + new_note + new_line
db.add_new_vendor_note(new_note, vendor_id)
vendor_notes = db.get_vendor_notes(vendor_id)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_vendor_notes(vendor_notes=vendor_notes,
vendor_id=vendor_id,
add_notes=add_notes,
ln=ln)
return page(title=_("Vendor notes"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
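# Hedged contrast note (added comment): unlike ILL/library notes, vendor notes
# are kept as a plain text log; get_vendor_notes() above prepends
# '[' + time.ctime() + '] ' to each entry and appends a newline before
# db.add_new_vendor_note() stores it.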
def search_vendor_step1(req, ln=CFG_SITE_LANG):
"""
Display the form where we can search a vendor (by name or email).
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
infos = []
navtrail_previous_links = '<a class="navtrail"' \
' href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
body = bc_templates.tmpl_search_vendor_step1(infos=infos,
ln=ln)
return page(title=_("Search vendor"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
def search_vendor_step2(req, column, string, ln=CFG_SITE_LANG):
"""
Search a vendor and return a list with all the possible results, using the
parameters received from the previous step.
column - identify the column, of the table crcVENDOR, that will be
considered during the search. Can be 'name' or 'email'.
str - string used for the search process.
"""
id_user = getUid(req)
(auth_code, auth_message) = is_adminuser(req)
if auth_code != 0:
return mustloginpage(req, auth_message)
_ = gettext_set_language(ln)
if not string:
infos = []
infos.append(_('Empty string.') + ' ' + _('Please, try again.'))
body = bc_templates.tmpl_search_vendor_step1(infos=infos,
ln=ln)
elif string == '*':
result = db.get_all_vendors()
body = bc_templates.tmpl_search_vendor_step2(result=result, ln=ln)
else:
if column == 'name':
result = db.search_vendor_by_name(string)
else:
result = db.search_vendor_by_email(string)
body = bc_templates.tmpl_search_vendor_step2(result=result, ln=ln)
navtrail_previous_links = '<a class="navtrail" ' \
'href="%s/help/admin">Admin Area' \
'</a>' % (CFG_SITE_SECURE_URL,)
return page(title=_("Search vendor"),
uid=id_user,
req=req,
body=body, language=ln,
navtrail=navtrail_previous_links,
lastupdated=__lastupdated__)
| gpl-2.0 | 8,329,778,068,518,806,000 | 37.514354 | 347 | 0.521028 | false |
googleapis/python-phishingprotection | google/cloud/phishingprotection_v1beta1/services/phishing_protection_service_v1_beta1/transports/base.py | 1 | 6953 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
import packaging.version
import pkg_resources
import google.auth # type: ignore
import google.api_core # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.phishingprotection_v1beta1.types import phishingprotection
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-phishingprotection",
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
try:
# google.auth.__version__ was added in 1.26.0
_GOOGLE_AUTH_VERSION = google.auth.__version__
except AttributeError:
try: # try pkg_resources if it is available
_GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version
except pkg_resources.DistributionNotFound: # pragma: NO COVER
_GOOGLE_AUTH_VERSION = None
class PhishingProtectionServiceV1Beta1Transport(abc.ABC):
"""Abstract transport class for PhishingProtectionServiceV1Beta1."""
AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
DEFAULT_HOST: str = "phishingprotection.googleapis.com"
def __init__(
self,
*,
host: str = DEFAULT_HOST,
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
**kwargs,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
host += ":443"
self._host = host
scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)
# Save the scopes.
self._scopes = scopes
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials and credentials_file:
raise core_exceptions.DuplicateCredentialArgs(
"'credentials_file' and 'credentials' are mutually exclusive"
)
if credentials_file is not None:
credentials, _ = google.auth.load_credentials_from_file(
credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
)
elif credentials is None:
credentials, _ = google.auth.default(
**scopes_kwargs, quota_project_id=quota_project_id
)
# If the credentials is service account credentials, then always try to use self signed JWT.
if (
always_use_jwt_access
and isinstance(credentials, service_account.Credentials)
and hasattr(service_account.Credentials, "with_always_use_jwt_access")
):
credentials = credentials.with_always_use_jwt_access(True)
# Save the credentials.
self._credentials = credentials
# TODO(busunkim): This method is in the base transport
# to avoid duplicating code across the transport classes. These functions
# should be deleted once the minimum required versions of google-auth is increased.
# TODO: Remove this function once google-auth >= 1.25.0 is required
@classmethod
def _get_scopes_kwargs(
cls, host: str, scopes: Optional[Sequence[str]]
) -> Dict[str, Optional[Sequence[str]]]:
"""Returns scopes kwargs to pass to google-auth methods depending on the google-auth version"""
scopes_kwargs = {}
if _GOOGLE_AUTH_VERSION and (
packaging.version.parse(_GOOGLE_AUTH_VERSION)
>= packaging.version.parse("1.25.0")
):
scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES}
else:
scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES}
return scopes_kwargs
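    # Hedged illustration (added, not generated code): with
    # google-auth >= 1.25.0 the helper above returns
    #     {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES}
    # while older releases only understand
    #     {"scopes": scopes or cls.AUTH_SCOPES}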
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
self.report_phishing: gapic_v1.method.wrap_method(
self.report_phishing, default_timeout=600.0, client_info=client_info,
),
}
@property
def report_phishing(
self,
) -> Callable[
[phishingprotection.ReportPhishingRequest],
Union[
phishingprotection.ReportPhishingResponse,
Awaitable[phishingprotection.ReportPhishingResponse],
],
]:
raise NotImplementedError()
__all__ = ("PhishingProtectionServiceV1Beta1Transport",)
| apache-2.0 | -7,155,690,963,816,367,000 | 38.731429 | 103 | 0.65238 | false |
oostende/openblachole | lib/python/Components/Converter/TransponderInfo.py | 2 | 2843 | # -*- coding: utf-8 -*-
from Components.Converter.Converter import Converter
from enigma import iServiceInformation, iPlayableService, iPlayableServicePtr, eServiceCenter
from Components.Element import cached
from ServiceReference import resolveAlternate, ServiceReference
from Tools.Transponder import ConvertToHumanReadable, getChannelNumber
from Components.NimManager import nimmanager
import Screens.InfoBar
class TransponderInfo(Converter, object):
def __init__(self, type):
Converter.__init__(self, type)
self.type = type.split(";")
@cached
def getText(self):
service = self.source.service
if isinstance(service, iPlayableServicePtr):
info = service and service.info()
ref = None
else: # reference
info = service and self.source.info
ref = service
if not info:
return ""
if ref:
nref = resolveAlternate(ref)
if nref:
ref = nref
info = eServiceCenter.getInstance().info(ref)
transponderraw = info.getInfoObject(ref, iServiceInformation.sTransponderData)
else:
transponderraw = info.getInfoObject(iServiceInformation.sTransponderData)
if "InRootOnly" in self.type and not self.rootBouquet():
return ""
if "NoRoot" in self.type and self.rootBouquet():
return ""
if transponderraw:
transponderdata = ConvertToHumanReadable(transponderraw)
if not transponderdata["system"]:
transponderdata["system"] = transponderraw.get("tuner_type", "None")
if not transponderdata["system"]:
return ""
if "DVB-T" in transponderdata["system"]:
return "%s %s %s %s" % (transponderdata["system"], transponderdata["channel"], transponderdata["frequency"], transponderdata["bandwidth"])
elif "DVB-C" in transponderdata["system"]:
return "%s %s %s %s %s" % (transponderdata["system"], transponderdata["frequency"], transponderdata["symbol_rate"], transponderdata["fec_inner"], \
transponderdata["modulation"])
return "%s %s %s %s %s %s %s" % (transponderdata["system"], transponderdata["frequency"], transponderdata["polarization_abbreviation"], transponderdata["symbol_rate"], \
transponderdata["fec_inner"], transponderdata["modulation"], transponderdata["detailed_satpos" in self.type and "orbital_position" or "orb_pos"])
if ref:
result = ref.toString().replace("%3a",":")
else:
result = info.getInfoString(iServiceInformation.sServiceref)
if "://" in result:
return _("Stream") + " " + result.rsplit("://", 1)[1].split("/")[0]
return ""
text = property(getText)
def rootBouquet(self):
servicelist = Screens.InfoBar.InfoBar.instance.servicelist
epg_bouquet = servicelist and servicelist.getRoot()
if ServiceReference(epg_bouquet).getServiceName():
return False
return True
def changed(self, what):
if what[0] != self.CHANGED_SPECIFIC or what[1] in (iPlayableService.evStart,):
Converter.changed(self, what)
| gpl-2.0 | -3,631,324,653,382,697,500 | 39.042254 | 172 | 0.724938 | false |
Maplenormandy/list-62x | python/dataProcessing/generatePlots.py | 1 | 1362 | import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
from statsmodels.stats.weightstats import ttost_paired
data = pd.read_csv(open('combined_data.csv'))
for t in data.index:
if int(data.loc[t, 'Baseline']) == 0:
data.loc[t, 'STF Baseline'] = data.loc[t, 'Succesfully Tracked Features 0']
data.loc[t, 'STF Experiment'] = data.loc[t, 'Succesfully Tracked Features 1']
else:
data.loc[t, 'STF Baseline'] = data.loc[t, 'Succesfully Tracked Features 1']
data.loc[t, 'STF Experiment'] = data.loc[t, 'Succesfully Tracked Features 0']
pvalue, stats1, stats2 = ttost_paired(data['STF Experiment'], data['STF Baseline'], 0, 10000)
print pvalue
print stats1
print stats2
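# Hedged note (added for clarity): ttost_paired runs the two one-sided tests
# for paired-sample equivalence within the margin (0, 10000); a small pvalue
# supports equivalence, and stats1/stats2 carry the (t, p, df) triples of the
# lower and upper one-sided tests respectively.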
plt.scatter(data.index, data['STF Baseline'], label='baseline')
plt.scatter(data.index, data['STF Experiment'], color="green", label='experiment')
plt.legend(loc='upper right')
plt.draw()
dataMax = max(data['STF Baseline'].max(), data['STF Experiment'].max())
bins = np.linspace(0, dataMax)
plt.figure()
plt.hist(data['STF Baseline'], alpha = 0.5, bins=bins, label="baseline")
plt.hist(data['STF Experiment'], alpha = 0.5, bins=bins, label="experiment")
plt.legend(loc='upper right')
plt.draw()
plt.figure()
plt.hist(data['STF Experiment'] - data['STF Baseline'], bins=30, color="red")
plt.xlabel('Experiment - Baseline')
plt.show()
| mit | 3,732,625,034,884,263,400 | 31.428571 | 93 | 0.693098 | false |
eduble/panteda | sakura/daemon/code/git.py | 1 | 4338 | from pathlib import Path
from sakura.common.tools import yield_operator_subdirs, run_cmd
from sakura.common.errors import APIRequestError
GIT_CLONE_TIMEOUT = 60.0 # seconds
GIT_LS_REMOTE_TIMEOUT = 5.0 # seconds
def fetch_updates(code_dir, code_ref):
if code_ref.startswith('branch:'):
remote_ref = code_ref[7:]
elif code_ref.startswith('tag:'):
remote_ref = 'refs/tags/' + code_ref[4:]
try:
run_cmd('git fetch origin %(remote_ref)s' % dict(
remote_ref = remote_ref), cwd=code_dir)
except:
raise APIRequestError('Fetching code failed. Verify given branch or tag.')
def get_worktree(code_workdir, repo_url, code_ref, commit_hash):
code_workdir = Path(code_workdir)
code_workdir.mkdir(parents=True, exist_ok=True)
code_workdir = code_workdir.resolve()
repo_url_path = repo_url.replace('//', '/').replace(':', '')
code_repodir = code_workdir / 'repos' / repo_url_path
# clone if needed
if not code_repodir.exists():
code_repodir.parent.mkdir(parents=True, exist_ok=True)
try:
run_cmd('git clone --no-checkout %(url)s %(dest)s' % dict(
url = repo_url,
dest = code_repodir),
timeout = GIT_CLONE_TIMEOUT)
except:
raise APIRequestError('Cloning repository failed. Verify given URL.')
# get worktree if needed
worktree_dir = code_workdir / 'worktrees' / repo_url_path / commit_hash
if not worktree_dir.exists():
# ensure our local clone knows this commit
fetch_updates(code_repodir, code_ref)
# create the worktree dir
worktree_dir.parent.mkdir(parents=True, exist_ok=True)
try:
run_cmd('git worktree add %(wtdir)s %(commit_hash)s' % dict(
wtdir = worktree_dir,
commit_hash = commit_hash), cwd=code_repodir)
except:
raise APIRequestError('Could not checkout code. Verify given commit hash.')
return worktree_dir
def get_commit_metadata(worktree_dir, commit_hash=None):
cmd = "git log -1 --format='%H%n%at%n%ct%n%ae%n%s'"
if commit_hash != None:
cmd += ' ' + commit_hash
try:
info_lines = run_cmd(cmd, cwd=worktree_dir).splitlines()
except:
raise APIRequestError('Could not find given commit hash.')
commit_hash, s_author_date, s_committer_date, author_email, commit_subject = info_lines
return dict(
commit_hash = commit_hash,
author_date = int(s_author_date),
committer_date = int(s_committer_date),
author_email = author_email,
commit_subject = commit_subject
)
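# Hedged illustration (added comment, made-up values): with the --format used
# above, `git log -1` prints five lines that map onto the returned dict:
#   3f1c2b7e9d...        -> commit_hash
#   1520000000           -> author_date (unix timestamp)
#   1520000042           -> committer_date
#   dev@example.com      -> author_email
#   Fix typo in README   -> commit_subject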
def list_code_revisions(repo_url, ref_type = None):
if ref_type is None:
return list_code_revisions(repo_url, 'tag') + list_code_revisions(repo_url, 'branch')
if ref_type == 'tag':
opt = '--tags'
rev_tags = ()
else: # branches
opt = '--heads'
rev_tags = ('HEAD',)
try:
info = run_cmd("git ls-remote %(opt)s %(url)s" % \
dict(opt = opt, url = repo_url), timeout = GIT_LS_REMOTE_TIMEOUT)
except:
raise APIRequestError('Querying repository failed. Verify given URL.')
words = info.strip().replace('\t', ' ').replace('/', ' ').replace('\n', ' ').split(' ')
commits = words[0::4]
refs = list(ref_type + ':' + w for w in words[3::4])
rev_tags = [ rev_tags ] * len(commits)
return tuple(zip(refs, commits, rev_tags))
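# Hedged example (added comment, made-up values): `git ls-remote --heads URL`
# prints lines like "<sha>\trefs/heads/master", so after the split above the
# function returns tuples such as
#   ('branch:master', 'deadbeef...', ('HEAD',))
# and, for ref_type == 'tag',
#   ('tag:v1.0', 'cafebabe...', ())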
def get_last_commit_hash(repo_url, code_ref):
words = code_ref.split(':')
if len(words) != 2 or words[0] not in ('branch', 'tag'):
raise APIRequestError('Invalid code ref.')
short_ref = words[1]
try:
info = run_cmd("git ls-remote %(url)s %(ref)s" % \
dict(url = repo_url, ref = short_ref), timeout = GIT_LS_REMOTE_TIMEOUT)
except:
raise APIRequestError('Querying repository failed. Verify given URL.')
return info.split()[0]
def list_operator_subdirs(code_workdir, repo_url, code_ref):
commit_hash = get_last_commit_hash(repo_url, code_ref)
worktree_dir = get_worktree(code_workdir, repo_url, code_ref, commit_hash)
return sorted(str(op_dir.relative_to(worktree_dir)) \
for op_dir in yield_operator_subdirs(worktree_dir))
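# Hedged usage sketch (added; the URL and workdir below are illustrative
# assumptions, not values from the original code). The function is defined
# but never called, so importing the module stays side-effect free.
def _example_checkout_flow():
    repo_url = 'https://example.com/operators.git'
    code_ref = 'branch:master'
    commit = get_last_commit_hash(repo_url, code_ref)
    worktree = get_worktree('/tmp/sakura-workdir', repo_url, code_ref, commit)
    return get_commit_metadata(worktree, commit)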
| gpl-3.0 | 5,805,838,144,312,942,000 | 41.116505 | 93 | 0.607653 | false |
mrtazz/towbar | tests/unit/test_towbar_unit.py | 1 | 1704 | # -*- coding: utf-8 -*-
import unittest
import os
import sys
import mock
sys.path.append(os.getcwd())
import towbar
class TestTowbar(unittest.TestCase):
def setUp(self):
self.t = towbar.Towbar("foo", "bar")
def tearDown(self):
pass
@mock.patch('time.time')
@mock.patch('requests.post')
def test_notify_myself_simple(self, mock_requests, mock_time):
mock_time.return_value = 1
data = {'notification[from_screen_name]': 'me',
'notification[message]': 'msg',
'notification[from_remote_service_id]': 1}
self.t.notify_myself("msg", "me")
mock_requests.assert_called_once_with('https://boxcar.io/notifications',
data=data,
auth=("foo", "bar"))
@mock.patch('time.time')
@mock.patch('requests.post')
def test_notify_myself_full(self, mock_requests, mock_time):
mock_time.return_value = 1
data = {'notification[from_screen_name]': 'me',
'notification[message]': 'msg',
'notification[from_remote_service_id]': 1,
"notification[source_url]": "source_url",
"notification[icon_url]": "icon_url",
"notification[sound]": "sound",
"callback": "callback"}
self.t.notify_myself("msg", "me", "source_url", "icon_url", "sound", "callback")
mock_requests.assert_called_once_with('https://boxcar.io/notifications',
data=data,
auth=("foo", "bar"))
if __name__ == '__main__':
unittest.main()
| mit | 5,397,757,464,395,163,000 | 34.5 | 88 | 0.524648 | false |
vmuriart/sqldef | src/parsers/sql1992_grammar.py | 1 | 124558 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# CAVEAT UTILITOR
#
# This file was automatically generated by Grako.
#
# https://pypi.python.org/pypi/grako/
#
# Any changes you make to it will be overwritten the next time
# the file is generated.
from __future__ import print_function, division, absolute_import, unicode_literals
from grako.parsing import graken, Parser
from grako.util import re, RE_FLAGS, generic_main # noqa
__version__ = (2016, 8, 1, 1, 11, 9, 0)
__all__ = [
'SqlParser',
'SqlSemantics',
'main'
]
KEYWORDS = set([])
class SqlParser(Parser):
def __init__(self,
whitespace='\\s+',
nameguard=None,
comments_re='/\\*[\\s\\S]*?\\*/',
eol_comments_re='--.*?$',
ignorecase=True,
left_recursion=True,
keywords=KEYWORDS,
namechars='',
**kwargs):
super(SqlParser, self).__init__(
whitespace=whitespace,
nameguard=nameguard,
comments_re=comments_re,
eol_comments_re=eol_comments_re,
ignorecase=ignorecase,
left_recursion=left_recursion,
keywords=keywords,
namechars=namechars,
**kwargs
)
@graken()
def _digit_(self):
self._pattern(r'\d+')
@graken()
def _double_quote_(self):
self._token('"')
@graken()
def _quote_(self):
self._token("'")
@graken()
def _left_paren_(self):
self._token('(')
@graken()
def _right_paren_(self):
self._token(')')
@graken()
def _asterisk_(self):
self._token('*')
@graken()
def _plus_sign_(self):
self._token('+')
@graken()
def _comma_(self):
self._token(',')
@graken()
def _minus_sign_(self):
self._token('-')
@graken()
def _period_(self):
self._token('.')
@graken()
def _solidus_(self):
self._token('/')
@graken()
def _colon_(self):
self._token(':')
@graken()
def _semicolon_(self):
self._token(';')
@graken()
def _less_than_operator_(self):
self._token('<')
@graken()
def _equals_operator_(self):
self._token('=')
@graken()
def _greater_than_operator_(self):
self._token('>')
@graken()
def _question_mark_(self):
self._token('?')
@graken()
def _underscore_(self):
self._token('_')
@graken()
def _regular_identifier_(self):
self._pattern(r'[a-z]\w*')
self._check_name()
@graken()
def _delimited_identifier_(self):
self._double_quote_()
self._delimited_identifier_body_()
self._double_quote_()
@graken()
def _delimited_identifier_body_(self):
self._pattern(r'(""|[^"\n])+')
@graken()
def _not_equals_operator_(self):
self._token('<>')
@graken()
def _greater_than_or_equals_operator_(self):
self._token('>=')
@graken()
def _less_than_or_equals_operator_(self):
self._token('<=')
@graken()
def _concatenation_operator_(self):
self._token('||')
@graken()
def _literal_(self):
with self._choice():
with self._option():
self._signed_numeric_literal_()
with self._option():
self._general_literal_()
self._error('no available options')
@graken()
def _unsigned_literal_(self):
with self._choice():
with self._option():
self._unsigned_numeric_literal_()
with self._option():
self._general_literal_()
self._error('no available options')
@graken()
def _general_literal_(self):
with self._choice():
with self._option():
self._character_string_literal_()
with self._option():
self._national_character_string_literal_()
with self._option():
self._bit_string_literal_()
with self._option():
self._hex_string_literal_()
with self._option():
self._datetime_literal_()
with self._option():
self._interval_literal_()
self._error('no available options')
@graken()
def _character_string_literal_(self):
with self._optional():
self._underscore_()
self._character_set_name_()
def block0():
self._quote_()
self._character_representation_()
self._quote_()
self._positive_closure(block0)
@graken()
def _character_representation_(self):
self._pattern(r"(''|[^'\n])*")
@graken()
def _national_character_string_literal_(self):
self._token('N')
def block0():
self._quote_()
self._character_representation_()
self._quote_()
self._positive_closure(block0)
@graken()
def _bit_string_literal_(self):
self._token('B')
def block0():
self._quote_()
with self._optional():
def block1():
self._bit_()
self._positive_closure(block1)
self._quote_()
self._positive_closure(block0)
@graken()
def _hex_string_literal_(self):
self._token('X')
def block0():
self._quote_()
with self._optional():
def block1():
self._hexit_()
self._positive_closure(block1)
self._quote_()
self._positive_closure(block0)
@graken()
def _bit_(self):
self._pattern(r'[01]')
@graken()
def _hexit_(self):
self._pattern(r'[a-f\d]')
@graken()
def _signed_numeric_literal_(self):
with self._optional():
self._sign_()
self._unsigned_numeric_literal_()
@graken()
def _unsigned_numeric_literal_(self):
with self._choice():
with self._option():
self._exact_numeric_literal_()
with self._option():
self._approximate_numeric_literal_()
self._error('no available options')
@graken()
def _exact_numeric_literal_(self):
with self._choice():
with self._option():
self._unsigned_integer_()
with self._optional():
self._period_()
with self._optional():
self._unsigned_integer_()
with self._option():
self._period_()
self._unsigned_integer_()
self._error('no available options')
@graken()
def _sign_(self):
with self._choice():
with self._option():
self._plus_sign_()
with self._option():
self._minus_sign_()
self._error('no available options')
@graken()
def _approximate_numeric_literal_(self):
self._exact_numeric_literal_()
self._token('E')
self._signed_integer_()
@graken()
def _signed_integer_(self):
with self._optional():
self._sign_()
self._unsigned_integer_()
@graken()
def _unsigned_integer_(self):
self._digit_()
@graken()
def _datetime_literal_(self):
with self._choice():
with self._option():
self._date_literal_()
with self._option():
self._time_literal_()
with self._option():
self._timestamp_literal_()
self._error('no available options')
@graken()
def _date_literal_(self):
self._token('DATE')
self._date_string_()
@graken()
def _time_literal_(self):
self._token('TIME')
self._time_string_()
@graken()
def _timestamp_literal_(self):
self._token('TIMESTAMP')
self._timestamp_string_()
@graken()
def _date_string_(self):
self._quote_()
self._date_value_()
self._quote_()
@graken()
def _time_string_(self):
self._quote_()
self._time_value_()
with self._optional():
self._time_zone_interval_()
self._quote_()
@graken()
def _timestamp_string_(self):
self._quote_()
self._date_value_()
self._time_value_()
with self._optional():
self._time_zone_interval_()
self._quote_()
@graken()
def _time_zone_interval_(self):
self._sign_()
self._hours_value_()
self._colon_()
self._minutes_value_()
@graken()
def _date_value_(self):
self._years_value_()
self._minus_sign_()
self._months_value_()
self._minus_sign_()
self._days_value_()
@graken()
def _time_value_(self):
self._hours_value_()
self._colon_()
self._minutes_value_()
self._colon_()
self._seconds_value_()
@graken()
def _interval_literal_(self):
self._token('INTERVAL')
with self._optional():
self._sign_()
self._interval_string_()
self._interval_qualifier_()
@graken()
def _interval_string_(self):
self._quote_()
with self._group():
with self._choice():
with self._option():
self._year_month_literal_()
with self._option():
self._day_time_literal_()
self._error('no available options')
self._quote_()
@graken()
def _year_month_literal_(self):
with self._choice():
with self._option():
self._years_value_()
with self._option():
with self._optional():
self._years_value_()
self._minus_sign_()
self._months_value_()
self._error('no available options')
@graken()
def _day_time_literal_(self):
with self._choice():
with self._option():
self._day_time_interval_()
with self._option():
self._time_interval_()
self._error('no available options')
@graken()
def _day_time_interval_(self):
self._days_value_()
with self._optional():
self._hours_value_()
with self._optional():
self._colon_()
self._minutes_value_()
with self._optional():
self._colon_()
self._seconds_value_()
@graken()
def _time_interval_(self):
with self._choice():
with self._option():
self._hours_value_()
with self._optional():
self._colon_()
self._minutes_value_()
with self._optional():
self._colon_()
self._seconds_value_()
with self._option():
self._minutes_value_()
with self._optional():
self._colon_()
self._seconds_value_()
with self._option():
self._seconds_value_()
self._error('no available options')
@graken()
def _years_value_(self):
self._datetime_value_()
@graken()
def _months_value_(self):
self._datetime_value_()
@graken()
def _days_value_(self):
self._datetime_value_()
@graken()
def _hours_value_(self):
self._datetime_value_()
@graken()
def _minutes_value_(self):
self._datetime_value_()
@graken()
def _seconds_value_(self):
self._unsigned_integer_()
with self._optional():
self._period_()
with self._optional():
self._unsigned_integer_()
@graken()
def _datetime_value_(self):
self._unsigned_integer_()
@graken()
def _identifier_(self):
with self._optional():
self._underscore_()
self._character_set_name_()
self._actual_identifier_()
@graken()
def _identifier_list_(self):
def sep0():
self._token(',')
def block0():
self._identifier_()
self._positive_closure(block0, prefix=sep0)
@graken()
def _actual_identifier_(self):
with self._choice():
with self._option():
self._regular_identifier_()
with self._option():
self._delimited_identifier_()
self._error('no available options')
@graken()
def _table_name_(self):
with self._choice():
with self._option():
self._schema_qualified_name_()
with self._option():
self._qualified_local_table_name_()
self._error('no available options')
@graken()
def _qualified_local_table_name_(self):
self._token('MODULE')
self._period_()
self._identifier_()
@graken()
def _schema_name_(self):
with self._optional():
self._identifier_()
self._period_()
self._identifier_()
@graken()
def _schema_qualified_name_(self):
with self._optional():
self._schema_name_()
self._period_()
self._identifier_()
@graken()
def _parameter_name_(self):
self._colon_()
self._identifier_()
@graken()
def _character_set_name_(self):
with self._optional():
self._schema_name_()
self._period_()
self._regular_identifier_()
@graken()
def _connection_name_(self):
self._simple_value_specification_()
@graken()
def _data_type_(self):
with self._choice():
with self._option():
self._character_string_type_()
with self._optional():
self._token('CHARACTER')
self._token('SET')
self._character_set_name_()
with self._option():
self._national_character_string_type_()
with self._option():
self._bit_string_type_()
with self._option():
self._numeric_type_()
with self._option():
self._datetime_type_()
with self._option():
self._interval_type_()
self._error('no available options')
@graken()
def _character_string_type_(self):
with self._choice():
with self._option():
self._token('CHARACTER')
with self._optional():
self._left_paren_()
self._length_()
self._right_paren_()
with self._option():
self._token('CHAR')
with self._optional():
self._left_paren_()
self._length_()
self._right_paren_()
with self._option():
self._token('CHARACTER')
self._token('VARYING')
self._left_paren_()
self._length_()
self._right_paren_()
with self._option():
self._token('CHAR')
self._token('VARYING')
self._left_paren_()
self._length_()
self._right_paren_()
with self._option():
self._token('VARCHAR')
self._left_paren_()
self._length_()
self._right_paren_()
self._error('expecting one of: CHAR CHARACTER')
@graken()
def _national_character_string_type_(self):
with self._choice():
with self._option():
self._token('NATIONAL')
self._token('CHARACTER')
with self._optional():
self._left_paren_()
self._length_()
self._right_paren_()
with self._option():
self._token('NATIONAL')
self._token('CHAR')
with self._optional():
self._left_paren_()
self._length_()
self._right_paren_()
with self._option():
self._token('NCHAR')
with self._optional():
self._left_paren_()
self._length_()
self._right_paren_()
with self._option():
self._token('NATIONAL')
self._token('CHARACTER')
self._token('VARYING')
self._left_paren_()
self._length_()
self._right_paren_()
with self._option():
self._token('NATIONAL')
self._token('CHAR')
self._token('VARYING')
self._left_paren_()
self._length_()
self._right_paren_()
with self._option():
self._token('NCHAR')
self._token('VARYING')
self._left_paren_()
self._length_()
self._right_paren_()
self._error('expecting one of: NATIONAL NCHAR')
@graken()
def _bit_string_type_(self):
with self._choice():
with self._option():
self._token('BIT')
with self._optional():
self._left_paren_()
self._length_()
self._right_paren_()
with self._option():
self._token('BIT')
self._token('VARYING')
self._left_paren_()
self._length_()
self._right_paren_()
self._error('expecting one of: BIT')
@graken()
def _numeric_type_(self):
with self._choice():
with self._option():
self._exact_numeric_type_()
with self._option():
self._approximate_numeric_type_()
self._error('no available options')
@graken()
def _exact_numeric_type_(self):
with self._choice():
with self._option():
self._token('NUMERIC')
with self._optional():
self._left_paren_()
self._precision_()
with self._optional():
self._comma_()
self._scale_()
self._right_paren_()
with self._option():
self._token('DECIMAL')
with self._optional():
self._left_paren_()
self._precision_()
with self._optional():
self._comma_()
self._scale_()
self._right_paren_()
with self._option():
self._token('DEC')
with self._optional():
self._left_paren_()
self._precision_()
with self._optional():
self._comma_()
self._scale_()
self._right_paren_()
with self._option():
self._token('INTEGER')
with self._option():
self._token('INT')
with self._option():
self._token('SMALLINT')
self._error('expecting one of: DEC DECIMAL INT INTEGER NUMERIC SMALLINT')
@graken()
def _approximate_numeric_type_(self):
with self._choice():
with self._option():
self._token('FLOAT')
with self._optional():
self._left_paren_()
self._precision_()
self._right_paren_()
with self._option():
self._token('REAL')
with self._option():
self._token('DOUBLE')
self._token('PRECISION')
self._error('expecting one of: DOUBLE FLOAT REAL')
@graken()
def _length_(self):
self._unsigned_integer_()
@graken()
def _precision_(self):
self._unsigned_integer_()
@graken()
def _scale_(self):
self._unsigned_integer_()
@graken()
def _datetime_type_(self):
with self._choice():
with self._option():
self._token('DATE')
with self._option():
self._token('TIME')
with self._optional():
self._left_paren_()
self._precision_()
self._right_paren_()
with self._optional():
self._token('WITH')
self._token('TIME')
self._token('ZONE')
with self._option():
self._token('TIMESTAMP')
with self._optional():
self._left_paren_()
self._precision_()
self._right_paren_()
with self._optional():
self._token('WITH')
self._token('TIME')
self._token('ZONE')
self._error('expecting one of: DATE TIME TIMESTAMP')
@graken()
def _interval_type_(self):
self._token('INTERVAL')
self._interval_qualifier_()
@graken()
def _value_specification_(self):
with self._choice():
with self._option():
self._literal_()
with self._option():
self._general_value_specification_()
self._error('no available options')
@graken()
def _unsigned_value_specification_(self):
with self._choice():
with self._option():
self._unsigned_literal_()
with self._option():
self._general_value_specification_()
self._error('no available options')
@graken()
def _general_value_specification_(self):
with self._choice():
with self._option():
self._parameter_specification_()
with self._option():
self._question_mark_()
with self._option():
self._token('USER')
with self._option():
self._token('CURRENT_USER')
with self._option():
self._token('SESSION_USER')
with self._option():
self._token('SYSTEM_USER')
with self._option():
self._token('VALUE')
self._error('expecting one of: CURRENT_USER SESSION_USER SYSTEM_USER USER VALUE')
@graken()
def _simple_value_specification_(self):
with self._choice():
with self._option():
self._parameter_name_()
with self._option():
self._literal_()
self._error('no available options')
@graken()
def _parameter_specification_(self):
self._parameter_name_()
with self._optional():
self._indicator_parameter_()
@graken()
def _indicator_parameter_(self):
with self._optional():
self._token('INDICATOR')
self._parameter_name_()
@graken()
def _table_reference_(self):
with self._choice():
with self._option():
self._table_name_()
with self._optional():
self._as_clause_()
with self._optional():
self._left_paren_()
self._column_name_list_()
self._right_paren_()
with self._option():
self._subquery_()
self._as_clause_()
with self._optional():
self._left_paren_()
self._column_name_list_()
self._right_paren_()
with self._option():
self._joined_table_()
self._error('no available options')
@graken()
def _column_name_list_(self):
self._identifier_list_()
@graken()
def _column_reference_(self):
with self._optional():
self._qualifier_()
self._period_()
self._identifier_()
@graken()
def _qualifier_(self):
with self._choice():
with self._option():
self._table_name_()
with self._option():
self._identifier_()
self._error('no available options')
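    # Aggregate (set) functions: COUNT(*) is special-cased; the general
    # form is AVG/MAX/MIN/SUM/COUNT over an optionally DISTINCT/ALL
    # qualified value expression.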
@graken()
def _set_function_specification_(self):
with self._choice():
with self._option():
self._token('COUNT')
self._left_paren_()
self._asterisk_()
self._right_paren_()
with self._option():
self._general_set_function_()
self._error('no available options')
@graken()
def _general_set_function_(self):
self._set_function_type_()
self._left_paren_()
with self._optional():
self._set_quantifier_()
self._value_expression_()
self._right_paren_()
@graken()
def _set_function_type_(self):
with self._choice():
with self._option():
self._token('AVG')
with self._option():
self._token('MAX')
with self._option():
self._token('MIN')
with self._option():
self._token('SUM')
with self._option():
self._token('COUNT')
self._error('expecting one of: AVG COUNT MAX MIN SUM')
@graken()
def _set_quantifier_(self):
with self._choice():
with self._option():
self._token('DISTINCT')
with self._option():
self._token('ALL')
self._error('expecting one of: ALL DISTINCT')
@graken()
def _numeric_value_function_(self):
with self._choice():
with self._option():
self._position_expression_()
with self._option():
self._extract_expression_()
with self._option():
self._length_expression_()
self._error('no available options')
@graken()
def _position_expression_(self):
self._token('POSITION')
self._left_paren_()
self._character_value_expression_()
self._token('IN')
self._character_value_expression_()
self._right_paren_()
@graken()
def _length_expression_(self):
with self._choice():
with self._option():
self._char_length_expression_()
with self._option():
self._octet_length_expression_()
with self._option():
self._bit_length_expression_()
self._error('no available options')
@graken()
def _char_length_expression_(self):
with self._group():
with self._choice():
with self._option():
self._token('CHAR_LENGTH')
with self._option():
self._token('CHARACTER_LENGTH')
self._error('expecting one of: CHARACTER_LENGTH CHAR_LENGTH')
self._left_paren_()
self._string_value_expression_()
self._right_paren_()
@graken()
def _octet_length_expression_(self):
self._token('OCTET_LENGTH')
self._left_paren_()
self._string_value_expression_()
self._right_paren_()
@graken()
def _bit_length_expression_(self):
self._token('BIT_LENGTH')
self._left_paren_()
self._string_value_expression_()
self._right_paren_()
@graken()
def _extract_expression_(self):
self._token('EXTRACT')
self._left_paren_()
self._extract_field_()
self._token('FROM')
self._extract_source_()
self._right_paren_()
@graken()
def _extract_field_(self):
with self._choice():
with self._option():
self._datetime_field_()
with self._option():
self._time_zone_field_()
self._error('no available options')
@graken()
def _time_zone_field_(self):
with self._choice():
with self._option():
self._token('TIMEZONE_HOUR')
with self._option():
self._token('TIMEZONE_MINUTE')
self._error('expecting one of: TIMEZONE_HOUR TIMEZONE_MINUTE')
@graken()
def _extract_source_(self):
with self._choice():
with self._option():
self._datetime_value_expression_()
with self._option():
self._interval_value_expression_()
self._error('no available options')
@graken()
def _string_value_function_(self):
with self._choice():
with self._option():
self._character_value_function_()
with self._option():
self._bit_substring_function_()
self._error('no available options')
@graken()
def _character_value_function_(self):
with self._choice():
with self._option():
self._character_substring_function_()
with self._option():
self._fold_()
with self._option():
self._form_of_use_conversion_()
with self._option():
self._character_translation_()
with self._option():
self._trim_function_()
self._error('no available options')
@graken()
def _character_substring_function_(self):
self._token('SUBSTRING')
self._left_paren_()
self._character_value_expression_()
self._token('FROM')
self._start_position_()
with self._optional():
self._token('FOR')
self._string_length_()
self._right_paren_()
@graken()
def _fold_(self):
with self._group():
with self._choice():
with self._option():
self._token('UPPER')
with self._option():
self._token('LOWER')
self._error('expecting one of: LOWER UPPER')
self._left_paren_()
self._character_value_expression_()
self._right_paren_()
@graken()
def _form_of_use_conversion_(self):
self._token('CONVERT')
self._left_paren_()
self._character_value_expression_()
self._token('USING')
self._schema_qualified_name_()
self._right_paren_()
@graken()
def _character_translation_(self):
self._token('TRANSLATE')
self._left_paren_()
self._character_value_expression_()
self._token('USING')
self._schema_qualified_name_()
self._right_paren_()
@graken()
def _trim_function_(self):
self._token('TRIM')
self._left_paren_()
self._trim_operands_()
self._right_paren_()
@graken()
def _trim_operands_(self):
with self._optional():
with self._optional():
self._trim_specification_()
with self._optional():
self._character_value_expression_()
self._token('FROM')
self._character_value_expression_()
@graken()
def _trim_specification_(self):
with self._choice():
with self._option():
self._token('LEADING')
with self._option():
self._token('TRAILING')
with self._option():
self._token('BOTH')
self._error('expecting one of: BOTH LEADING TRAILING')
@graken()
def _bit_substring_function_(self):
self._token('SUBSTRING')
self._left_paren_()
self._bit_value_expression_()
self._token('FROM')
self._start_position_()
with self._optional():
self._token('FOR')
self._string_length_()
self._right_paren_()
@graken()
def _start_position_(self):
self._numeric_value_expression_()
@graken()
def _string_length_(self):
self._numeric_value_expression_()
@graken()
def _datetime_value_function_(self):
with self._choice():
with self._option():
self._token('CURRENT_DATE')
with self._option():
self._current_time_value_function_()
with self._option():
self._current_timestamp_value_function_()
self._error('expecting one of: CURRENT_DATE')
@graken()
def _current_time_value_function_(self):
self._token('CURRENT_TIME')
with self._optional():
self._left_paren_()
self._precision_()
self._right_paren_()
@graken()
def _current_timestamp_value_function_(self):
self._token('CURRENT_TIMESTAMP')
with self._optional():
self._left_paren_()
self._precision_()
self._right_paren_()
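    # Conditional expressions: the NULLIF/COALESCE abbreviations and the
    # simple/searched CASE ... WHEN ... THEN ... [ELSE ...] END forms.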
@graken()
def _case_expression_(self):
with self._choice():
with self._option():
self._case_abbreviation_()
with self._option():
self._case_specification_()
self._error('no available options')
@graken()
def _case_abbreviation_(self):
with self._choice():
with self._option():
self._token('NULLIF')
self._left_paren_()
self._value_expression_()
self._comma_()
self._value_expression_()
self._right_paren_()
with self._option():
self._token('COALESCE')
self._left_paren_()
self._value_expression_()
def block0():
self._comma_()
self._value_expression_()
self._positive_closure(block0)
self._right_paren_()
self._error('no available options')
@graken()
def _case_specification_(self):
with self._choice():
with self._option():
self._simple_case_()
with self._option():
self._searched_case_()
self._error('no available options')
@graken()
def _simple_case_(self):
self._token('CASE')
self._value_expression_()
def block0():
self._simple_when_clause_()
self._positive_closure(block0)
with self._optional():
self._else_clause_()
self._token('END')
@graken()
def _searched_case_(self):
self._token('CASE')
def block0():
self._searched_when_clause_()
self._positive_closure(block0)
with self._optional():
self._else_clause_()
self._token('END')
@graken()
def _simple_when_clause_(self):
self._token('WHEN')
self._value_expression_()
self._token('THEN')
self._result_()
@graken()
def _searched_when_clause_(self):
self._token('WHEN')
self._search_condition_()
self._token('THEN')
self._result_()
@graken()
def _else_clause_(self):
self._token('ELSE')
self._result_()
@graken()
def _result_(self):
with self._choice():
with self._option():
self._value_expression_()
with self._option():
self._token('NULL')
self._error('expecting one of: NULL')
@graken()
def _cast_specification_(self):
self._token('CAST')
self._left_paren_()
self._cast_operand_()
self._token('AS')
self._cast_target_()
self._right_paren_()
@graken()
def _cast_operand_(self):
with self._choice():
with self._option():
self._value_expression_()
with self._option():
self._token('NULL')
self._error('expecting one of: NULL')
@graken()
def _cast_target_(self):
with self._choice():
with self._option():
self._schema_qualified_name_()
with self._option():
self._data_type_()
self._error('no available options')
@graken()
def _value_expression_(self):
with self._choice():
with self._option():
self._numeric_value_expression_()
with self._option():
self._string_value_expression_()
with self._option():
self._datetime_value_expression_()
with self._option():
self._interval_value_expression_()
self._error('no available options')
@graken()
def _value_expression_primary_(self):
with self._choice():
with self._option():
self._unsigned_value_specification_()
with self._option():
self._column_reference_()
with self._option():
self._set_function_specification_()
with self._option():
self._subquery_()
with self._option():
self._case_expression_()
with self._option():
self._left_paren_()
self._value_expression_()
self._right_paren_()
with self._option():
self._cast_specification_()
self._error('no available options')
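    # Numeric expressions use the classic left-recursive precedence chain
    # expression -> term -> factor, so * and / bind tighter than + and -.
    # The generated @graken wrapper memoizes rule results, which is what
    # makes this left recursion safe to evaluate.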
@graken()
def _numeric_value_expression_(self):
with self._choice():
with self._option():
self._term_()
with self._option():
self._numeric_value_expression_()
self._plus_sign_()
self._term_()
with self._option():
self._numeric_value_expression_()
self._minus_sign_()
self._term_()
self._error('no available options')
@graken()
def _term_(self):
with self._choice():
with self._option():
self._factor_()
with self._option():
self._term_()
self._asterisk_()
self._factor_()
with self._option():
self._term_()
self._solidus_()
self._factor_()
self._error('no available options')
@graken()
def _factor_(self):
with self._optional():
self._sign_()
self._numeric_primary_()
@graken()
def _numeric_primary_(self):
with self._choice():
with self._option():
self._value_expression_primary_()
with self._option():
self._numeric_value_function_()
self._error('no available options')
@graken()
def _string_value_expression_(self):
with self._choice():
with self._option():
self._character_value_expression_()
with self._option():
self._bit_value_expression_()
self._error('no available options')
@graken()
def _character_value_expression_(self):
with self._choice():
with self._option():
self._concatenation_()
with self._option():
self._character_factor_()
self._error('no available options')
@graken()
def _concatenation_(self):
self._character_value_expression_()
self._concatenation_operator_()
self._character_factor_()
@graken()
def _character_factor_(self):
self._character_primary_()
with self._optional():
self._collate_clause_()
@graken()
def _character_primary_(self):
with self._choice():
with self._option():
self._value_expression_primary_()
with self._option():
self._string_value_function_()
self._error('no available options')
@graken()
def _bit_value_expression_(self):
with self._choice():
with self._option():
self._bit_concatenation_()
with self._option():
self._bit_factor_()
self._error('no available options')
@graken()
def _bit_concatenation_(self):
self._bit_value_expression_()
self._concatenation_operator_()
self._bit_factor_()
@graken()
def _bit_factor_(self):
self._bit_primary_()
@graken()
def _bit_primary_(self):
with self._choice():
with self._option():
self._value_expression_primary_()
with self._option():
self._string_value_function_()
self._error('no available options')
@graken()
def _datetime_value_expression_(self):
with self._choice():
with self._option():
self._datetime_term_()
with self._option():
self._interval_value_expression_()
self._plus_sign_()
self._datetime_term_()
with self._option():
self._datetime_value_expression_()
self._plus_sign_()
self._interval_term_()
with self._option():
self._datetime_value_expression_()
self._minus_sign_()
self._interval_term_()
self._error('no available options')
@graken()
def _datetime_term_(self):
self._datetime_factor_()
@graken()
def _datetime_factor_(self):
self._datetime_primary_()
with self._optional():
self._time_zone_()
@graken()
def _datetime_primary_(self):
with self._choice():
with self._option():
self._value_expression_primary_()
with self._option():
self._datetime_value_function_()
self._error('no available options')
@graken()
def _time_zone_(self):
self._token('AT')
self._time_zone_specifier_()
@graken()
def _time_zone_specifier_(self):
with self._choice():
with self._option():
self._token('LOCAL')
with self._option():
self._token('TIME')
self._token('ZONE')
self._interval_value_expression_()
self._error('expecting one of: LOCAL')
@graken()
def _interval_value_expression_(self):
with self._choice():
with self._option():
self._interval_term_()
with self._option():
self._interval_value_expression_1_()
self._plus_sign_()
self._interval_term_1_()
with self._option():
self._interval_value_expression_1_()
self._minus_sign_()
self._interval_term_1_()
with self._option():
self._left_paren_()
self._datetime_value_expression_()
self._minus_sign_()
self._datetime_term_()
self._right_paren_()
self._interval_qualifier_()
self._error('no available options')
@graken()
def _interval_term_(self):
with self._choice():
with self._option():
self._interval_factor_()
with self._option():
self._interval_term_2_()
self._asterisk_()
self._factor_()
with self._option():
self._interval_term_2_()
self._solidus_()
self._factor_()
with self._option():
self._term_()
self._asterisk_()
self._interval_factor_()
self._error('no available options')
@graken()
def _interval_factor_(self):
with self._optional():
self._sign_()
self._interval_primary_()
@graken()
def _interval_primary_(self):
self._value_expression_primary_()
with self._optional():
self._interval_qualifier_()
@graken()
def _interval_value_expression_1_(self):
self._interval_value_expression_()
@graken()
def _interval_term_1_(self):
self._interval_term_()
@graken()
def _interval_term_2_(self):
self._interval_term_()
@graken()
def _row_value_constructor_(self):
with self._choice():
with self._option():
self._row_value_constructor_element_()
with self._option():
self._left_paren_()
self._row_value_constructor_list_()
self._right_paren_()
with self._option():
self._subquery_()
self._error('no available options')
@graken()
def _row_value_constructor_list_(self):
def sep0():
self._token(',')
def block0():
self._row_value_constructor_element_()
self._positive_closure(block0, prefix=sep0)
@graken()
def _row_value_constructor_element_(self):
with self._choice():
with self._option():
self._value_expression_()
with self._option():
self._token('NULL')
with self._option():
self._token('DEFAULT')
self._error('expecting one of: DEFAULT NULL')
@graken()
def _table_value_constructor_(self):
self._token('VALUES')
self._table_value_constructor_list_()
@graken()
def _table_value_constructor_list_(self):
def sep0():
self._token(',')
def block0():
self._row_value_constructor_()
self._positive_closure(block0, prefix=sep0)
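    # <table expression>: a FROM clause followed by optional WHERE,
    # GROUP BY, and HAVING clauses, with the cross/qualified join forms
    # reachable through _table_reference_.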
@graken()
def _table_expression_(self):
self._from_clause_()
with self._optional():
self._where_clause_()
with self._optional():
self._group_by_clause_()
with self._optional():
self._having_clause_()
@graken()
def _from_clause_(self):
self._token('FROM')
def sep0():
self._token(',')
def block0():
self._table_reference_()
self._positive_closure(block0, prefix=sep0)
@graken()
def _joined_table_(self):
with self._choice():
with self._option():
self._cross_join_()
with self._option():
self._qualified_join_()
with self._option():
self._left_paren_()
self._joined_table_()
self._right_paren_()
self._error('no available options')
@graken()
def _cross_join_(self):
self._table_reference_()
self._token('CROSS')
self._token('JOIN')
self._table_reference_()
@graken()
def _qualified_join_(self):
self._table_reference_()
with self._optional():
self._token('NATURAL')
with self._optional():
self._join_type_()
self._token('JOIN')
self._table_reference_()
with self._optional():
self._join_specification_()
@graken()
def _join_specification_(self):
with self._choice():
with self._option():
self._join_condition_()
with self._option():
self._named_columns_join_()
self._error('no available options')
@graken()
def _join_condition_(self):
self._token('ON')
self._search_condition_()
@graken()
def _named_columns_join_(self):
self._token('USING')
self._left_paren_()
self._column_name_list_()
self._right_paren_()
@graken()
def _join_type_(self):
with self._choice():
with self._option():
self._token('INNER')
with self._option():
self._outer_join_type_()
with self._optional():
self._token('OUTER')
with self._option():
self._token('UNION')
self._error('expecting one of: INNER UNION')
@graken()
def _outer_join_type_(self):
with self._choice():
with self._option():
self._token('LEFT')
with self._option():
self._token('RIGHT')
with self._option():
self._token('FULL')
self._error('expecting one of: FULL LEFT RIGHT')
@graken()
def _where_clause_(self):
self._token('WHERE')
self._search_condition_()
@graken()
def _group_by_clause_(self):
self._token('GROUP')
self._token('BY')
self._grouping_column_reference_list_()
@graken()
def _grouping_column_reference_list_(self):
def sep0():
self._token(',')
def block0():
self._grouping_column_reference_()
self._positive_closure(block0, prefix=sep0)
@graken()
def _grouping_column_reference_(self):
self._column_reference_()
with self._optional():
self._collate_clause_()
@graken()
def _having_clause_(self):
self._token('HAVING')
self._search_condition_()
@graken()
def _query_specification_(self):
self._token('SELECT')
with self._optional():
self._set_quantifier_()
self._select_list_()
self._table_expression_()
@graken()
def _select_list_(self):
with self._choice():
with self._option():
self._asterisk_()
with self._option():
def sep0():
self._token(',')
def block0():
self._select_sublist_()
self._positive_closure(block0, prefix=sep0)
self._error('no available options')
@graken()
def _select_sublist_(self):
with self._choice():
with self._option():
self._derived_column_()
with self._option():
self._qualifier_()
self._period_()
self._asterisk_()
self._error('no available options')
@graken()
def _derived_column_(self):
self._value_expression_()
with self._optional():
self._as_clause_()
@graken()
def _as_clause_(self):
with self._optional():
self._token('AS')
self._identifier_()
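    # Query expressions: UNION and EXCEPT live at the expression level
    # while INTERSECT lives at the term level, so INTERSECT binds more
    # tightly, as SQL-92 requires.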
@graken()
def _query_expression_(self):
with self._choice():
with self._option():
self._non_join_query_expression_()
with self._option():
self._joined_table_()
self._error('no available options')
@graken()
def _non_join_query_expression_(self):
with self._choice():
with self._option():
self._non_join_query_term_()
with self._option():
self._query_expression_()
self._token('UNION')
with self._optional():
self._token('ALL')
with self._optional():
self._corresponding_spec_()
self._query_term_()
with self._option():
self._query_expression_()
self._token('EXCEPT')
with self._optional():
self._token('ALL')
with self._optional():
self._corresponding_spec_()
self._query_term_()
self._error('no available options')
@graken()
def _query_term_(self):
with self._choice():
with self._option():
self._non_join_query_term_()
with self._option():
self._joined_table_()
self._error('no available options')
@graken()
def _non_join_query_term_(self):
with self._choice():
with self._option():
self._non_join_query_primary_()
with self._option():
self._query_term_()
self._token('INTERSECT')
with self._optional():
self._token('ALL')
with self._optional():
self._corresponding_spec_()
self._query_primary_()
self._error('no available options')
@graken()
def _query_primary_(self):
with self._choice():
with self._option():
self._non_join_query_primary_()
with self._option():
self._joined_table_()
self._error('no available options')
@graken()
def _non_join_query_primary_(self):
with self._choice():
with self._option():
self._simple_table_()
with self._option():
self._left_paren_()
self._non_join_query_expression_()
self._right_paren_()
self._error('no available options')
@graken()
def _simple_table_(self):
with self._choice():
with self._option():
self._query_specification_()
with self._option():
self._table_value_constructor_()
with self._option():
self._explicit_table_()
self._error('no available options')
@graken()
def _explicit_table_(self):
self._token('TABLE')
self._table_name_()
@graken()
def _corresponding_spec_(self):
self._token('CORRESPONDING')
with self._optional():
self._token('BY')
self._left_paren_()
self._column_name_list_()
self._right_paren_()
@graken()
def _subquery_(self):
self._left_paren_()
self._query_expression_()
self._right_paren_()
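    # Predicates: comparison, BETWEEN, IN, LIKE, NULL, quantified
    # comparison (ALL/SOME/ANY), EXISTS, UNIQUE, MATCH, and OVERLAPS.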
@graken()
def _predicate_(self):
with self._choice():
with self._option():
self._comparison_predicate_()
with self._option():
self._between_predicate_()
with self._option():
self._in_predicate_()
with self._option():
self._like_predicate_()
with self._option():
self._null_predicate_()
with self._option():
self._quantified_comparison_predicate_()
with self._option():
self._exists_predicate_()
with self._option():
self._unique_predicate_()
with self._option():
self._match_predicate_()
with self._option():
self._overlaps_predicate_()
self._error('no available options')
@graken()
def _comparison_predicate_(self):
self._row_value_constructor_()
self._comp_op_()
self._row_value_constructor_()
@graken()
def _comp_op_(self):
with self._choice():
with self._option():
self._equals_operator_()
with self._option():
self._not_equals_operator_()
with self._option():
self._less_than_operator_()
with self._option():
self._greater_than_operator_()
with self._option():
self._less_than_or_equals_operator_()
with self._option():
self._greater_than_or_equals_operator_()
self._error('no available options')
@graken()
def _between_predicate_(self):
self._row_value_constructor_()
with self._optional():
self._token('NOT')
self._token('BETWEEN')
self._row_value_constructor_()
self._token('AND')
self._row_value_constructor_()
@graken()
def _in_predicate_(self):
self._row_value_constructor_()
with self._optional():
self._token('NOT')
self._token('IN')
self._in_predicate_value_()
@graken()
def _in_predicate_value_(self):
with self._choice():
with self._option():
self._subquery_()
with self._option():
self._left_paren_()
self._in_value_list_()
self._right_paren_()
self._error('no available options')
@graken()
def _in_value_list_(self):
self._value_expression_()
def block0():
self._comma_()
self._value_expression_()
self._positive_closure(block0)
@graken()
def _like_predicate_(self):
self._character_value_expression_()
with self._optional():
self._token('NOT')
self._token('LIKE')
self._character_value_expression_()
with self._optional():
self._token('ESCAPE')
self._character_value_expression_()
@graken()
def _null_predicate_(self):
self._row_value_constructor_()
self._token('IS')
with self._optional():
self._token('NOT')
self._token('NULL')
@graken()
def _quantified_comparison_predicate_(self):
self._row_value_constructor_()
self._comp_op_()
self._quantifier_()
self._subquery_()
@graken()
def _quantifier_(self):
with self._choice():
with self._option():
self._token('ALL')
with self._option():
self._some_()
self._error('expecting one of: ALL')
@graken()
def _some_(self):
with self._choice():
with self._option():
self._token('SOME')
with self._option():
self._token('ANY')
self._error('expecting one of: ANY SOME')
@graken()
def _exists_predicate_(self):
self._token('EXISTS')
self._subquery_()
@graken()
def _unique_predicate_(self):
self._token('UNIQUE')
self._subquery_()
@graken()
def _match_predicate_(self):
self._row_value_constructor_()
self._token('MATCH')
with self._optional():
self._token('UNIQUE')
with self._optional():
with self._choice():
with self._option():
self._token('PARTIAL')
with self._option():
self._token('FULL')
self._error('expecting one of: FULL PARTIAL')
self._subquery_()
@graken()
def _overlaps_predicate_(self):
self._row_value_constructor_()
self._token('OVERLAPS')
self._row_value_constructor_()
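    # Boolean grammar: OR at the search-condition level, AND at the term
    # level, NOT at the factor level, giving NOT > AND > OR precedence
    # through the same left-recursive pattern used for arithmetic.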
@graken()
def _search_condition_(self):
with self._choice():
with self._option():
self._boolean_term_()
with self._option():
self._search_condition_()
self._token('OR')
self._boolean_term_()
self._error('no available options')
@graken()
def _boolean_term_(self):
with self._choice():
with self._option():
self._boolean_factor_()
with self._option():
self._boolean_term_()
self._token('AND')
self._boolean_factor_()
self._error('no available options')
@graken()
def _boolean_factor_(self):
with self._optional():
self._token('NOT')
self._boolean_test_()
@graken()
def _boolean_test_(self):
self._boolean_primary_()
with self._optional():
self._token('IS')
with self._optional():
self._token('NOT')
self._truth_value_()
@graken()
def _truth_value_(self):
with self._choice():
with self._option():
self._token('TRUE')
with self._option():
self._token('FALSE')
with self._option():
self._token('UNKNOWN')
self._error('expecting one of: FALSE TRUE UNKNOWN')
@graken()
def _boolean_primary_(self):
with self._choice():
with self._option():
self._predicate_()
with self._option():
self._left_paren_()
self._search_condition_()
self._right_paren_()
self._error('no available options')
@graken()
def _interval_qualifier_(self):
with self._choice():
with self._option():
self._start_field_()
self._token('TO')
self._end_field_()
with self._option():
self._single_datetime_field_()
self._error('no available options')
@graken()
def _start_field_(self):
self._non_second_datetime_field_()
with self._optional():
self._left_paren_()
self._precision_()
self._right_paren_()
@graken()
def _end_field_(self):
with self._choice():
with self._option():
self._non_second_datetime_field_()
with self._option():
self._token('SECOND')
with self._optional():
self._left_paren_()
self._precision_()
self._right_paren_()
self._error('expecting one of: SECOND')
@graken()
def _single_datetime_field_(self):
with self._choice():
with self._option():
self._non_second_datetime_field_()
with self._optional():
self._left_paren_()
self._precision_()
self._right_paren_()
with self._option():
self._token('SECOND')
with self._optional():
self._left_paren_()
self._precision_()
with self._optional():
self._comma_()
self._precision_()
self._right_paren_()
self._error('expecting one of: SECOND')
@graken()
def _datetime_field_(self):
with self._choice():
with self._option():
self._non_second_datetime_field_()
with self._option():
self._token('SECOND')
self._error('expecting one of: SECOND')
@graken()
def _non_second_datetime_field_(self):
with self._choice():
with self._option():
self._token('YEAR')
with self._option():
self._token('MONTH')
with self._option():
self._token('DAY')
with self._option():
self._token('HOUR')
with self._option():
self._token('MINUTE')
self._error('expecting one of: DAY HOUR MINUTE MONTH YEAR')
@graken()
def _privileges_(self):
with self._choice():
with self._option():
self._token('ALL')
self._token('PRIVILEGES')
with self._option():
self._action_list_()
self._error('expecting one of: ALL')
@graken()
def _action_list_(self):
def sep0():
self._token(',')
def block0():
self._action_()
self._positive_closure(block0, prefix=sep0)
@graken()
def _action_(self):
with self._choice():
with self._option():
self._token('SELECT')
with self._option():
self._token('DELETE')
with self._option():
self._token('INSERT')
with self._optional():
self._left_paren_()
self._column_name_list_()
self._right_paren_()
with self._option():
self._token('UPDATE')
with self._optional():
self._left_paren_()
self._column_name_list_()
self._right_paren_()
with self._option():
self._token('REFERENCES')
with self._optional():
self._left_paren_()
self._column_name_list_()
self._right_paren_()
with self._option():
self._token('USAGE')
self._error('expecting one of: DELETE INSERT REFERENCES SELECT UPDATE USAGE')
@graken()
def _grantee_(self):
with self._choice():
with self._option():
self._token('PUBLIC')
with self._option():
self._identifier_()
self._error('expecting one of: PUBLIC')
@graken()
def _collate_clause_(self):
self._token('COLLATE')
self._schema_qualified_name_()
@graken()
def _constraint_name_definition_(self):
self._token('CONSTRAINT')
self._schema_qualified_name_()
@graken()
def _constraint_attributes_(self):
with self._choice():
with self._option():
self._constraint_check_time_()
with self._optional():
with self._optional():
self._token('NOT')
self._token('DEFERRABLE')
with self._option():
with self._optional():
self._token('NOT')
self._token('DEFERRABLE')
with self._optional():
self._constraint_check_time_()
self._error('expecting one of: DEFERRABLE NOT')
@graken()
def _constraint_check_time_(self):
with self._choice():
with self._option():
self._token('INITIALLY')
self._token('DEFERRED')
with self._option():
self._token('INITIALLY')
self._token('IMMEDIATE')
self._error('expecting one of: INITIALLY')
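    # DDL statements: CREATE/DROP/ALTER for schemas, tables, views,
    # domains, character sets, collations, translations, and assertions,
    # plus GRANT/REVOKE privilege statements.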
@graken()
def _schema_definition_(self):
self._token('CREATE')
self._token('SCHEMA')
self._schema_name_clause_()
with self._optional():
self._schema_character_set_specification_()
with self._optional():
def block0():
self._schema_element_()
self._positive_closure(block0)
@graken()
def _schema_name_clause_(self):
with self._choice():
with self._option():
self._schema_name_()
with self._option():
self._token('AUTHORIZATION')
self._identifier_()
with self._option():
self._schema_name_()
self._token('AUTHORIZATION')
self._identifier_()
self._error('no available options')
@graken()
def _schema_character_set_specification_(self):
self._token('DEFAULT')
self._token('CHARACTER')
self._token('SET')
self._character_set_name_()
@graken()
def _schema_element_(self):
with self._choice():
with self._option():
self._domain_definition_()
with self._option():
self._table_definition_()
with self._option():
self._view_definition_()
with self._option():
self._grant_statement_()
with self._option():
self._assertion_definition_()
with self._option():
self._character_set_definition_()
with self._option():
self._collation_definition_()
with self._option():
self._translation_definition_()
self._error('no available options')
@graken()
def _drop_schema_statement_(self):
self._token('DROP')
self._token('SCHEMA')
self._schema_name_()
self._drop_behavior_()
@graken()
def _drop_behavior_(self):
with self._choice():
with self._option():
self._token('CASCADE')
with self._option():
self._token('RESTRICT')
self._error('expecting one of: CASCADE RESTRICT')
@graken()
def _table_definition_(self):
self._token('CREATE')
with self._optional():
with self._group():
with self._choice():
with self._option():
self._token('GLOBAL')
with self._option():
self._token('LOCAL')
self._error('expecting one of: GLOBAL LOCAL')
self._token('TEMPORARY')
self._token('TABLE')
self._table_name_()
self._table_element_list_()
with self._optional():
self._token('ON')
self._token('COMMIT')
with self._group():
with self._choice():
with self._option():
self._token('DELETE')
with self._option():
self._token('PRESERVE')
self._error('expecting one of: DELETE PRESERVE')
self._token('ROWS')
@graken()
def _table_element_list_(self):
self._left_paren_()
def sep0():
self._token(',')
def block0():
self._table_element_()
self._positive_closure(block0, prefix=sep0)
self._right_paren_()
@graken()
def _table_element_(self):
with self._choice():
with self._option():
self._column_definition_()
with self._option():
self._table_constraint_definition_()
self._error('no available options')
@graken()
def _column_definition_(self):
self._identifier_()
with self._group():
with self._choice():
with self._option():
self._data_type_()
with self._option():
self._schema_qualified_name_()
self._error('no available options')
with self._optional():
self._default_clause_()
with self._optional():
def block1():
self._column_constraint_definition_()
self._positive_closure(block1)
with self._optional():
self._collate_clause_()
@graken()
def _column_constraint_definition_(self):
with self._optional():
self._constraint_name_definition_()
self._column_constraint_()
with self._optional():
self._constraint_attributes_()
@graken()
def _column_constraint_(self):
with self._choice():
with self._option():
self._token('NOT')
self._token('NULL')
with self._option():
self._unique_specification_()
with self._option():
self._references_specification_()
with self._option():
self._check_constraint_definition_()
self._error('expecting one of: NOT')
@graken()
def _default_clause_(self):
self._token('DEFAULT')
self._default_option_()
@graken()
def _default_option_(self):
with self._choice():
with self._option():
self._literal_()
with self._option():
self._datetime_value_function_()
with self._option():
self._token('USER')
with self._option():
self._token('CURRENT_USER')
with self._option():
self._token('SESSION_USER')
with self._option():
self._token('SYSTEM_USER')
with self._option():
self._token('NULL')
self._error('expecting one of: CURRENT_USER NULL SESSION_USER SYSTEM_USER USER')
@graken()
def _table_constraint_definition_(self):
with self._optional():
self._constraint_name_definition_()
self._table_constraint_()
with self._optional():
self._constraint_attributes_()
@graken()
def _table_constraint_(self):
with self._choice():
with self._option():
self._unique_constraint_definition_()
with self._option():
self._referential_constraint_definition_()
with self._option():
self._check_constraint_definition_()
self._error('no available options')
@graken()
def _unique_constraint_definition_(self):
self._unique_specification_()
self._left_paren_()
self._column_name_list_()
self._right_paren_()
@graken()
def _unique_specification_(self):
with self._choice():
with self._option():
self._token('UNIQUE')
with self._option():
self._token('PRIMARY')
self._token('KEY')
self._error('expecting one of: PRIMARY UNIQUE')
@graken()
def _referential_constraint_definition_(self):
self._token('FOREIGN')
self._token('KEY')
self._left_paren_()
self._column_name_list_()
self._right_paren_()
self._references_specification_()
@graken()
def _references_specification_(self):
self._token('REFERENCES')
self._referenced_table_and_columns_()
with self._optional():
self._token('MATCH')
self._match_type_()
with self._optional():
self._referential_triggered_action_()
@graken()
def _match_type_(self):
with self._choice():
with self._option():
self._token('FULL')
with self._option():
self._token('PARTIAL')
self._error('expecting one of: FULL PARTIAL')
@graken()
def _referenced_table_and_columns_(self):
self._table_name_()
with self._optional():
self._left_paren_()
self._column_name_list_()
self._right_paren_()
@graken()
def _referential_triggered_action_(self):
with self._choice():
with self._option():
self._update_rule_()
with self._optional():
self._delete_rule_()
with self._option():
self._delete_rule_()
with self._optional():
self._update_rule_()
self._error('no available options')
@graken()
def _update_rule_(self):
self._token('ON')
self._token('UPDATE')
self._referential_action_()
@graken()
def _delete_rule_(self):
self._token('ON')
self._token('DELETE')
self._referential_action_()
@graken()
def _referential_action_(self):
with self._choice():
with self._option():
self._token('CASCADE')
with self._option():
self._token('SET')
self._token('NULL')
with self._option():
self._token('SET')
self._token('DEFAULT')
with self._option():
self._token('NO')
self._token('ACTION')
self._error('expecting one of: CASCADE NO SET')
@graken()
def _check_constraint_definition_(self):
self._token('CHECK')
self._left_paren_()
self._search_condition_()
self._right_paren_()
@graken()
def _alter_table_statement_(self):
self._token('ALTER')
self._token('TABLE')
self._table_name_()
self._alter_table_action_()
@graken()
def _alter_table_action_(self):
with self._choice():
with self._option():
self._add_column_definition_()
with self._option():
self._alter_column_definition_()
with self._option():
self._drop_column_definition_()
with self._option():
self._add_table_constraint_definition_()
with self._option():
self._drop_table_constraint_definition_()
self._error('no available options')
@graken()
def _add_column_definition_(self):
self._token('ADD')
with self._optional():
self._token('COLUMN')
self._column_definition_()
@graken()
def _alter_column_definition_(self):
self._token('ALTER')
with self._optional():
self._token('COLUMN')
self._identifier_()
self._alter_column_action_()
@graken()
def _alter_column_action_(self):
with self._choice():
with self._option():
self._set_column_default_clause_()
with self._option():
self._drop_column_default_clause_()
self._error('no available options')
@graken()
def _set_column_default_clause_(self):
self._token('SET')
self._default_clause_()
@graken()
def _drop_column_default_clause_(self):
self._token('DROP')
self._token('DEFAULT')
@graken()
def _drop_column_definition_(self):
self._token('DROP')
with self._optional():
self._token('COLUMN')
self._identifier_()
self._drop_behavior_()
@graken()
def _add_table_constraint_definition_(self):
self._token('ADD')
self._table_constraint_definition_()
@graken()
def _drop_table_constraint_definition_(self):
self._token('DROP')
self._token('CONSTRAINT')
self._schema_qualified_name_()
self._drop_behavior_()
@graken()
def _drop_table_statement_(self):
self._token('DROP')
self._token('TABLE')
self._table_name_()
self._drop_behavior_()
@graken()
def _view_definition_(self):
self._token('CREATE')
self._token('VIEW')
self._table_name_()
with self._optional():
self._left_paren_()
self._column_name_list_()
self._right_paren_()
self._token('AS')
self._query_expression_()
with self._optional():
self._token('WITH')
with self._optional():
self._levels_clause_()
self._token('CHECK')
self._token('OPTION')
@graken()
def _levels_clause_(self):
with self._choice():
with self._option():
self._token('CASCADED')
with self._option():
self._token('LOCAL')
self._error('expecting one of: CASCADED LOCAL')
@graken()
def _drop_view_statement_(self):
self._token('DROP')
self._token('VIEW')
self._table_name_()
self._drop_behavior_()
@graken()
def _domain_definition_(self):
self._token('CREATE')
self._token('DOMAIN')
self._schema_qualified_name_()
with self._optional():
self._token('AS')
self._data_type_()
with self._optional():
self._default_clause_()
with self._optional():
def block0():
self._domain_constraint_()
self._positive_closure(block0)
with self._optional():
self._collate_clause_()
@graken()
def _domain_constraint_(self):
with self._optional():
self._constraint_name_definition_()
self._check_constraint_definition_()
with self._optional():
self._constraint_attributes_()
@graken()
def _alter_domain_statement_(self):
self._token('ALTER')
self._token('DOMAIN')
self._schema_qualified_name_()
self._alter_domain_action_()
@graken()
def _alter_domain_action_(self):
with self._choice():
with self._option():
self._set_domain_default_clause_()
with self._option():
self._drop_domain_default_clause_()
with self._option():
self._add_domain_constraint_definition_()
with self._option():
self._drop_domain_constraint_definition_()
self._error('no available options')
@graken()
def _set_domain_default_clause_(self):
self._token('SET')
self._default_clause_()
@graken()
def _drop_domain_default_clause_(self):
self._token('DROP')
self._token('DEFAULT')
@graken()
def _add_domain_constraint_definition_(self):
self._token('ADD')
self._domain_constraint_()
@graken()
def _drop_domain_constraint_definition_(self):
self._token('DROP')
self._token('CONSTRAINT')
self._schema_qualified_name_()
@graken()
def _drop_domain_statement_(self):
self._token('DROP')
self._token('DOMAIN')
self._schema_qualified_name_()
self._drop_behavior_()
@graken()
def _character_set_definition_(self):
self._token('CREATE')
self._token('CHARACTER')
self._token('SET')
self._character_set_name_()
with self._optional():
self._token('AS')
self._character_set_source_()
with self._optional():
with self._choice():
with self._option():
self._collate_clause_()
with self._option():
self._limited_collation_definition_()
self._error('no available options')
@graken()
def _character_set_source_(self):
self._token('GET')
self._character_set_name_()
@graken()
def _limited_collation_definition_(self):
self._token('COLLATION')
self._token('FROM')
self._collation_source_()
@graken()
def _drop_character_set_statement_(self):
self._token('DROP')
self._token('CHARACTER')
self._token('SET')
self._character_set_name_()
@graken()
def _collation_definition_(self):
self._token('CREATE')
self._token('COLLATION')
self._schema_qualified_name_()
self._token('FOR')
self._character_set_name_()
self._token('FROM')
self._collation_source_()
with self._optional():
self._pad_attribute_()
@graken()
def _pad_attribute_(self):
with self._choice():
with self._option():
self._token('NO')
self._token('PAD')
with self._option():
self._token('PAD')
self._token('SPACE')
self._error('expecting one of: NO PAD')
@graken()
def _collation_source_(self):
with self._choice():
with self._option():
self._collating_sequence_definition_()
with self._option():
self._translation_collation_()
self._error('no available options')
@graken()
def _collating_sequence_definition_(self):
with self._choice():
with self._option():
self._external_collation_()
with self._option():
self._schema_qualified_name_()
with self._option():
self._token('DESC')
self._left_paren_()
self._schema_qualified_name_()
self._right_paren_()
with self._option():
self._token('DEFAULT')
self._error('expecting one of: DEFAULT')
@graken()
def _translation_collation_(self):
self._token('TRANSLATION')
self._schema_qualified_name_()
with self._optional():
self._token('THEN')
self._token('COLLATION')
self._schema_qualified_name_()
@graken()
def _external_collation_(self):
self._token('EXTERNAL')
self._left_paren_()
self._quote_()
self._schema_qualified_name_()
self._quote_()
self._right_paren_()
@graken()
def _drop_collation_statement_(self):
self._token('DROP')
self._token('COLLATION')
self._schema_qualified_name_()
@graken()
def _translation_definition_(self):
self._token('CREATE')
self._token('TRANSLATION')
self._schema_qualified_name_()
self._token('FOR')
self._character_set_name_()
self._token('TO')
self._character_set_name_()
self._token('FROM')
self._translation_specification_()
@graken()
def _translation_specification_(self):
with self._choice():
with self._option():
self._external_translation_()
with self._option():
self._token('IDENTITY')
with self._option():
self._schema_qualified_name_()
self._error('expecting one of: IDENTITY')
@graken()
def _external_translation_(self):
self._token('EXTERNAL')
self._left_paren_()
self._quote_()
self._schema_qualified_name_()
self._quote_()
self._right_paren_()
@graken()
def _drop_translation_statement_(self):
self._token('DROP')
self._token('TRANSLATION')
self._schema_qualified_name_()
@graken()
def _assertion_definition_(self):
self._token('CREATE')
self._token('ASSERTION')
self._schema_qualified_name_()
self._assertion_check_()
with self._optional():
self._constraint_attributes_()
@graken()
def _assertion_check_(self):
self._token('CHECK')
self._left_paren_()
self._search_condition_()
self._right_paren_()
@graken()
def _drop_assertion_statement_(self):
self._token('DROP')
self._token('ASSERTION')
self._schema_qualified_name_()
@graken()
def _grant_statement_(self):
self._token('GRANT')
self._privileges_()
self._token('ON')
self._object_name_()
self._token('TO')
def sep0():
self._token(',')
def block0():
self._grantee_()
self._positive_closure(block0, prefix=sep0)
with self._optional():
self._token('WITH')
self._token('GRANT')
self._token('OPTION')
@graken()
def _object_name_(self):
with self._choice():
with self._option():
with self._optional():
self._token('TABLE')
self._table_name_()
with self._option():
self._token('DOMAIN')
self._schema_qualified_name_()
with self._option():
self._token('COLLATION')
self._schema_qualified_name_()
with self._option():
self._token('CHARACTER')
self._token('SET')
self._character_set_name_()
with self._option():
self._token('TRANSLATION')
self._schema_qualified_name_()
self._error('no available options')
@graken()
def _revoke_statement_(self):
self._token('REVOKE')
with self._optional():
self._token('GRANT')
self._token('OPTION')
self._token('FOR')
self._privileges_()
self._token('ON')
self._object_name_()
self._token('FROM')
def sep0():
self._token(',')
def block0():
self._grantee_()
self._positive_closure(block0, prefix=sep0)
self._drop_behavior_()
@graken()
def _sql_schema_statement_(self):
with self._choice():
with self._option():
self._sql_schema_definition_statement_()
with self._option():
self._sql_schema_manipulation_statement_()
self._error('no available options')
@graken()
def _sql_schema_definition_statement_(self):
with self._choice():
with self._option():
self._schema_definition_()
with self._option():
self._table_definition_()
with self._option():
self._view_definition_()
with self._option():
self._grant_statement_()
with self._option():
self._domain_definition_()
with self._option():
self._character_set_definition_()
with self._option():
self._collation_definition_()
with self._option():
self._translation_definition_()
with self._option():
self._assertion_definition_()
self._error('no available options')
@graken()
def _sql_schema_manipulation_statement_(self):
with self._choice():
with self._option():
self._drop_schema_statement_()
with self._option():
self._alter_table_statement_()
with self._option():
self._drop_table_statement_()
with self._option():
self._drop_view_statement_()
with self._option():
self._revoke_statement_()
with self._option():
self._alter_domain_statement_()
with self._option():
self._drop_domain_statement_()
with self._option():
self._drop_character_set_statement_()
with self._option():
self._drop_collation_statement_()
with self._option():
self._drop_translation_statement_()
with self._option():
self._drop_assertion_statement_()
self._error('no available options')
@graken()
def _sql_transaction_statement_(self):
with self._choice():
with self._option():
self._set_transaction_statement_()
with self._option():
self._set_constraints_mode_statement_()
with self._option():
self._commit_statement_()
with self._option():
self._rollback_statement_()
self._error('no available options')
@graken()
def _sql_connection_statement_(self):
with self._choice():
with self._option():
self._connect_statement_()
with self._option():
self._set_connection_statement_()
with self._option():
self._disconnect_statement_()
self._error('no available options')
@graken()
def _sql_session_statement_(self):
with self._choice():
with self._option():
self._set_catalog_statement_()
with self._option():
self._set_schema_statement_()
with self._option():
self._set_names_statement_()
with self._option():
self._set_session_authorization_identifier_statement_()
with self._option():
self._set_local_time_zone_statement_()
self._error('no available options')
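    # Ordering and DML: ORDER BY sort specifications, searched
    # DELETE/UPDATE, INSERT, and local temporary table declarations.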
@graken()
def _order_by_clause_(self):
self._token('ORDER')
self._token('BY')
self._sort_specification_list_()
@graken()
def _sort_specification_list_(self):
def sep0():
self._token(',')
def block0():
self._sort_specification_()
self._positive_closure(block0, prefix=sep0)
@graken()
def _sort_specification_(self):
self._sort_key_()
with self._optional():
self._collate_clause_()
with self._optional():
self._ordering_specification_()
@graken()
def _sort_key_(self):
with self._choice():
with self._option():
self._identifier_()
with self._option():
self._unsigned_integer_()
self._error('no available options')
@graken()
def _ordering_specification_(self):
with self._choice():
with self._option():
self._token('ASC')
with self._option():
self._token('DESC')
self._error('expecting one of: ASC DESC')
@graken()
def _delete_statement_searched_(self):
self._token('DELETE')
self._token('FROM')
self._table_name_()
with self._optional():
self._token('WHERE')
self._search_condition_()
@graken()
def _insert_statement_(self):
self._token('INSERT')
self._token('INTO')
self._table_name_()
self._insert_columns_and_source_()
@graken()
def _insert_columns_and_source_(self):
with self._choice():
with self._option():
with self._optional():
self._left_paren_()
self._column_name_list_()
self._right_paren_()
self._query_expression_()
with self._option():
self._token('DEFAULT')
self._token('VALUES')
self._error('expecting one of: DEFAULT')
@graken()
def _set_clause_list_(self):
def sep0():
self._token(',')
def block0():
self._set_clause_()
self._positive_closure(block0, prefix=sep0)
@graken()
def _set_clause_(self):
self._identifier_()
self._equals_operator_()
self._update_source_()
@graken()
def _update_source_(self):
with self._choice():
with self._option():
self._value_expression_()
with self._option():
self._token('NULL')
with self._option():
self._token('DEFAULT')
self._error('expecting one of: DEFAULT NULL')
@graken()
def _update_statement_searched_(self):
self._token('UPDATE')
self._table_name_()
self._token('SET')
self._set_clause_list_()
with self._optional():
self._token('WHERE')
self._search_condition_()
@graken()
def _temporary_table_declaration_(self):
self._token('DECLARE')
self._token('LOCAL')
self._token('TEMPORARY')
self._token('TABLE')
self._qualified_local_table_name_()
self._table_element_list_()
with self._optional():
self._token('ON')
self._token('COMMIT')
with self._group():
with self._choice():
with self._option():
self._token('PRESERVE')
with self._option():
self._token('DELETE')
self._error('expecting one of: DELETE PRESERVE')
self._token('ROWS')
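    # Transaction control: SET TRANSACTION modes (isolation level, access
    # mode, diagnostics size), SET CONSTRAINTS, COMMIT, and ROLLBACK.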
@graken()
def _set_transaction_statement_(self):
self._token('SET')
self._token('TRANSACTION')
def sep0():
self._token(',')
def block0():
self._transaction_mode_()
self._positive_closure(block0, prefix=sep0)
@graken()
def _transaction_mode_(self):
with self._choice():
with self._option():
self._isolation_level_()
with self._option():
self._transaction_access_mode_()
with self._option():
self._diagnostics_size_()
self._error('no available options')
@graken()
def _transaction_access_mode_(self):
with self._choice():
with self._option():
self._token('READ')
self._token('ONLY')
with self._option():
self._token('READ')
self._token('WRITE')
self._error('expecting one of: READ')
@graken()
def _isolation_level_(self):
self._token('ISOLATION')
self._token('LEVEL')
self._level_of_isolation_()
@graken()
def _level_of_isolation_(self):
with self._choice():
with self._option():
self._token('READ')
self._token('UNCOMMITTED')
with self._option():
self._token('READ')
self._token('COMMITTED')
with self._option():
self._token('REPEATABLE')
self._token('READ')
with self._option():
self._token('SERIALIZABLE')
self._error('expecting one of: READ REPEATABLE SERIALIZABLE')
@graken()
def _diagnostics_size_(self):
self._token('DIAGNOSTICS')
self._token('SIZE')
self._simple_value_specification_()
@graken()
def _set_constraints_mode_statement_(self):
self._token('SET')
self._token('CONSTRAINTS')
self._constraint_name_list_()
with self._group():
with self._choice():
with self._option():
self._token('DEFERRED')
with self._option():
self._token('IMMEDIATE')
self._error('expecting one of: DEFERRED IMMEDIATE')
@graken()
def _constraint_name_list_(self):
with self._choice():
with self._option():
self._token('ALL')
with self._option():
def sep0():
self._token(',')
def block0():
self._schema_qualified_name_()
self._positive_closure(block0, prefix=sep0)
self._error('expecting one of: ALL')
@graken()
def _commit_statement_(self):
self._token('COMMIT')
with self._optional():
self._token('WORK')
@graken()
def _rollback_statement_(self):
self._token('ROLLBACK')
with self._optional():
self._token('WORK')
@graken()
def _connect_statement_(self):
self._token('CONNECT')
self._token('TO')
self._connection_target_()
@graken()
def _connection_target_(self):
with self._choice():
with self._option():
self._simple_value_specification_()
with self._optional():
self._token('AS')
self._connection_name_()
with self._optional():
self._token('USER')
self._simple_value_specification_()
with self._option():
self._token('DEFAULT')
self._error('expecting one of: DEFAULT')
@graken()
def _set_connection_statement_(self):
self._token('SET')
self._token('CONNECTION')
self._connection_object_()
@graken()
def _connection_object_(self):
with self._choice():
with self._option():
self._token('DEFAULT')
with self._option():
self._connection_name_()
self._error('expecting one of: DEFAULT')
@graken()
def _disconnect_statement_(self):
self._token('DISCONNECT')
self._disconnect_object_()
@graken()
def _disconnect_object_(self):
with self._choice():
with self._option():
self._connection_object_()
with self._option():
self._token('ALL')
with self._option():
self._token('CURRENT')
self._error('expecting one of: ALL CURRENT')
@graken()
def _set_catalog_statement_(self):
self._token('SET')
self._token('CATALOG')
self._value_specification_()
@graken()
def _set_schema_statement_(self):
self._token('SET')
self._token('SCHEMA')
self._value_specification_()
@graken()
def _set_names_statement_(self):
self._token('SET')
self._token('NAMES')
self._value_specification_()
@graken()
def _set_session_authorization_identifier_statement_(self):
self._token('SET')
self._token('SESSION')
self._token('AUTHORIZATION')
self._value_specification_()
@graken()
def _set_local_time_zone_statement_(self):
self._token('SET')
self._token('TIME')
self._token('ZONE')
self._set_time_zone_value_()
@graken()
def _set_time_zone_value_(self):
with self._choice():
with self._option():
self._interval_value_expression_()
with self._option():
self._token('LOCAL')
self._error('expecting one of: LOCAL')
@graken()
def _direct_sql_statement_(self):
self._directly_executable_statement_()
self._semicolon_()
@graken()
def _directly_executable_statement_(self):
with self._choice():
with self._option():
self._direct_sql_data_statement_()
with self._option():
self._sql_schema_statement_()
with self._option():
self._sql_transaction_statement_()
with self._option():
self._sql_connection_statement_()
with self._option():
self._sql_session_statement_()
self._error('no available options')
@graken()
def _direct_sql_data_statement_(self):
with self._choice():
with self._option():
self._delete_statement_searched_()
with self._option():
self._direct_select_statement_multiple_rows_()
with self._option():
self._insert_statement_()
with self._option():
self._update_statement_searched_()
with self._option():
self._temporary_table_declaration_()
self._error('no available options')
@graken()
def _direct_select_statement_multiple_rows_(self):
self._query_expression_()
with self._optional():
self._order_by_clause_()
@graken()
def _start_(self):
self._direct_sql_statement_()
self._check_eof()
class SqlSemantics(object):
def digit(self, ast):
return ast
def double_quote(self, ast):
return ast
def quote(self, ast):
return ast
def left_paren(self, ast):
return ast
def right_paren(self, ast):
return ast
def asterisk(self, ast):
return ast
def plus_sign(self, ast):
return ast
def comma(self, ast):
return ast
def minus_sign(self, ast):
return ast
def period(self, ast):
return ast
def solidus(self, ast):
return ast
def colon(self, ast):
return ast
def semicolon(self, ast):
return ast
def less_than_operator(self, ast):
return ast
def equals_operator(self, ast):
return ast
def greater_than_operator(self, ast):
return ast
def question_mark(self, ast):
return ast
def underscore(self, ast):
return ast
def regular_identifier(self, ast):
return ast
def delimited_identifier(self, ast):
return ast
def delimited_identifier_body(self, ast):
return ast
def not_equals_operator(self, ast):
return ast
def greater_than_or_equals_operator(self, ast):
return ast
def less_than_or_equals_operator(self, ast):
return ast
def concatenation_operator(self, ast):
return ast
def literal(self, ast):
return ast
def unsigned_literal(self, ast):
return ast
def general_literal(self, ast):
return ast
def character_string_literal(self, ast):
return ast
def character_representation(self, ast):
return ast
def national_character_string_literal(self, ast):
return ast
def bit_string_literal(self, ast):
return ast
def hex_string_literal(self, ast):
return ast
def bit(self, ast):
return ast
def hexit(self, ast):
return ast
def signed_numeric_literal(self, ast):
return ast
def unsigned_numeric_literal(self, ast):
return ast
def exact_numeric_literal(self, ast):
return ast
def sign(self, ast):
return ast
def approximate_numeric_literal(self, ast):
return ast
def signed_integer(self, ast):
return ast
def unsigned_integer(self, ast):
return ast
def datetime_literal(self, ast):
return ast
def date_literal(self, ast):
return ast
def time_literal(self, ast):
return ast
def timestamp_literal(self, ast):
return ast
def date_string(self, ast):
return ast
def time_string(self, ast):
return ast
def timestamp_string(self, ast):
return ast
def time_zone_interval(self, ast):
return ast
def date_value(self, ast):
return ast
def time_value(self, ast):
return ast
def interval_literal(self, ast):
return ast
def interval_string(self, ast):
return ast
def year_month_literal(self, ast):
return ast
def day_time_literal(self, ast):
return ast
def day_time_interval(self, ast):
return ast
def time_interval(self, ast):
return ast
def years_value(self, ast):
return ast
def months_value(self, ast):
return ast
def days_value(self, ast):
return ast
def hours_value(self, ast):
return ast
def minutes_value(self, ast):
return ast
def seconds_value(self, ast):
return ast
def datetime_value(self, ast):
return ast
def identifier(self, ast):
return ast
def identifier_list(self, ast):
return ast
def actual_identifier(self, ast):
return ast
def table_name(self, ast):
return ast
def qualified_local_table_name(self, ast):
return ast
def schema_name(self, ast):
return ast
def schema_qualified_name(self, ast):
return ast
def parameter_name(self, ast):
return ast
def character_set_name(self, ast):
return ast
def connection_name(self, ast):
return ast
def data_type(self, ast):
return ast
def character_string_type(self, ast):
return ast
def national_character_string_type(self, ast):
return ast
def bit_string_type(self, ast):
return ast
def numeric_type(self, ast):
return ast
def exact_numeric_type(self, ast):
return ast
def approximate_numeric_type(self, ast):
return ast
def length(self, ast):
return ast
def precision(self, ast):
return ast
def scale(self, ast):
return ast
def datetime_type(self, ast):
return ast
def interval_type(self, ast):
return ast
def value_specification(self, ast):
return ast
def unsigned_value_specification(self, ast):
return ast
def general_value_specification(self, ast):
return ast
def simple_value_specification(self, ast):
return ast
def parameter_specification(self, ast):
return ast
def indicator_parameter(self, ast):
return ast
def table_reference(self, ast):
return ast
def column_name_list(self, ast):
return ast
def column_reference(self, ast):
return ast
def qualifier(self, ast):
return ast
def set_function_specification(self, ast):
return ast
def general_set_function(self, ast):
return ast
def set_function_type(self, ast):
return ast
def set_quantifier(self, ast):
return ast
def numeric_value_function(self, ast):
return ast
def position_expression(self, ast):
return ast
def length_expression(self, ast):
return ast
def char_length_expression(self, ast):
return ast
def octet_length_expression(self, ast):
return ast
def bit_length_expression(self, ast):
return ast
def extract_expression(self, ast):
return ast
def extract_field(self, ast):
return ast
def time_zone_field(self, ast):
return ast
def extract_source(self, ast):
return ast
def string_value_function(self, ast):
return ast
def character_value_function(self, ast):
return ast
def character_substring_function(self, ast):
return ast
def fold(self, ast):
return ast
def form_of_use_conversion(self, ast):
return ast
def character_translation(self, ast):
return ast
def trim_function(self, ast):
return ast
def trim_operands(self, ast):
return ast
def trim_specification(self, ast):
return ast
def bit_substring_function(self, ast):
return ast
def start_position(self, ast):
return ast
def string_length(self, ast):
return ast
def datetime_value_function(self, ast):
return ast
def current_time_value_function(self, ast):
return ast
def current_timestamp_value_function(self, ast):
return ast
def case_expression(self, ast):
return ast
def case_abbreviation(self, ast):
return ast
def case_specification(self, ast):
return ast
def simple_case(self, ast):
return ast
def searched_case(self, ast):
return ast
def simple_when_clause(self, ast):
return ast
def searched_when_clause(self, ast):
return ast
def else_clause(self, ast):
return ast
def result(self, ast):
return ast
def cast_specification(self, ast):
return ast
def cast_operand(self, ast):
return ast
def cast_target(self, ast):
return ast
def value_expression(self, ast):
return ast
def value_expression_primary(self, ast):
return ast
def numeric_value_expression(self, ast):
return ast
def term(self, ast):
return ast
def factor(self, ast):
return ast
def numeric_primary(self, ast):
return ast
def string_value_expression(self, ast):
return ast
def character_value_expression(self, ast):
return ast
def concatenation(self, ast):
return ast
def character_factor(self, ast):
return ast
def character_primary(self, ast):
return ast
def bit_value_expression(self, ast):
return ast
def bit_concatenation(self, ast):
return ast
def bit_factor(self, ast):
return ast
def bit_primary(self, ast):
return ast
def datetime_value_expression(self, ast):
return ast
def datetime_term(self, ast):
return ast
def datetime_factor(self, ast):
return ast
def datetime_primary(self, ast):
return ast
def time_zone(self, ast):
return ast
def time_zone_specifier(self, ast):
return ast
def interval_value_expression(self, ast):
return ast
def interval_term(self, ast):
return ast
def interval_factor(self, ast):
return ast
def interval_primary(self, ast):
return ast
def interval_value_expression_1(self, ast):
return ast
def interval_term_1(self, ast):
return ast
def interval_term_2(self, ast):
return ast
def row_value_constructor(self, ast):
return ast
def row_value_constructor_list(self, ast):
return ast
def row_value_constructor_element(self, ast):
return ast
def table_value_constructor(self, ast):
return ast
def table_value_constructor_list(self, ast):
return ast
def table_expression(self, ast):
return ast
def from_clause(self, ast):
return ast
def joined_table(self, ast):
return ast
def cross_join(self, ast):
return ast
def qualified_join(self, ast):
return ast
def join_specification(self, ast):
return ast
def join_condition(self, ast):
return ast
def named_columns_join(self, ast):
return ast
def join_type(self, ast):
return ast
def outer_join_type(self, ast):
return ast
def where_clause(self, ast):
return ast
def group_by_clause(self, ast):
return ast
def grouping_column_reference_list(self, ast):
return ast
def grouping_column_reference(self, ast):
return ast
def having_clause(self, ast):
return ast
def query_specification(self, ast):
return ast
def select_list(self, ast):
return ast
def select_sublist(self, ast):
return ast
def derived_column(self, ast):
return ast
def as_clause(self, ast):
return ast
def query_expression(self, ast):
return ast
def non_join_query_expression(self, ast):
return ast
def query_term(self, ast):
return ast
def non_join_query_term(self, ast):
return ast
def query_primary(self, ast):
return ast
def non_join_query_primary(self, ast):
return ast
def simple_table(self, ast):
return ast
def explicit_table(self, ast):
return ast
def corresponding_spec(self, ast):
return ast
def subquery(self, ast):
return ast
def predicate(self, ast):
return ast
def comparison_predicate(self, ast):
return ast
def comp_op(self, ast):
return ast
def between_predicate(self, ast):
return ast
def in_predicate(self, ast):
return ast
def in_predicate_value(self, ast):
return ast
def in_value_list(self, ast):
return ast
def like_predicate(self, ast):
return ast
def null_predicate(self, ast):
return ast
def quantified_comparison_predicate(self, ast):
return ast
def quantifier(self, ast):
return ast
def some(self, ast):
return ast
def exists_predicate(self, ast):
return ast
def unique_predicate(self, ast):
return ast
def match_predicate(self, ast):
return ast
def overlaps_predicate(self, ast):
return ast
def search_condition(self, ast):
return ast
def boolean_term(self, ast):
return ast
def boolean_factor(self, ast):
return ast
def boolean_test(self, ast):
return ast
def truth_value(self, ast):
return ast
def boolean_primary(self, ast):
return ast
def interval_qualifier(self, ast):
return ast
def start_field(self, ast):
return ast
def end_field(self, ast):
return ast
def single_datetime_field(self, ast):
return ast
def datetime_field(self, ast):
return ast
def non_second_datetime_field(self, ast):
return ast
def privileges(self, ast):
return ast
def action_list(self, ast):
return ast
def action(self, ast):
return ast
def grantee(self, ast):
return ast
def collate_clause(self, ast):
return ast
def constraint_name_definition(self, ast):
return ast
def constraint_attributes(self, ast):
return ast
def constraint_check_time(self, ast):
return ast
def schema_definition(self, ast):
return ast
def schema_name_clause(self, ast):
return ast
def schema_character_set_specification(self, ast):
return ast
def schema_element(self, ast):
return ast
def drop_schema_statement(self, ast):
return ast
def drop_behavior(self, ast):
return ast
def table_definition(self, ast):
return ast
def table_element_list(self, ast):
return ast
def table_element(self, ast):
return ast
def column_definition(self, ast):
return ast
def column_constraint_definition(self, ast):
return ast
def column_constraint(self, ast):
return ast
def default_clause(self, ast):
return ast
def default_option(self, ast):
return ast
def table_constraint_definition(self, ast):
return ast
def table_constraint(self, ast):
return ast
def unique_constraint_definition(self, ast):
return ast
def unique_specification(self, ast):
return ast
def referential_constraint_definition(self, ast):
return ast
def references_specification(self, ast):
return ast
def match_type(self, ast):
return ast
def referenced_table_and_columns(self, ast):
return ast
def referential_triggered_action(self, ast):
return ast
def update_rule(self, ast):
return ast
def delete_rule(self, ast):
return ast
def referential_action(self, ast):
return ast
def check_constraint_definition(self, ast):
return ast
def alter_table_statement(self, ast):
return ast
def alter_table_action(self, ast):
return ast
def add_column_definition(self, ast):
return ast
def alter_column_definition(self, ast):
return ast
def alter_column_action(self, ast):
return ast
def set_column_default_clause(self, ast):
return ast
def drop_column_default_clause(self, ast):
return ast
def drop_column_definition(self, ast):
return ast
def add_table_constraint_definition(self, ast):
return ast
def drop_table_constraint_definition(self, ast):
return ast
def drop_table_statement(self, ast):
return ast
def view_definition(self, ast):
return ast
def levels_clause(self, ast):
return ast
def drop_view_statement(self, ast):
return ast
def domain_definition(self, ast):
return ast
def domain_constraint(self, ast):
return ast
def alter_domain_statement(self, ast):
return ast
def alter_domain_action(self, ast):
return ast
def set_domain_default_clause(self, ast):
return ast
def drop_domain_default_clause(self, ast):
return ast
def add_domain_constraint_definition(self, ast):
return ast
def drop_domain_constraint_definition(self, ast):
return ast
def drop_domain_statement(self, ast):
return ast
def character_set_definition(self, ast):
return ast
def character_set_source(self, ast):
return ast
def limited_collation_definition(self, ast):
return ast
def drop_character_set_statement(self, ast):
return ast
def collation_definition(self, ast):
return ast
def pad_attribute(self, ast):
return ast
def collation_source(self, ast):
return ast
def collating_sequence_definition(self, ast):
return ast
def translation_collation(self, ast):
return ast
def external_collation(self, ast):
return ast
def drop_collation_statement(self, ast):
return ast
def translation_definition(self, ast):
return ast
def translation_specification(self, ast):
return ast
def external_translation(self, ast):
return ast
def drop_translation_statement(self, ast):
return ast
def assertion_definition(self, ast):
return ast
def assertion_check(self, ast):
return ast
def drop_assertion_statement(self, ast):
return ast
def grant_statement(self, ast):
return ast
def object_name(self, ast):
return ast
def revoke_statement(self, ast):
return ast
def sql_schema_statement(self, ast):
return ast
def sql_schema_definition_statement(self, ast):
return ast
def sql_schema_manipulation_statement(self, ast):
return ast
def sql_transaction_statement(self, ast):
return ast
def sql_connection_statement(self, ast):
return ast
def sql_session_statement(self, ast):
return ast
def order_by_clause(self, ast):
return ast
def sort_specification_list(self, ast):
return ast
def sort_specification(self, ast):
return ast
def sort_key(self, ast):
return ast
def ordering_specification(self, ast):
return ast
def delete_statement_searched(self, ast):
return ast
def insert_statement(self, ast):
return ast
def insert_columns_and_source(self, ast):
return ast
def set_clause_list(self, ast):
return ast
def set_clause(self, ast):
return ast
def update_source(self, ast):
return ast
def update_statement_searched(self, ast):
return ast
def temporary_table_declaration(self, ast):
return ast
def set_transaction_statement(self, ast):
return ast
def transaction_mode(self, ast):
return ast
def transaction_access_mode(self, ast):
return ast
def isolation_level(self, ast):
return ast
def level_of_isolation(self, ast):
return ast
def diagnostics_size(self, ast):
return ast
def set_constraints_mode_statement(self, ast):
return ast
def constraint_name_list(self, ast):
return ast
def commit_statement(self, ast):
return ast
def rollback_statement(self, ast):
return ast
def connect_statement(self, ast):
return ast
def connection_target(self, ast):
return ast
def set_connection_statement(self, ast):
return ast
def connection_object(self, ast):
return ast
def disconnect_statement(self, ast):
return ast
def disconnect_object(self, ast):
return ast
def set_catalog_statement(self, ast):
return ast
def set_schema_statement(self, ast):
return ast
def set_names_statement(self, ast):
return ast
def set_session_authorization_identifier_statement(self, ast):
return ast
def set_local_time_zone_statement(self, ast):
return ast
def set_time_zone_value(self, ast):
return ast
def direct_sql_statement(self, ast):
return ast
def directly_executable_statement(self, ast):
return ast
def direct_sql_data_statement(self, ast):
return ast
def direct_select_statement_multiple_rows(self, ast):
return ast
def start(self, ast):
return ast
def main(
filename,
startrule,
trace=False,
whitespace=None,
nameguard=None,
comments_re='/\\*[\\s\\S]*?\\*/',
eol_comments_re='--.*?$',
ignorecase=True,
left_recursion=True,
**kwargs):
with open(filename) as f:
text = f.read()
whitespace = whitespace or '\\s+'
parser = SqlParser(parseinfo=False)
ast = parser.parse(
text,
startrule,
filename=filename,
trace=trace,
whitespace=whitespace,
nameguard=nameguard,
ignorecase=ignorecase,
**kwargs)
return ast
if __name__ == '__main__':
import json
ast = generic_main(main, SqlParser, name='Sql')
print('AST:')
print(ast)
print()
print('JSON:')
print(json.dumps(ast, indent=2))
print()
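# Example invocation (illustrative; the script and file names are assumptions,
# and the exact CLI flags come from grako's generic_main helper):
#   python sql_parser.py query.sql start
# This parses the statement in query.sql from the 'start' rule, prints the
# raw AST, then prints a JSON rendering of it.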
| mit | 5,738,808,319,765,428,000 | 26.315351 | 93 | 0.502264 | false |
RyanSkraba/beam | .test-infra/junitxml_report.py | 1 | 2089 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Parses and extracts data from JUnitXML format files.
Example usage, comparing nosetests and pytest test collection:
$ cd sdks/python
$ rm *.xml
$ tox --recreate -e py27-gcp
$ tox --recreate -e py27-gcp-pytest
$ python3 ../../.test-infra/junitxml_report.py nosetests*.xml | sort -u > nosetests.out
$ python3 ../../.test-infra/junitxml_report.py pytest*.xml | sort -u > pytest.out
$ diff -u nosetests.out pytest.out | less
"""
import sys
import xml.etree.ElementTree as et
def print_testsuite(testsuite):
assert testsuite.tag == 'testsuite'
for testcase in testsuite:
assert testcase.tag == 'testcase'
attrib = testcase.attrib
status = ''
for child in testcase:
if child.tag == 'skipped':
assert status == ''
status = 'S'
elif child.tag in ['system-err', 'system-out']:
pass
else:
raise NotImplementedError('tag not supported: %s' % child.tag)
print('%s.%s %s' % (attrib['classname'], attrib['name'], status))
def process_xml(filename):
tree = et.parse(filename)
root = tree.getroot()
if root.tag == 'testsuites':
for testsuite in root:
print_testsuite(testsuite)
else:
print_testsuite(root)
def main():
for filename in sys.argv[1:]:
process_xml(filename)
if __name__ == '__main__':
main() | apache-2.0 | -5,577,910,759,703,215,000 | 30.19403 | 87 | 0.695548 | false |
procool/mygw | web/apps/admin/views.py | 1 | 1195 | import logging
import datetime
from sqlalchemy import func, and_, or_, not_
# flask's session is aliased so it does not shadow the SQLAlchemy session imported below
from flask import url_for, session as flask_session
from misc.mixins import myTemplateView, JSONView
from utils.arp_list import get_mac_by_ip
from models.all_models import InetEther
from models.session import session
from utils.server.http_client import HTTPClient
from libs.pfctl import PFCtl
from auth import LoginRequiredMixin, LoginRequiredRedirectMixin
class adminView(LoginRequiredMixin, myTemplateView):
template='admin/admin-ajax.tpl'
class statusView(LoginRequiredMixin, myTemplateView):
template='admin/status-ajax.tpl'
class shutdownView(LoginRequiredMixin, JSONView):
__ctlsrv = HTTPClient(port=6999)
def get_context_data(self, **kwargs):
context = super(shutdownView, self).get_context_data(**kwargs)
cmd = self.__cmd == 'poweroff' and 'poweroff' or 'reboot'
r = self.__ctlsrv.call_handler('system/%s' % cmd)
context['result'] = r
return context
def dispatch(self, request, command, *args, **kwargs):
self.__cmd = command.lower()
return super(shutdownView, self).dispatch(self, request, *args, **kwargs)
| bsd-2-clause | 7,223,317,303,329,078,000 | 28.146341 | 81 | 0.706276 | false |
skipmodea1/plugin.video.xbmctorrent | resources/site-packages/xbmcswift2/listitem.py | 1 | 7446 | '''
xbmcswift2.listitem
------------------
This module contains the ListItem class, which acts as a wrapper
for xbmcgui.ListItem.
:copyright: (c) 2012 by Jonathan Beluch
:license: GPLv3, see LICENSE for more details.
'''
from xbmcswift2 import xbmcgui
class ListItem(object):
'''A wrapper for the xbmcgui.ListItem class. The class keeps track
of any set properties that xbmcgui doesn't expose getters for.
'''
def __init__(self, label=None, label2=None, icon=None, thumbnail=None,
path=None):
        '''Defaults are an empty string since xbmcgui.ListItem will not
accept None.
'''
kwargs = {
'label': label,
'label2': label2,
'iconImage': icon,
'thumbnailImage': thumbnail,
'path': path,
}
#kwargs = dict((key, val) for key, val in locals().items() if val is
#not None and key != 'self')
kwargs = dict((key, val) for key, val in kwargs.items()
if val is not None)
self._listitem = xbmcgui.ListItem(**kwargs)
# xbmc doesn't make getters available for these properties so we'll
# keep track on our own
self._icon = icon
self._path = path
self._thumbnail = thumbnail
self._context_menu_items = []
self.is_folder = True
self._played = False
def __repr__(self):
return ("<ListItem '%s'>" % self.label).encode('utf-8')
def __str__(self):
return ('%s (%s)' % (self.label, self.path)).encode('utf-8')
def get_context_menu_items(self):
'''Returns the list of currently set context_menu items.'''
return self._context_menu_items
def add_context_menu_items(self, items, replace_items=False):
'''Adds context menu items. If replace_items is True all
previous context menu items will be removed.
'''
for label, action in items:
assert isinstance(label, basestring)
assert isinstance(action, basestring)
if replace_items:
self._context_menu_items = []
self._context_menu_items.extend(items)
self._listitem.addContextMenuItems(items, replace_items)
        # dirty hack: flag the item as video content so skins treat it as such
        # (setInfo expects a media type plus a dict of info labels)
        self._listitem.setInfo("video", {"mediatype": "video"})
    def get_label(self):
        '''Returns the listitem's label'''
        return self._listitem.getLabel()
    def set_label(self, label):
        '''Sets the listitem's label'''
        return self._listitem.setLabel(label)
label = property(get_label, set_label)
def get_label2(self):
'''Returns the listitem's label2'''
return self._listitem.getLabel2()
def set_label2(self, label):
'''Sets the listitem's label2'''
return self._listitem.setLabel2(label)
label2 = property(get_label2, set_label2)
def is_selected(self):
'''Returns True if the listitem is selected.'''
return self._listitem.isSelected()
def select(self, selected_status=True):
'''Sets the listitems selected status to the provided value.
Defaults to True.
'''
return self._listitem.select(selected_status)
selected = property(is_selected, select)
def set_info(self, type, info_labels):
'''Sets the listitems info'''
return self._listitem.setInfo(type, info_labels)
def get_property(self, key):
'''Returns the property associated with the given key'''
return self._listitem.getProperty(key)
def set_property(self, key, value):
'''Sets a property for the given key and value'''
return self._listitem.setProperty(key, value)
def add_stream_info(self, stream_type, stream_values):
'''Adds stream details'''
return self._listitem.addStreamInfo(stream_type, stream_values)
def get_icon(self):
'''Returns the listitem's icon image'''
return self._icon
def set_icon(self, icon):
'''Sets the listitem's icon image'''
self._icon = icon
return self._listitem.setIconImage(icon)
icon = property(get_icon, set_icon)
def get_thumbnail(self):
'''Returns the listitem's thumbnail image'''
return self._thumbnail
def set_thumbnail(self, thumbnail):
'''Sets the listitem's thumbnail image'''
self._thumbnail = thumbnail
return self._listitem.setThumbnailImage(thumbnail)
thumbnail = property(get_thumbnail, set_thumbnail)
def get_path(self):
'''Returns the listitem's path'''
return self._path
def set_path(self, path):
'''Sets the listitem's path'''
self._path = path
return self._listitem.setPath(path)
path = property(get_path, set_path)
def get_is_playable(self):
'''Returns True if the listitem is playable, False if it is a
directory
'''
return not self.is_folder
def set_is_playable(self, is_playable):
'''Sets the listitem's playable flag'''
value = 'false'
if is_playable:
value = 'true'
self.set_property('isPlayable', value)
self.is_folder = not is_playable
playable = property(get_is_playable, set_is_playable)
def set_played(self, was_played):
        '''Sets the played status of the listitem. Used to
        differentiate between a resolved video and a playable item.
        Has no effect on XBMC; it is strictly used by xbmcswift2.
'''
self._played = was_played
def get_played(self):
'''Returns True if the video was played.'''
return self._played
def as_tuple(self):
'''Returns a tuple of list item properties:
(path, the wrapped xbmcgui.ListItem, is_folder)
'''
return self.path, self._listitem, self.is_folder
def as_xbmc_listitem(self):
'''Returns the wrapped xbmcgui.ListItem'''
return self._listitem
@classmethod
def from_dict(cls, label=None, label2=None, icon=None, thumbnail=None,
path=None, selected=None, info=None, properties=None,
context_menu=None, replace_context_menu=False,
is_playable=None, info_type='video', stream_info=None):
'''A ListItem constructor for setting a lot of properties not
available in the regular __init__ method. Useful to collect all
the properties in a dict and then use the **dct to call this
method.
'''
listitem = cls(label, label2, icon, thumbnail, path)
if selected is not None:
listitem.select(selected)
if info:
listitem.set_info(info_type, info)
if is_playable:
listitem.set_is_playable(True)
listitem.set_info("video", {"mediatype": "video"})
if properties:
# Need to support existing tuples, but prefer to have a dict for
# properties.
if hasattr(properties, 'items'):
properties = properties.items()
for key, val in properties:
listitem.set_property(key, val)
if stream_info:
for stream_type, stream_values in stream_info.items():
listitem.add_stream_info(stream_type, stream_values)
if context_menu:
listitem.add_context_menu_items(context_menu, replace_context_menu)
return listitem
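# Usage sketch (illustrative values only; not part of the original module):
#   item = ListItem.from_dict(label='Big Buck Bunny',
#                             path='plugin://my.addon/play/1',
#                             is_playable=True,
#                             context_menu=[('Queue', 'Action(Queue)')])
#   url, xbmc_item, is_folder = item.as_tuple()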
| gpl-3.0 | -4,435,313,053,114,563,600 | 32.241071 | 79 | 0.60274 | false |
markkorput/py2030 | tests/test_omxvideo.py | 1 | 7634 | #!/usr/bin/env python
import unittest
from py2030.components.omxvideo import OmxVideo
from py2030.event_manager import EventManager
class TestOmxVideo(unittest.TestCase):
def test_init(self):
omxvideo = OmxVideo()
self.assertEqual(omxvideo.player, None)
self.assertIsNone(omxvideo.event_manager)
def test_args_option(self):
# default; black background (to hide console) and disable OSD
self.assertEqual(OmxVideo().args, ['--no-osd', '-b'])
# customizable through 'args' option
args = ['--no-osd', '-adev', 'both', '-b', '--loop']
omxvideo = OmxVideo({'args': args})
self.assertEqual(omxvideo.args, args)
def test_setup(self):
omxvideo = OmxVideo()
em = EventManager()
omxvideo.setup(em)
self.assertEqual(omxvideo.event_manager, em)
def test_setup_doesnt_require_event_manager(self):
omxvideo = OmxVideo()
omxvideo.setup()
self.assertIsNone(omxvideo.event_manager)
def test_input_event_play(self):
omxvideo = OmxVideo({'input_events': {'play_event': 'play'}})
em = EventManager()
        self.assertEqual(len(em.get('play_event')), 0) # not yet registered
omxvideo.setup(em)
self.assertEqual(len(em.get('play_event')), 1) # registered
        self.assertEqual(omxvideo.playEvent._fireCount, 0) # not fired yet
omxvideo.event_manager.fire('play_event')
self.assertEqual(omxvideo.playEvent._fireCount, 1) # fired
omxvideo.destroy()
self.assertEqual(len(em.get('play_event')), 0) # unregistered
def test_input_event_pause(self):
omxvideo = OmxVideo({'input_events': {'pause_event': 'pause'}})
em = EventManager()
self.assertEqual(len(em.get('pause_event')), 0)
omxvideo.setup(em)
self.assertEqual(len(em.get('pause_event')), 1) # registered
self.assertEqual(omxvideo.pauseEvent._fireCount, 0)
omxvideo.event_manager.fire('pause_event')
self.assertEqual(omxvideo.pauseEvent._fireCount, 1)
omxvideo.destroy()
self.assertEqual(len(em.get('pause_event')), 0) # unregistered
def test_input_event_toggle(self):
omxvideo = OmxVideo({'input_events': {'toggle_event': 'toggle'}})
em = EventManager()
self.assertEqual(len(em.get('toggle_event')), 0)
omxvideo.setup(em)
self.assertEqual(len(em.get('toggle_event')), 1) # registered
self.assertEqual(omxvideo.toggleEvent._fireCount, 0)
omxvideo.event_manager.fire('toggle_event')
self.assertEqual(omxvideo.toggleEvent._fireCount, 1)
omxvideo.destroy()
self.assertEqual(len(em.get('toggle_event')), 0) # unregistered
def test_input_event_stop(self):
omxvideo = OmxVideo({'input_events': {'stop_event': 'stop'}})
em = EventManager()
self.assertEqual(len(em.get('stop_event')), 0)
omxvideo.setup(em)
self.assertEqual(len(em.get('stop_event')), 1) # registered
self.assertEqual(omxvideo.stopEvent._fireCount, 0)
omxvideo.event_manager.fire('stop_event')
self.assertEqual(omxvideo.stopEvent._fireCount, 1)
omxvideo.destroy()
self.assertEqual(len(em.get('stop_event')), 0) # unregistered
def test_input_event_start(self):
omxvideo = OmxVideo({'input_events': {'start_event': 'start'}})
em = EventManager()
self.assertEqual(len(em.get('start_event')), 0)
omxvideo.setup(em)
self.assertEqual(len(em.get('start_event')), 1) # registered
self.assertEqual(omxvideo.startEvent._fireCount, 0)
omxvideo.event_manager.fire('start_event') # fire without params
self.assertEqual(omxvideo.startEvent._fireCount, 1) # performed
omxvideo.event_manager.get('start_event').fire(3) # fire with number param
self.assertEqual(omxvideo.startEvent._fireCount, 2) # performed again
omxvideo.destroy()
self.assertEqual(len(em.get('start_event')), 0) # unregistered
def test_input_event_load(self):
omxvideo = OmxVideo({'input_events': {'load_event': 'load'}, 'playlist': ['1', '2', '3', '4']})
em = EventManager()
self.assertEqual(len(em.get('load_event')), 0)
omxvideo.setup(em)
self.assertEqual(len(em.get('load_event')), 1) # registered
self.assertEqual(omxvideo.loadEvent._fireCount, 0)
omxvideo.event_manager.fire('load_event') # fire without params
self.assertEqual(omxvideo.loadEvent._fireCount, 1) # performed
omxvideo.event_manager.get('load_event').fire(3) # fire with number param
self.assertEqual(omxvideo.loadEvent._fireCount, 2) # performed again
omxvideo.destroy()
self.assertEqual(len(em.get('load_event')), 0) # unregistered
def test_input_event_seek(self):
omxvideo = OmxVideo({'input_events': {'seek_event': 'seek'}})
em = EventManager()
self.assertEqual(len(em.get('seek_event')), 0)
omxvideo.setup(em)
self.assertEqual(len(em.get('seek_event')), 1) # registered
self.assertEqual(omxvideo.seekEvent._fireCount, 0)
omxvideo.event_manager.fire('seek_event') # fire without params
self.assertEqual(omxvideo.seekEvent._fireCount, 1) # performed
omxvideo.event_manager.get('seek_event').fire(3) # fire with number param
self.assertEqual(omxvideo.seekEvent._fireCount, 2) # performed again
omxvideo.destroy()
self.assertEqual(len(em.get('seek_event')), 0) # unregistered
def test_multiple_input_events(self):
input_events = {
'play_event1': 'play',
'play_event2': 'play',
'pause_event1': 'pause',
'pause_event2': 'pause',
'toggle_event1': 'toggle',
'toggle_event2': 'toggle',
'stop_event1': 'stop',
'stop_event2': 'stop',
'load_event1': 'load',
'load_event2': 'load',
'start_event1': 'start',
'start_event2': 'start',
'seek_event1': 'seek',
'seek_event2': 'seek'
}
omxvideo = OmxVideo({'input_events': input_events, 'playlist': ['1']})
em = EventManager()
for name in input_events.keys():
self.assertEqual(len(em.get(name)), 0) # not yet registered
omxvideo.setup(em)
for name in input_events.keys():
self.assertEqual(len(em.get(name)), 1) # registered
self.assertEqual(omxvideo.playEvent._fireCount, 0)
self.assertEqual(omxvideo.pauseEvent._fireCount, 0)
self.assertEqual(omxvideo.toggleEvent._fireCount, 0)
self.assertEqual(omxvideo.stopEvent._fireCount, 0)
self.assertEqual(omxvideo.startEvent._fireCount, 0)
self.assertEqual(omxvideo.loadEvent._fireCount, 0)
self.assertEqual(omxvideo.seekEvent._fireCount, 0)
for name in input_events.keys():
omxvideo.event_manager.fire(name)
self.assertEqual(omxvideo.playEvent._fireCount, 4) # the two 'start' actions also call play
self.assertEqual(omxvideo.pauseEvent._fireCount, 2)
self.assertEqual(omxvideo.toggleEvent._fireCount, 2)
self.assertEqual(omxvideo.stopEvent._fireCount, 2)
self.assertEqual(omxvideo.startEvent._fireCount, 2)
self.assertEqual(omxvideo.loadEvent._fireCount, 4) # the two start actions also load
self.assertEqual(omxvideo.seekEvent._fireCount, 2)
omxvideo.destroy()
for name in input_events.keys():
self.assertEqual(len(em.get(name)), 0) # unregistered
| mit | -3,664,988,029,826,936,300 | 43.127168 | 103 | 0.631124 | false |
hcuffy/concourse | concourse-driver-python/tests/utils_tests.py | 1 | 2664 | # Copyright (c) 2015 Cinchapi Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from nose.tools import *
import string
import random
from concourse.utils import *
class TestUtils(object):
@staticmethod
def generate_random_string(size=6, chars=string.ascii_uppercase + string.digits):
return ''.join(random.choice(chars) for _ in range(size))
def test_convert_string_roundtrip(self):
orig = TestUtils.generate_random_string()
assert_equals(orig, thrift_to_python(python_to_thrift(orig)))
def test_convert_tag_roundtrip(self):
orig = Tag.create(TestUtils.generate_random_string())
assert_equals(orig, thrift_to_python(python_to_thrift(orig)))
def test_convert_int_roundtrip(self):
orig = 100
assert_equals(orig, thrift_to_python(python_to_thrift(orig)))
def test_convert_long_roundtrip(self):
orig = 2147483648
assert_equals(orig, thrift_to_python(python_to_thrift(orig)))
def test_convert_link_roundtrip(self):
orig = Link.to(2147483648)
assert_equals(orig, thrift_to_python(python_to_thrift(orig)))
def test_convert_boolean_roundtrip(self):
orig = False
assert_equals(orig, thrift_to_python(python_to_thrift(orig)))
def test_convert_float_roundtrip(self):
orig = 3.14353
assert_equals(orig, thrift_to_python(python_to_thrift(orig)))
def test_find_in_kwargs_bad_key(self):
value = find_in_kwargs_by_alias('foo', {})
assert_is_none(value)
def test_find_in_kwargs_criteria(self):
kwargs = {
'ccl': 'foo'
}
value = find_in_kwargs_by_alias('criteria', kwargs)
assert_equals('foo', value)
kwargs = {
'query': 'foo'
}
value = find_in_kwargs_by_alias('criteria', kwargs)
assert_equals('foo', value)
kwargs = {
'where': 'foo'
}
value = find_in_kwargs_by_alias('criteria', kwargs)
assert_equals('foo', value)
kwargs = {
'foo': 'foo'
}
value = find_in_kwargs_by_alias('criteria', kwargs)
assert_is_none(value)
| apache-2.0 | -8,572,895,711,197,043,000 | 33.153846 | 85 | 0.646772 | false |
gumblex/zhconv | zhconv/zhconv.py | 1 | 19496 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This module implements a simple conversion and localization between simplified and traditional Chinese using tables from MediaWiki.
It doesn't contain a segmentation function; it uses maximal forward matching, so it's simple.
For a complete and accurate solution, see OpenCC.
For Chinese segmentation, see Jieba.
>>> print(convert('我幹什麼不干你事。', 'zh-cn'))
我干什么不干你事。
>>> print(convert('人体内存在很多微生物', 'zh-tw'))
人體內存在很多微生物
Supports MediaWiki's conversion format:
>>> print(convert_for_mw('在现代,机械计算-{}-机的应用已经完全被电子计算-{}-机所取代', 'zh-hk'))
在現代,機械計算機的應用已經完全被電子計算機所取代
>>> print(convert_for_mw('-{zh-hant:資訊工程;zh-hans:计算机工程学;}-是电子工程的一个分支,主要研究计算机软硬件和二者间的彼此联系。', 'zh-tw'))
資訊工程是電子工程的一個分支,主要研究計算機軟硬體和二者間的彼此聯繫。
>>> print(convert_for_mw('張國榮曾在英國-{zh:利兹;zh-hans:利兹;zh-hk:列斯;zh-tw:里茲}-大学學習。', 'zh-sg'))
张国荣曾在英国利兹大学学习。
"""
# Only Python3 can pass the doctest here due to unicode problems.
__version__ = '1.4.1'
import os
import sys
import re
import json
try:
from pkg_resources import resource_stream
get_module_res = lambda *res: resource_stream(__name__, os.path.join(*res))
except ImportError:
get_module_res = lambda *res: open(os.path.normpath(
os.path.join(os.getcwd(), os.path.dirname(__file__), *res)), 'rb')
# Locale fallback order lookup dictionary
Locales = {
'zh-cn': ('zh-cn', 'zh-hans', 'zh-sg', 'zh'),
'zh-hk': ('zh-hk', 'zh-hant', 'zh-tw', 'zh'),
'zh-tw': ('zh-tw', 'zh-hant', 'zh-hk', 'zh'),
'zh-sg': ('zh-sg', 'zh-hans', 'zh-cn', 'zh'),
'zh-my': ('zh-my', 'zh-sg', 'zh-hans', 'zh-cn', 'zh'),
'zh-mo': ('zh-mo', 'zh-hk', 'zh-hant', 'zh-tw', 'zh'),
'zh-hant': ('zh-hant', 'zh-tw', 'zh-hk', 'zh'),
'zh-hans': ('zh-hans', 'zh-cn', 'zh-sg', 'zh'),
'zh': ('zh',) # special value for no conversion
}
_DEFAULT_DICT = "zhcdict.json"
DICTIONARY = _DEFAULT_DICT
zhcdicts = None
dict_zhcn = None
dict_zhsg = None
dict_zhtw = None
dict_zhhk = None
pfsdict = {}
RE_langconv = re.compile(r'(-\{|\}-)')
RE_splitflag = re.compile(r'\s*\|\s*')
RE_splitmap = re.compile(r'\s*;\s*')
RE_splituni = re.compile(r'\s*=>\s*')
RE_splitpair = re.compile(r'\s*:\s*')
def loaddict(filename=DICTIONARY):
"""
Load the dictionary from a specific JSON file.
"""
global zhcdicts
if zhcdicts:
return
if filename == _DEFAULT_DICT:
zhcdicts = json.loads(get_module_res(filename).read().decode('utf-8'))
else:
with open(filename, 'rb') as f:
zhcdicts = json.loads(f.read().decode('utf-8'))
zhcdicts['SIMPONLY'] = frozenset(zhcdicts['SIMPONLY'])
zhcdicts['TRADONLY'] = frozenset(zhcdicts['TRADONLY'])
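# Example (hypothetical path): load a custom conversion table instead of the
# bundled one before any convert() call:
#   loaddict('/path/to/custom_zhcdict.json')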
def getdict(locale):
"""
    Generate or get the conversion dict cache for a certain locale.
Dictionaries are loaded on demand.
"""
global zhcdicts, dict_zhcn, dict_zhsg, dict_zhtw, dict_zhhk, pfsdict
if zhcdicts is None:
loaddict(DICTIONARY)
if locale == 'zh-cn':
if dict_zhcn:
got = dict_zhcn
else:
dict_zhcn = zhcdicts['zh2Hans'].copy()
dict_zhcn.update(zhcdicts['zh2CN'])
got = dict_zhcn
elif locale == 'zh-tw':
if dict_zhtw:
got = dict_zhtw
else:
dict_zhtw = zhcdicts['zh2Hant'].copy()
dict_zhtw.update(zhcdicts['zh2TW'])
got = dict_zhtw
elif locale == 'zh-hk' or locale == 'zh-mo':
if dict_zhhk:
got = dict_zhhk
else:
dict_zhhk = zhcdicts['zh2Hant'].copy()
dict_zhhk.update(zhcdicts['zh2HK'])
got = dict_zhhk
elif locale == 'zh-sg' or locale == 'zh-my':
if dict_zhsg:
got = dict_zhsg
else:
dict_zhsg = zhcdicts['zh2Hans'].copy()
dict_zhsg.update(zhcdicts['zh2SG'])
got = dict_zhsg
elif locale == 'zh-hans':
got = zhcdicts['zh2Hans']
elif locale == 'zh-hant':
got = zhcdicts['zh2Hant']
else:
got = {}
if locale not in pfsdict:
pfsdict[locale] = getpfset(got)
return got
def getpfset(convdict):
pfset = []
for word in convdict:
for ch in range(len(word)):
pfset.append(word[:ch+1])
return frozenset(pfset)
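# Illustrative behaviour: for a table containing u'计算机', getpfset yields the
# prefixes u'计', u'计算' and u'计算机', which is what lets the matcher keep
# extending a fragment greedily.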
def issimp(s, full=False):
"""
    Detect whether text is Simplified Chinese or Traditional Chinese.
    Returns True for Simplified; False for Traditional; None for unknown.
    If full=False, it returns once the first simplified- or traditional-only
    character is encountered, so it's for quick and rough identification;
    else, it compares the counts and returns the most likely one.
Use `is` (True/False/None) to check the result.
`s` must be unicode (Python 2) or str (Python 3), or you'll get None.
"""
if zhcdicts is None:
loaddict(DICTIONARY)
simp, trad = 0, 0
if full:
for ch in s:
if ch in zhcdicts['SIMPONLY']:
simp += 1
elif ch in zhcdicts['TRADONLY']:
trad += 1
if simp > trad:
return True
elif simp < trad:
return False
else:
return None
else:
for ch in s:
if ch in zhcdicts['SIMPONLY']:
return True
elif ch in zhcdicts['TRADONLY']:
return False
return None
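# Usage sketch (illustrative):
#   issimp(u'简体字')  # True  - a simplified-only character is found
#   issimp(u'繁體字')  # False - a traditional-only character is found
#   issimp(u'中文')    # None  - no distinguishing characters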
def fallback(locale, mapping):
for l in Locales[locale]:
if l in mapping:
return mapping[l]
return convert(tuple(mapping.values())[0], locale)
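# e.g. for locale 'zh-tw' the candidates are tried in the order
# ('zh-tw', 'zh-hant', 'zh-hk', 'zh'); if none of those keys exists in the
# mapping, the first value is simply converted to the target locale.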
def convtable2dict(convtable, locale, update=None):
"""
    Convert a list of conversion dicts to a dict for a certain locale.
>>> sorted(convtable2dict([{'zh-hk': '列斯', 'zh-hans': '利兹', 'zh': '利兹', 'zh-tw': '里茲'}, {':uni': '巨集', 'zh-cn': '宏'}], 'zh-cn').items())
[('列斯', '利兹'), ('利兹', '利兹'), ('巨集', '宏'), ('里茲', '利兹')]
"""
rdict = update.copy() if update else {}
for r in convtable:
if ':uni' in r:
if locale in r:
rdict[r[':uni']] = r[locale]
elif locale[:-1] == 'zh-han':
if locale in r:
for word in r.values():
rdict[word] = r[locale]
else:
v = fallback(locale, r)
for word in r.values():
rdict[word] = v
return rdict
def tokenize(s, locale, update=None):
"""
    Tokenize `s` according to the corresponding locale dictionary.
Don't use this for serious text processing.
"""
zhdict = getdict(locale)
pfset = pfsdict[locale]
if update:
zhdict = zhdict.copy()
zhdict.update(update)
newset = set()
for word in update:
for ch in range(len(word)):
newset.add(word[:ch+1])
pfset = pfset | newset
ch = []
N = len(s)
pos = 0
while pos < N:
i = pos
frag = s[pos]
maxword = None
maxpos = 0
while i < N and frag in pfset:
if frag in zhdict:
maxword = frag
maxpos = i
i += 1
frag = s[pos:i+1]
if maxword is None:
maxword = s[pos]
pos += 1
else:
pos = maxpos + 1
ch.append(maxword)
return ch
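# Usage sketch (illustrative; the exact segmentation depends on the
# conversion dictionary, so no output is asserted here):
#   for tok in tokenize(u'人体内存在很多微生物', 'zh-tw'):
#       print(tok)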
def convert(s, locale, update=None):
"""
Main convert function.
:param s: must be `unicode` (Python 2) or `str` (Python 3).
    :param locale: should be one of ``('zh-hans', 'zh-hant', 'zh-cn', 'zh-sg',
'zh-tw', 'zh-hk', 'zh-my', 'zh-mo')``.
:param update: a dict which updates the conversion table, eg.
``{'from1': 'to1', 'from2': 'to2'}``
>>> print(convert('我幹什麼不干你事。', 'zh-cn'))
我干什么不干你事。
>>> print(convert('我幹什麼不干你事。', 'zh-cn', {'不干': '不幹'}))
我干什么不幹你事。
>>> print(convert('人体内存在很多微生物', 'zh-tw'))
人體內存在很多微生物
"""
if locale == 'zh' or locale not in Locales:
# "no conversion"
return s
zhdict = getdict(locale)
pfset = pfsdict[locale]
newset = set()
if update:
# TODO: some sort of caching
#zhdict = zhdict.copy()
#zhdict.update(update)
for word in update:
for ch in range(len(word)):
newset.add(word[:ch+1])
#pfset = pfset | newset
ch = []
N = len(s)
pos = 0
while pos < N:
i = pos
frag = s[pos]
maxword = None
maxpos = 0
while i < N and (frag in pfset or frag in newset):
if update and frag in update:
maxword = update[frag]
maxpos = i
elif frag in zhdict:
maxword = zhdict[frag]
maxpos = i
i += 1
frag = s[pos:i+1]
if maxword is None:
maxword = s[pos]
pos += 1
else:
pos = maxpos + 1
ch.append(maxword)
return ''.join(ch)
def convert_for_mw(s, locale, update=None):
"""
Recognizes MediaWiki's human conversion format.
Use locale='zh' for no conversion.
Reference: (all tests passed)
https://zh.wikipedia.org/wiki/Help:高级字词转换语法
https://www.mediawiki.org/wiki/Writing_systems/Syntax
>>> print(convert_for_mw('在现代,机械计算-{}-机的应用已经完全被电子计算-{}-机所取代', 'zh-hk'))
在現代,機械計算機的應用已經完全被電子計算機所取代
>>> print(convert_for_mw('-{zh-hant:資訊工程;zh-hans:计算机工程学;}-是电子工程的一个分支,主要研究计算机软硬件和二者间的彼此联系。', 'zh-tw'))
資訊工程是電子工程的一個分支,主要研究計算機軟硬體和二者間的彼此聯繫。
>>> print(convert_for_mw('張國榮曾在英國-{zh:利兹;zh-hans:利兹;zh-hk:列斯;zh-tw:里茲}-大学學習。', 'zh-hant'))
張國榮曾在英國里茲大學學習。
>>> print(convert_for_mw('張國榮曾在英國-{zh:利兹;zh-hans:利兹;zh-hk:列斯;zh-tw:里茲}-大学學習。', 'zh-sg'))
张国荣曾在英国利兹大学学习。
>>> convert_for_mw('-{zh-hant:;\\nzh-cn:}-', 'zh-tw') == ''
True
>>> print(convert_for_mw('毫米(毫公分),符號mm,是長度單位和降雨量單位,-{zh-hans:台湾作-{公釐}-或-{公厘}-;zh-hant:港澳和大陸稱為-{毫米}-(台灣亦有使用,但較常使用名稱為毫公分);zh-mo:台灣作-{公釐}-或-{公厘}-;zh-hk:台灣作-{公釐}-或-{公厘}-;}-。', 'zh-tw'))
毫米(毫公分),符號mm,是長度單位和降雨量單位,港澳和大陸稱為毫米(台灣亦有使用,但較常使用名稱為毫公分)。
>>> print(convert_for_mw('毫米(毫公分),符號mm,是長度單位和降雨量單位,-{zh-hans:台湾作-{公釐}-或-{公厘}-;zh-hant:港澳和大陸稱為-{毫米}-(台灣亦有使用,但較常使用名稱為毫公分);zh-mo:台灣作-{公釐}-或-{公厘}-;zh-hk:台灣作-{公釐}-或-{公厘}-;}-。', 'zh-cn'))
毫米(毫公分),符号mm,是长度单位和降雨量单位,台湾作公釐或公厘。
>>> print(convert_for_mw('毫米(毫公分),符號mm,是長度單位和降雨量單位,-{zh-hans:台湾作-{公釐}-或-{公厘}-;zh-hant:港澳和大陸稱為-{毫米}-(台灣亦有使用,但較常使用名稱為毫公分);zh-mo:台灣作-{公釐}-或-{公厘}-;zh-hk:台灣作-{公釐}-或-{公厘', 'zh-hk')) # unbalanced test
毫米(毫公分),符號mm,是長度單位和降雨量單位,台灣作公釐或公厘
>>> print(convert_for_mw('报头的“-{參攷消息}-”四字摘自鲁迅笔迹-{zh-hans:,“-{參}-”是“-{参}-”的繁体字,读音cān,与简体的“-{参}-”字相同;;zh-hant:,;}-“-{攷}-”是“考”的异体字,读音kǎo,与“考”字相同。', 'zh-tw'))
報頭的「參攷消息」四字摘自魯迅筆跡,「攷」是「考」的異體字,讀音kǎo,與「考」字相同。
>>> print(convert_for_mw('报头的“-{參攷消息}-”四字摘自鲁迅笔迹-{zh-hans:,“-{參}-”是“-{参}-”的繁体字,读音cān,与简体的“-{参}-”字相同;;zh-hant:,;}-“-{攷}-”是“考”的异体字,读音kǎo,与“考”字相同。', 'zh-cn'))
报头的“參攷消息”四字摘自鲁迅笔迹,“參”是“参”的繁体字,读音cān,与简体的“参”字相同;“攷”是“考”的异体字,读音kǎo,与“考”字相同。
>>> print(convert_for_mw('{{Col-break}}-->', 'zh-hant'))
{{Col-break}}-->
"""
ch = []
rules = []
ruledict = update.copy() if update else {}
nested = 0
block = ''
for frag in RE_langconv.split(s):
if frag == '-{':
nested += 1
block += frag
elif frag == '}-':
if not nested:
# bogus }-
ch.append(frag)
continue
block += frag
nested -= 1
if nested:
continue
newrules = []
delim = RE_splitflag.split(block[2:-2].strip(' \t\n\r\f\v;'))
if len(delim) == 1:
flag = None
mapping = RE_splitmap.split(delim[0])
else:
flag = RE_splitmap.split(delim[0].strip(' \t\n\r\f\v;'))
mapping = RE_splitmap.split(delim[1])
rule = {}
for m in mapping:
uni = RE_splituni.split(m)
if len(uni) == 1:
pair = RE_splitpair.split(uni[0])
else:
if rule:
newrules.append(rule)
rule = {':uni': uni[0]}
else:
rule[':uni'] = uni[0]
pair = RE_splitpair.split(uni[1])
if len(pair) == 1:
rule['zh'] = convert_for_mw(pair[0], 'zh', ruledict)
else:
rule[pair[0]] = convert_for_mw(pair[1], pair[0], ruledict)
newrules.append(rule)
if not flag:
ch.append(fallback(locale, newrules[0]))
elif any(ch in flag for ch in 'ATRD-HN'):
for f in flag:
# A: add rule for convert code (all text convert)
# H: Insert a conversion rule without output
if f in ('A', 'H'):
for r in newrules:
if not r in rules:
rules.append(r)
if f == 'A':
if ':uni' in r:
if locale in r:
ch.append(r[locale])
else:
ch.append(convert(r[':uni'], locale))
else:
ch.append(fallback(locale, newrules[0]))
# -: remove convert
elif f == '-':
for r in newrules:
try:
rules.remove(r)
except ValueError:
pass
# D: convert description (useless)
#elif f == 'D':
#ch.append('; '.join(': '.join(x) for x in newrules[0].items()))
# T: title convert (useless)
# R: raw content (implied above)
# N: current variant name (useless)
#elif f == 'N':
#ch.append(locale)
ruledict = convtable2dict(rules, locale, update)
else:
fblimit = frozenset(flag) & frozenset(Locales[locale])
limitedruledict = update.copy() if update else {}
for r in rules:
if ':uni' in r:
if locale in r:
limitedruledict[r[':uni']] = r[locale]
else:
v = None
for l in Locales[locale]:
if l in r and l in fblimit:
v = r[l]
break
for word in r.values():
limitedruledict[word] = v if v else convert(word, locale)
ch.append(convert(delim[1], locale, limitedruledict))
block = ''
elif nested:
block += frag
else:
ch.append(convert(frag, locale, ruledict))
if nested:
# unbalanced
ch.append(convert_for_mw(block + '}-'*nested, locale, ruledict))
return ''.join(ch)
def test_convert_mw(locale, update=None):
s = ('英國-{zh:利兹;zh-hans:利兹;zh-hk:列斯;zh-tw:里茲}-大学\n'
'-{zh-hans:计算机; zh-hant:電腦;}-\n'
'-{H|巨集=>zh-cn:宏;}-\n'
'测试:巨集、宏\n'
'-{简体字繁體字}-\n'
'北-{}-韓、北朝-{}-鲜\n'
'-{H|zh-cn:博客; zh-hk:網誌; zh-tw:部落格;}-\n'
'测试:博客、網誌、部落格\n'
'-{A|zh-cn:博客; zh-hk:網誌; zh-tw:部落格;}-\n'
'测试:博客、網誌、部落格\n'
'-{H|zh-cn:博客; zh-hk:網誌; zh-tw:部落格;}-\n'
'测试1:博客、網誌、部落格\n'
'-{-|zh-cn:博客; zh-hk:網誌; zh-tw:部落格;}-\n'
'测试2:博客、網誌、部落格\n'
'-{T|zh-cn:汤姆·汉克斯; zh-hk:湯·漢斯; zh-tw:湯姆·漢克斯;}-\n'
'-{D|zh-cn:汤姆·汉克斯; zh-hk:湯·漢斯; zh-tw:湯姆·漢克斯;}-\n'
'-{H|zh-cn:博客; zh-hk:網誌; zh-tw:部落格;}-\n'
'测试1:-{zh;zh-hans;zh-hant|博客、網誌、部落格}-\n'
'测试2:-{zh;zh-cn;zh-hk|博客、網誌、部落格}-')
return convert_for_mw(s, locale, update)
def main():
"""
Simple stdin/stdout interface.
"""
if len(sys.argv) == 2 and sys.argv[1] in Locales:
locale = sys.argv[1]
convertfunc = convert
elif len(sys.argv) == 3 and sys.argv[1] == '-w' and sys.argv[2] in Locales:
locale = sys.argv[2]
convertfunc = convert_for_mw
else:
thisfile = __file__ if __name__ == '__main__' else 'python -mzhconv'
print("usage: %s [-w] {zh-cn|zh-tw|zh-hk|zh-sg|zh-hans|zh-hant|zh} < input > output" % thisfile)
sys.exit(1)
loaddict()
ln = sys.stdin.readline()
while ln:
l = ln.rstrip('\r\n')
if sys.version_info[0] < 3:
l = unicode(l, 'utf-8')
res = convertfunc(l, locale)
if sys.version_info[0] < 3:
print(res.encode('utf-8'))
else:
print(res)
ln = sys.stdin.readline()
if __name__ == '__main__':
main()
| mit | 6,765,431,804,391,552,000 | 34.587866 | 198 | 0.505967 | false |
jmah/cinesync_python | cinesync/event_handler.py | 1 | 1815 | import cinesync
import sys, os
from optparse import OptionParser
class EventHandler:
def __init__(self, argv=sys.argv, stdin=sys.stdin):
try:
self.session = cinesync.Session.load(stdin)
except Exception:
self.session = None
parser = OptionParser()
parser.add_option('--key')
parser.add_option('--save-format')
parser.add_option('--save-dir')
parser.add_option('--url')
(options, rest_args) = parser.parse_args(argv[1:])
if options.key is None: raise cinesync.CineSyncError('--key argument is required')
if options.save_format is None: raise cinesync.CineSyncError('--save-format argument is required')
self.session_key = options.key if options.key != cinesync.OFFLINE_KEY else None
self.save_format = options.save_format
self.save_ext = { 'JPEG': 'jpg', 'PNG': 'png' }[self.save_format]
self.save_parent = options.save_dir
self.url = options.url
def is_offline(self):
return self.session_key == None
def saved_frame_path(self, media_file, frame):
if self.save_parent is None: return None
if not media_file.annotations[frame].drawing_objects: return None
base = '%s-%05d' % (media_file.name, frame)
i = 1; p2 = None
while True:
p = p2
p2, i = self.__saved_frame_ver_path(base, i)
if not os.path.exists(p2):
return p
def __saved_frame_ver_path(self, base, version):
v = ' (%d)' % version if version > 1 else ''
basename = base + v + '.' + self.save_ext
return (os.path.join(self.save_parent, basename), version + 1)
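    # Illustrative naming (assuming save_format JPEG): frame 42 of "shot01"
    # maps to "shot01-00042.jpg", then "shot01-00042 (2).jpg", and so on;
    # saved_frame_path returns the newest such file that already exists.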
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
pass
| bsd-3-clause | 8,719,700,762,281,527,000 | 33.245283 | 106 | 0.592837 | false |
StephenLujan/Naith | game/plugins/cullaabb/aabb.py | 1 | 10362 | # Copyright Tom SF Haines
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
from pandac.PandaModules import *
class AABB:
"""Defines an axis aligned bounding box."""
def __init__(self,low,high):
self.x = [low[0],high[0]]
self.y = [low[1],high[1]]
self.z = [low[2],high[2]]
self.bounds = [self.x,self.y,self.z]
self.centre = (0.5*(self.x[0]+self.x[1]),0.5*(self.y[0]+self.y[1]),0.5*(self.z[0]+self.z[1]))
self.volume = (self.x[1]-self.x[0]) * (self.y[1]-self.y[0]) * (self.z[1]-self.z[0])
def within(self,node):
"""Given a NodePath returns True if its in the AABB, False if it isn't."""
pos = node.getPos(render)
return pos[0]>=self.x[0] and pos[0]<=self.x[1] and pos[1]>=self.y[0] and pos[1]<=self.y[1] and pos[2]>=self.z[0] and pos[2]<=self.z[1]
def __str__(self):
return '{'+str(self.x)+','+str(self.y)+','+str(self.z)+'}'
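# Usage sketch (illustrative):
#   box = AABB((0.0, 0.0, 0.0), (2.0, 2.0, 2.0))
#   box.volume  # 8.0
#   box.centre  # (1.0, 1.0, 1.0)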
aabbLambda = 1e-3
aabbCutCost = 2.0
class SetAABB:
"""A set of AABB's - uses a kd tree with hieuristic dimension detection to subdivide at each point. Each level keeps a list of aabb's that cross the dividing line."""
def __init__(self,aabbs):
"""Given a list of AABB's."""
    # Work out what happens when dividing on each dimension - sort by the
    # AABB centres and select the centre aabb by volume, then try dividing at
    # the sides & centre of that centre aabb, counting how many nodes each
    # cut intercepts with a cost for offsetting too far - select the
    # dimension/division with the least cost...
# Get half the volume...
totVolume = sum(map(lambda x:x.volume,aabbs))
halfVolume = totVolume*0.5
# Variables we are finding the best option for...
bestDimension = 0
bestCutPoint = 0.0
bestCost = 1e20
bestLow = []
bestMid = aabbs
bestHigh = []
# Try each dimension, with multiple centre choice, store the best...
for dim in xrange(3):
byDim = sorted(aabbs,key=lambda x: x.centre[dim])
centre = 0
volume = 0.0
while centre+1<len(byDim) and volume<halfVolume:
volume += byDim[centre].volume
centre += 1
options = (byDim[centre].bounds[dim][0]-aabbLambda, byDim[centre].centre[dim], byDim[centre].bounds[dim][1]+aabbLambda)
for cutPoint in options:
cost = 0.0
lowVol = 0.0
highVol = 0.0
low = []
mid = []
high = []
for aabb in byDim:
if aabb.bounds[dim][1]<cutPoint:
lowVol += aabb.volume
low.append(aabb)
elif aabb.bounds[dim][0]>cutPoint:
highVol += aabb.volume
high.append(aabb)
else:
cost += aabb.volume*aabbCutCost
mid.append(aabb)
cost += math.fabs(lowVol-highVol)
if cost<bestCost:
bestDimension = dim
bestCutPoint = cutPoint
bestCost = cost
bestLow = low
bestMid = mid
bestHigh = high
# We have our bests - we now make this actual object, and then recurse to make the full tree...
zeroCount = 0
if len(bestLow)==0: zeroCount += 1
if len(bestHigh)==0: zeroCount += 1
    if zeroCount!=0:
      # If either side of the cut is empty the split separated nothing;
      # recursing would loop forever on the same set (or try to build an
      # empty child), so store a flat leaf and fall back to a linear scan.
      self.leaf = True
      self.data = bestLow + bestMid + bestHigh
else:
self.leaf = False
self.splitDim = bestDimension
self.split = bestCutPoint
self.low = SetAABB(bestLow)
if len(bestMid)!=0:
self.mid = SetAABB(bestMid)
else:
self.mid = None
self.high = SetAABB(bestHigh)
def within(self,node):
"""Returns an AABB that contains the given node, or None is none do."""
if self.leaf:
for aabb in self.data:
if aabb.within(node):
return aabb
return None
else:
if self.mid:
res = self.mid.within(node)
if res!=None: return res
if node.getPos(render)[self.splitDim]<self.split:
res = self.low.within(node)
if res!=None: return res
else:
res = self.high.within(node)
if res!=None: return res
return None
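# Usage sketch (illustrative): build the tree once, then query per node:
#   tree = SetAABB(listOfAABBs)
#   hit = tree.within(someNodePath)  # the containing AABB, or None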
class Portal:
"""Defines a portal by its 4 vertices."""
def __init__(self):
self.verts = [(1.0,0.0,1.0),(-1.0,0.0,1.0),(-1.0,0.0,-1.0),(1.0,0.0,-1.0)]
self.aabb1 = None
self.aabb2 = None
def fromFace(self,aabb,dim,side):
"""Setup the portal from a face of the given aabb - you specify the dim of the face, with side==False meaning the low side and side==True meaning the high side. Will be anti-clockwise looking at it from inside the cube."""
if side:
side = 1
else:
side = 0
# Define square2d, remap it to 3D coordinates based on dim and side...
square2d = [(0,0),(0,1),(1,1),(1,0)]
def incDim(base):
ret = [0,0,0]
ret[(dim+1)%3] = base[0]
ret[(dim+2)%3] = base[1]
ret[dim] = side
return ret
square3d = map(incDim,square2d)
# Extract the 4 coordinates...
self.verts = []
for index in square3d:
coord = map(lambda d: aabb.bounds[d][index[d]],xrange(3))
self.verts.append(coord)
    # If needed reorder them so it's an anticlockwise ordering from the view of the centre of the aabb...
offsetC = map(lambda x: map(lambda a,b: a-b,x,aabb.centre),self.verts)
ind = sum(map(lambda i:offsetC[1][i]*(offsetC[0][(i+1)%3]*offsetC[2][(i+2)%3] - offsetC[0][(i+2)%3]*offsetC[2][(i+1)%3]),xrange(3)))
if ind<0.0:
self.verts = [self.verts[0],self.verts[3],self.verts[2],self.verts[1]]
def setupPortal(self,portal,portalNode,flip):
if flip:
			order = [0,3,2,1]
else:
order = [0,1,2,3]
c = map(lambda i:sum(map(lambda x:x[i],self.verts))/4.0,xrange(3))
portalNode.setPos(render,Vec3(c[0],c[1],c[2]))
portal.clearVertices()
for o in order:
portal.addVertex(Point3(self.verts[o][0] - c[0],self.verts[o][1] - c[1],self.verts[o][2] - c[2]))
def findPortals(aabbs,overlap = 1e-3):
"""Given a list of AABB's this finds all intersections and creates portals. To store the portals creates a variable in each aabb, portals = [[[],[]],[[],[]],[[],[]]] - first index is dimension, second index is low (0) and high (1), final list is all portals using that face. Returns the portals as a list. Will throw an error if the geometry is bad. Will modify the dimensions of the given aabb's to account for overlap."""
# Before we start add the portal variable to each aabb...
for aabb in aabbs:
aabb.portals = [[[],[]],[[],[]],[[],[]]]
	# We process each dimension separately - this first loop is over the dimensions...
ret = []
for dim in xrange(3):
otherDim = [0,1,2]
del otherDim[dim]
# Iterate all aabbs and create a push event and pop event for each - push it on when you reach the minimum, pop it when you get to the maximum. (True,d,aabb) to push, (False,d,aabb) to pop...
events = []
for aabb in aabbs:
events.append((True,aabb.bounds[dim][0],aabb))
events.append((False,aabb.bounds[dim][1],aabb))
# Sort the events...
events.sort(key=lambda x: x[1])
# Iterate through the events in sequence - each time a aabb is pushed or popped check if it intercepts a face larger than it - if so add the relevant portal... (Partial interception is considered an error as it results in ambiguous behaviour. Multiple interception is also not allowed as its an entire face that intercepts from our point of view. (Larger face can have multiple intercepts of course.))
state = dict() # Index by id of aabb's
for event in events:
if not event[0]:
# Pop event - remove its aabb from the state...
del state[id(event[2])]
# Check event aabb against existing aabbs for being the smaller face...
done = False
for key,aabb in state.iteritems():
# Verify that the sorting dimension is not contained, i.e. they overlap so a portal can be created...
if (event[2].bounds[dim][0]>aabb.bounds[dim][0]) == (event[2].bounds[dim][1]<aabb.bounds[dim][1]):
continue
# Check if bounds overlap, done such that we can detect corner overlaps...
withinCount = [0,0,0]
for od in otherDim:
if event[2].bounds[od][0]>aabb.bounds[od][0] and event[2].bounds[od][0]<aabb.bounds[od][1]:
withinCount[od] += 1
if event[2].bounds[od][1]>aabb.bounds[od][0] and event[2].bounds[od][1]<aabb.bounds[od][1]:
withinCount[od] += 1
if sum(withinCount)==4:
if done:
						raise Exception('Double interception - each culling aabb face can only intercept one other cube as a fully contained face')
done = True
# We have an interception - update the relevant aabb to have only the slightest overlap then create the portal and finally arrange for all the links...
if event[0]:
event[2].bounds[dim][0] = aabb.bounds[dim][1] - overlap
evSide = 0
else:
event[2].bounds[dim][1] = aabb.bounds[dim][0] + overlap
evSide = 1
portal = Portal()
portal.fromFace(event[2],dim,not event[0])
ret.append(portal)
portal.aabb1 = event[2]
portal.aabb2 = aabb
event[2].portals[dim][evSide].append(portal)
aabb.portals[dim][(evSide+1)%2].append(portal)
elif len(filter(lambda x:x>0,withinCount))==2:
					exp = 'Partial interception - culling aabbs cannot intercept at corners/edges due to undefinable behaviour - must only overlap with one face fully contained within another.'
exp += ' dimension = ' + str(dim) + '; within = ' + str(withinCount) + '; '
exp += str(event[2]) + ' against ' + str(aabb)
raise Exception(exp)
if event[0]:
# Push event - add the events aabb to the state...
state[id(event[2])] = event[2]
return ret
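# Usage sketch (editor's addition; rooms stands in for a list of AABB
# instances covering the level):
#
#	portals = findPortals(rooms)
#	for p in portals:
#		print p.aabb1, '<->', p.aabb2 # The two aabb's each portal connects.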
| apache-2.0 | 1,652,840,935,139,971,000 | 36.817518 | 425 | 0.615422 | false |
ryanss/holidays.py | holidays/countries/mexico.py | 1 | 4211 | # -*- coding: utf-8 -*-
# python-holidays
# ---------------
# A fast, efficient Python library for generating country, province and state
# specific sets of holidays on the fly. It aims to make determining whether a
# specific date is a holiday as fast and flexible as possible.
#
# Author: ryanss <[email protected]> (c) 2014-2017
# dr-prodigy <[email protected]> (c) 2017-2020
# Website: https://github.com/dr-prodigy/python-holidays
# License: MIT (see LICENSE file)
from datetime import date
from dateutil.relativedelta import relativedelta as rd, MO
from holidays.constants import FRI, SAT, SUN
from holidays.constants import JAN, FEB, MAR, MAY, SEP, NOV, DEC
from holidays.holiday_base import HolidayBase
class Mexico(HolidayBase):
def __init__(self, **kwargs):
self.country = 'MX'
HolidayBase.__init__(self, **kwargs)
def _populate(self, year):
# New Year's Day
name = "Año Nuevo [New Year's Day]"
self[date(year, JAN, 1)] = name
if self.observed and date(year, JAN, 1).weekday() == SUN:
self[date(year, JAN, 1) + rd(days=+1)] = name + " (Observed)"
# The next year's observed New Year's Day can be in this year
# when it falls on a Friday (Jan 1st is a Saturday)
if self.observed and date(year, DEC, 31).weekday() == FRI:
self[date(year, DEC, 31)] = name + " (Observed)"
# Constitution Day
name = "Día de la Constitución [Constitution Day]"
if self.observed and year >= 2007:
self[date(year, FEB, 1) + rd(weekday=MO(+1))] = \
name + " (Observed)"
if year >= 1917:
self[date(year, FEB, 5)] = name
# Benito Juárez's birthday
name = "Natalicio de Benito Juárez [Benito Juárez's birthday]"
if self.observed and year >= 2007:
self[date(year, MAR, 1) + rd(weekday=MO(+3))] = \
name + " (Observed)"
if year >= 1917:
self[date(year, MAR, 21)] = name
# Labor Day
if year >= 1923:
name = "Día del Trabajo [Labour Day]"
self[date(year, MAY, 1)] = name
if self.observed and date(year, MAY, 1).weekday() == SAT:
self[date(year, MAY, 1) + rd(days=-1)] = name + " (Observed)"
elif self.observed and date(year, MAY, 1).weekday() == SUN:
self[date(year, MAY, 1) + rd(days=+1)] = name + " (Observed)"
# Independence Day
name = "Día de la Independencia [Independence Day]"
self[date(year, SEP, 16)] = name
if self.observed and date(year, SEP, 16).weekday() == SAT:
self[date(year, SEP, 16) + rd(days=-1)] = name + " (Observed)"
elif self.observed and date(year, SEP, 16).weekday() == SUN:
self[date(year, SEP, 16) + rd(days=+1)] = name + " (Observed)"
# Revolution Day
name = "Día de la Revolución [Revolution Day]"
if self.observed and year >= 2007:
self[date(year, NOV, 1) + rd(weekday=MO(+3))] = \
name + " (Observed)"
if year >= 1917:
self[date(year, NOV, 20)] = name
# Change of Federal Government
# Every six years--next observance 2018
name = "Transmisión del Poder Ejecutivo Federal"
name += " [Change of Federal Government]"
if year >= 1970 and (2096 - year) % 6 == 0:
self[date(year, DEC, 1)] = name
if self.observed and date(year, DEC, 1).weekday() == SAT:
self[date(year, DEC, 1) + rd(days=-1)] = name + " (Observed)"
elif self.observed and date(year, DEC, 1).weekday() == SUN:
self[date(year, DEC, 1) + rd(days=+1)] = name + " (Observed)"
        # Christmas
        name = "Navidad [Christmas]"
        self[date(year, DEC, 25)] = name
if self.observed and date(year, DEC, 25).weekday() == SAT:
self[date(year, DEC, 25) + rd(days=-1)] = name + " (Observed)"
elif self.observed and date(year, DEC, 25).weekday() == SUN:
self[date(year, DEC, 25) + rd(days=+1)] = name + " (Observed)"
class MX(Mexico):
pass
class MEX(Mexico):
pass
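# Usage sketch (editor's addition): HolidayBase supports dict-style access,
# so once a year has been populated dates can be tested directly:
#
#   from datetime import date
#   mx = Mexico()
#   date(2014, 12, 25) in mx    # True
#   mx.get(date(2014, 9, 16))   # "Dia de la Independencia [Independence Day]"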
| mit | -2,919,136,386,831,123,500 | 37.888889 | 78 | 0.562619 | false |
Mausy5043/ubundiagd | daemon98.py | 1 | 4199 | #!/usr/bin/env python
# Based on previous work by
# Charles Menguy (see: http://stackoverflow.com/questions/10217067/implementing-a-full-python-unix-style-daemon-process)
# and Sander Marechal (see: http://www.jejik.com/articles/2007/02/a_simple_unix_linux_daemon_in_python/)
# Adapted by M.Hendrix [2015] (deprecated)
# daemon98.py uploads data to the server.
import syslog, traceback
import os, sys, shutil, glob, time, commands
from libdaemon import Daemon
import ConfigParser
import subprocess
DEBUG = False
leaf = os.path.realpath(__file__).split('/')[-2]
class MyDaemon(Daemon):
def run(self):
iniconf = ConfigParser.ConfigParser()
inisection = "98"
home = os.path.expanduser('~')
s = iniconf.read(home + '/' + leaf + '/config.ini')
if DEBUG: print "config file : ", s
if DEBUG: print iniconf.items(inisection)
reportTime = iniconf.getint(inisection, "reporttime")
cycles = iniconf.getint(inisection, "cycles")
samplesperCycle = iniconf.getint(inisection, "samplespercycle")
flock = iniconf.get(inisection, "lockfile")
samples = samplesperCycle * cycles # total number of samples averaged
sampleTime = reportTime/samplesperCycle # time [s] between samples
cycleTime = samples * sampleTime # time [s] per cycle
myname = os.uname()[1]
mount_path = '/srv/array1/dataspool/'
remote_path = mount_path + myname
remote_lock = remote_path + '/client.lock'
while True:
try:
startTime=time.time()
if os.path.exists(remote_path):
do_mv_data(remote_path)
else:
if DEBUG:print remote_path + " not available"
waitTime = sampleTime - (time.time() - startTime) - (startTime%sampleTime)
if (waitTime > 0):
if DEBUG:print "Waiting {0} s".format(waitTime)
time.sleep(waitTime)
except Exception as e:
if DEBUG:
print "Unexpected error:"
print e.message
syslog.syslog(syslog.LOG_ALERT,e.__doc__)
syslog_trace(traceback.format_exc())
raise
def do_mv_data(rpath):
hostlock = rpath + '/host.lock'
clientlock = rpath + '/client.lock'
count_internal_locks=1
# wait 5 seconds for processes to finish
time.sleep(5)
while os.path.isfile(hostlock):
if DEBUG:print "hostlock exists"
# wait while the server has locked the directory
time.sleep(1)
# server already sets the client.lock. Do it anyway.
lock(clientlock)
# prevent race conditions
while os.path.isfile(hostlock):
if DEBUG:print "hostlock exists. WTF?"
# wait while the server has locked the directory
time.sleep(1)
while (count_internal_locks > 0):
time.sleep(1)
count_internal_locks=0
for fname in glob.glob(r'/tmp/' + leaf + '/*.lock'):
count_internal_locks += 1
if DEBUG:print "{0} internal locks exist".format(count_internal_locks)
for fname in glob.glob(r'/tmp/' + leaf + '/*.csv'):
if os.path.isfile(clientlock) and not (os.path.isfile(rpath + "/" + os.path.split(fname)[1])):
if DEBUG:print "moving data " + fname
shutil.move(fname, rpath)
unlock(clientlock)
if DEBUG:print "unlocked..."
def lock(fname):
  open(fname, 'a').close()
def unlock(fname):
if os.path.isfile(fname):
os.remove(fname)
def syslog_trace(trace):
# Log a python stack trace to syslog
log_lines = trace.split('\n')
for line in log_lines:
if line:
syslog.syslog(syslog.LOG_ALERT,line)
if __name__ == "__main__":
daemon = MyDaemon('/tmp/' + leaf + '/98.pid')
if len(sys.argv) == 2:
if 'start' == sys.argv[1]:
daemon.start()
elif 'stop' == sys.argv[1]:
daemon.stop()
elif 'restart' == sys.argv[1]:
daemon.restart()
elif 'foreground' == sys.argv[1]:
# assist with debugging.
print "Debug-mode started. Use <Ctrl>+C to stop."
DEBUG = True
if DEBUG:
logtext = "Daemon logging is ON"
syslog.syslog(syslog.LOG_DEBUG, logtext)
daemon.run()
else:
print "Unknown command"
sys.exit(2)
sys.exit(0)
else:
print "usage: {0!s} start|stop|restart|foreground".format(sys.argv[0])
sys.exit(2)
| mit | 4,576,095,632,037,689,300 | 29.875 | 120 | 0.6392 | false |
daniele-athome/kontalk-legacy-xmppserver | kontalk/xmppserver/component/s2s.py | 1 | 9624 | # -*- coding: utf-8 -*-
"""Kontalk XMPP s2s component."""
"""
Kontalk XMPP server
Copyright (C) 2014 Kontalk Devteam <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from twisted.application import strports
from twisted.python import randbytes
from twisted.words.protocols.jabber import jid, xmlstream, error
from twisted.words.xish import domish
from wokkel import component, server
from zope.interface import Interface, implements
from kontalk.xmppserver import log, util, keyring, storage, xmlstream2
class IS2SService(Interface):
def validateConnection(thisHost, otherHost, sid, key):
pass
def dispatch(xs, stanza):
pass
class S2SService(object):
"""S2S service. Used by S2S component class."""
implements(IS2SService)
def __init__(self, config, router):
self.config = config
self.defaultDomain = config['network']
self.domains = set()
self.domains.add(self.defaultDomain)
self.secret = randbytes.secureRandom(16).encode('hex')
self.router = router
self._outgoingStreams = {}
self._outgoingQueues = {}
self._outgoingConnecting = set()
self.serial = 0
def outgoingInitialized(self, xs):
thisHost = xs.thisEntity.host
otherHost = xs.otherEntity.host
log.debug("Outgoing connection %d from %r to %r established" %
(xs.serial, thisHost, otherHost))
self._outgoingStreams[thisHost, otherHost] = xs
xs.addObserver(xmlstream.STREAM_END_EVENT,
lambda _: self.outgoingDisconnected(xs))
if (thisHost, otherHost) in self._outgoingQueues:
for element in self._outgoingQueues[thisHost, otherHost]:
xs.send(element)
del self._outgoingQueues[thisHost, otherHost]
def outgoingDisconnected(self, xs):
thisHost = xs.thisEntity.host
otherHost = xs.otherEntity.host
log.debug("Outgoing connection %d from %r to %r disconnected" %
(xs.serial, thisHost, otherHost))
del self._outgoingStreams[thisHost, otherHost]
def initiateOutgoingStream(self, thisHost, otherHost):
"""
Initiate an outgoing XMPP server-to-server connection.
"""
def resetConnecting(_):
self._outgoingConnecting.remove((thisHost, otherHost))
if (thisHost, otherHost) in self._outgoingConnecting:
return
authenticator = server.XMPPServerConnectAuthenticator(thisHost,
otherHost,
self.secret)
factory = server.DeferredS2SClientFactory(authenticator)
factory.addBootstrap(xmlstream.STREAM_AUTHD_EVENT,
self.outgoingInitialized)
factory.logTraffic = self.logTraffic
self._outgoingConnecting.add((thisHost, otherHost))
d = server.initiateS2S(factory)
d.addBoth(resetConnecting)
return d
def validateConnection(self, thisHost, otherHost, sid, key):
"""
Validate an incoming XMPP server-to-server connection.
"""
log.debug("validating connection from %s (sid=%r)" % (otherHost, sid))
def connected(xs):
# Set up stream for immediate disconnection.
def disconnect(_):
xs.transport.loseConnection()
xs.addObserver(xmlstream.STREAM_AUTHD_EVENT, disconnect)
xs.addObserver(xmlstream.INIT_FAILED_EVENT, disconnect)
authenticator = server.XMPPServerVerifyAuthenticator(thisHost, otherHost,
sid, key)
factory = server.DeferredS2SClientFactory(authenticator)
factory.addBootstrap(xmlstream.STREAM_CONNECTED_EVENT, connected)
factory.logTraffic = self.logTraffic
return server.initiateS2S(factory)
def send(self, stanza):
"""
Send stanza to the proper XML Stream.
This uses addressing embedded in the stanza to find the correct stream
to forward the stanza to.
"""
otherHost = jid.internJID(stanza["to"]).host
thisHost = jid.internJID(stanza["from"]).host
if (thisHost, otherHost) not in self._outgoingStreams:
# There is no connection with the destination (yet). Cache the
# outgoing stanza until the connection has been established.
# XXX: If the connection cannot be established, the queue should
# be emptied at some point.
if (thisHost, otherHost) not in self._outgoingQueues:
self._outgoingQueues[(thisHost, otherHost)] = []
self._outgoingQueues[(thisHost, otherHost)].append(stanza)
self.initiateOutgoingStream(thisHost, otherHost)
else:
self._outgoingStreams[(thisHost, otherHost)].send(stanza)
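    # Illustration (editor's addition): a stanza routed before the outgoing
    # stream exists is queued and flushed by outgoingInitialized once the
    # connection is authenticated, e.g. (hypothetical addresses):
    #
    #   msg = domish.Element((None, 'message'))
    #   msg['from'] = 'alice@thishost'
    #   msg['to'] = 'bob@otherhost'
    #   service.send(msg)   # queued if no stream to otherhost is up yet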
def dispatch(self, xs, stanza):
"""
Send a stanza to the router, checking some stuff first.
"""
util.resetNamespace(stanza, xs.namespace)
stanzaFrom = stanza.getAttribute('from')
stanzaTo = stanza.getAttribute('to')
if not stanzaFrom or not stanzaTo:
xs.sendStreamError(error.StreamError('improper-addressing'))
else:
try:
sender = jid.internJID(stanzaFrom)
jid.internJID(stanzaTo)
            except jid.InvalidFormat:
                log.debug("Dropping error stanza with malformed JID")
                return
            log.debug("sender = %s, otherEntity = %s" % (sender.full(), xs.otherEntity.full()))
if sender.host != xs.otherEntity.host and sender.host != self.defaultDomain:
xs.sendStreamError(error.StreamError('invalid-from'))
else:
self.router.send(stanza)
class S2SComponent(xmlstream2.SocketComponent):
"""
Kontalk server-to-server component.
L{StreamManager} is for the connection with the router.
"""
def __init__(self, config):
router_cfg = config['router']
for key in ('socket', 'host', 'port'):
if key not in router_cfg:
router_cfg[key] = None
xmlstream2.SocketComponent.__init__(self, router_cfg['socket'], router_cfg['host'], router_cfg['port'], router_cfg['jid'], router_cfg['secret'])
self.config = config
self.logTraffic = config['debug']
self.network = config['network']
self.servername = config['host']
def setup(self):
storage.init(self.config['database'])
self.keyring = keyring.Keyring(storage.MySQLNetworkStorage(), self.config['fingerprint'], self.servername)
self.service = S2SService(self.config, self)
self.service.logTraffic = self.logTraffic
self.sfactory = server.XMPPS2SServerFactory(self.service)
self.sfactory.logTraffic = self.logTraffic
return strports.service('tcp:' + str(self.config['bind'][1]) +
':interface=' + str(self.config['bind'][0]), self.sfactory)
""" Connection with router """
def _authd(self, xs):
component.Component._authd(self, xs)
log.debug("connected to router.")
# bind to the default route
bind = domish.Element((None, 'bind'))
bind.addElement((None, 'default'))
self.send(bind)
self.xmlstream.addObserver("/bind", self.consume)
self.xmlstream.addObserver("/presence", self.dispatch)
self.xmlstream.addObserver("/iq", self.dispatch)
self.xmlstream.addObserver("/message", self.dispatch)
def consume(self, stanza):
stanza.consumed = True
log.debug("consuming stanza %s" % (stanza.toXml(), ))
def dispatch(self, stanza):
"""Handle incoming stanza from router to the proper server stream."""
if not stanza.consumed:
stanza.consumed = True
log.debug("incoming stanza from router %s" % (stanza.toXml().encode('utf-8'), ))
util.resetNamespace(stanza, component.NS_COMPONENT_ACCEPT)
stanza['from'] = self.resolveJID(stanza['from']).full()
to = stanza.getAttribute('to')
if to is not None:
sender = jid.JID(to)
if sender.host == self.network or sender.host in self.keyring.hostlist():
log.debug("stanza is for %s - resolver/c2s/net is down?" % (sender.host, ))
else:
self.service.send(stanza)
def _disconnected(self, reason):
component.Component._disconnected(self, reason)
log.debug("lost connection to router (%s)" % (reason, ))
def resolveJID(self, _jid):
"""Transform host attribute of JID from server name to network name."""
if isinstance(_jid, jid.JID):
return jid.JID(tuple=(_jid.user, self.network, _jid.resource))
else:
_jid = jid.JID(_jid)
_jid.host = self.network
return _jid
| gpl-3.0 | -651,443,148,847,546,400 | 36.447471 | 152 | 0.62448 | false |
Polarcraft/KbveBot | commands/timeuntil.py | 1 | 2038 | # Copyright (C) 2013-2015 Samuel Damashek, Peter Foley, James Forcier, Srijay Kasturi, Reed Koser, Christopher Reffett, and Fox Wilson
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import dateutil
import dateutil.parser
import dateutil.relativedelta
import datetime
from helpers import arguments
from helpers.command import Command
@Command(['timeuntil', 'timetill'], ['config'])
def cmd(send, msg, args):
"""Reports the difference between now and some specified time.
Syntax: {command} <time>
"""
parser = arguments.ArgParser(args['config'])
parser.add_argument('date', nargs='*', action=arguments.DateParser)
try:
cmdargs = parser.parse_args(msg)
except arguments.ArgumentException as e:
send(str(e))
return
if not cmdargs.date:
send("Time until when?")
return
delta = dateutil.relativedelta.relativedelta(cmdargs.date, datetime.datetime.now())
diff = "%s is " % cmdargs.date.strftime("%x")
if delta.years:
diff += "%d years " % (delta.years)
if delta.months:
diff += "%d months " % (delta.months)
if delta.days:
diff += "%d days " % (delta.days)
if delta.hours:
diff += "%d hours " % (delta.hours)
if delta.minutes:
diff += "%d minutes " % (delta.minutes)
if delta.seconds:
diff += "%d seconds " % (delta.seconds)
diff += "away"
send(diff)
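# Example interaction (editor's addition; the command prefix and exact
# numbers depend on the bot's configuration and when the command runs):
#
#   <user> !timeuntil 2038-01-19
#   <bot>  01/19/38 is 22 years 3 months ... away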
| gpl-2.0 | 7,107,087,741,139,688,000 | 36.740741 | 134 | 0.679588 | false |
seewindcn/tortoisehg | src/tortoisehg/util/menuthg.py | 1 | 11553 | # menuthg.py - TortoiseHg shell extension menu
#
# Copyright 2009 Steve Borho <[email protected]>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.
import os
from mercurial import hg, ui, node, error
from tortoisehg.util.i18n import _ as gettext
from tortoisehg.util import cachethg, paths, hglib
def _(msgid):
return {'id': msgid, 'str': gettext(msgid).encode('utf-8')}
thgcmenu = {
'commit': { 'label': _('Commit...'),
'help': _('Commit changes in repository'),
'icon': 'menucommit.ico'},
'init': { 'label': _('Create Repository Here'),
'help': _('Create a new repository'),
'icon': 'menucreaterepos.ico'},
'clone': { 'label': _('Clone...'),
'help': _('Create clone here from source'),
'icon': 'menuclone.ico'},
'status': { 'label': _('File Status'),
'help': _('Repository status & changes'),
'icon': 'menushowchanged.ico'},
'add': { 'label': _('Add Files...'),
'help': _('Add files to version control'),
'icon': 'menuadd.ico'},
'revert': { 'label': _('Revert Files...'),
'help': _('Revert file changes'),
'icon': 'menurevert.ico'},
'forget': { 'label': _('Forget Files...'),
'help': _('Remove files from version control'),
'icon': 'menurevert.ico'},
'remove': { 'label': _('Remove Files...'),
'help': _('Remove files from version control'),
'icon': 'menudelete.ico'},
'rename': { 'label': _('Rename File'),
'help': _('Rename file or directory'),
'icon': 'general.ico'},
'workbench': { 'label': _('Workbench'),
'help': _('View change history in repository'),
'icon': 'menulog.ico'},
'log': { 'label': _('File History'),
'help': _('View change history of selected files'),
'icon': 'menulog.ico'},
'shelve': { 'label': _('Shelve Changes'),
'help': _('Move changes between working dir and patch'),
'icon': 'menucommit.ico'},
'synch': { 'label': _('Synchronize'),
'help': _('Synchronize with remote repository'),
'icon': 'menusynch.ico'},
'serve': { 'label': _('Web Server'),
'help': _('Start web server for this repository'),
'icon': 'proxy.ico'},
'update': { 'label': _('Update...'),
'help': _('Update working directory'),
'icon': 'menucheckout.ico'},
'thgstatus': { 'label': _('Update Icons'),
'help': _('Update icons for this repository'),
'icon': 'refresh_overlays.ico'},
'userconf': { 'label': _('Global Settings'),
'help': _('Configure user wide settings'),
'icon': 'settings_user.ico'},
'repoconf': { 'label': _('Repository Settings'),
'help': _('Configure repository settings'),
'icon': 'settings_repo.ico'},
'shellconf': { 'label': _('Explorer Extension Settings'),
'help': _('Configure Explorer extension'),
'icon': 'settings_user.ico'},
'about': { 'label': _('About TortoiseHg'),
'help': _('Show About Dialog'),
'icon': 'menuabout.ico'},
'vdiff': { 'label': _('Diff to parent'),
'help': _('View changes using GUI diff tool'),
'icon': 'TortoiseMerge.ico'},
'hgignore': { 'label': _('Edit Ignore Filter'),
'help': _('Edit repository ignore filter'),
'icon': 'ignore.ico'},
'guess': { 'label': _('Guess Renames'),
'help': _('Detect renames and copies'),
'icon': 'detect_rename.ico'},
'grep': { 'label': _('Search History'),
'help': _('Search file revisions for patterns'),
'icon': 'menurepobrowse.ico'},
'dndsynch': { 'label': _('DnD Synchronize'),
'help': _('Synchronize with dragged repository'),
'icon': 'menusynch.ico'}}
_ALWAYS_DEMOTE_ = ('about', 'userconf', 'repoconf')
class TortoiseMenu(object):
def __init__(self, menutext, helptext, hgcmd, icon=None, state=True):
self.menutext = menutext
self.helptext = helptext
self.hgcmd = hgcmd
self.icon = icon
self.state = state
def isSubmenu(self):
return False
def isSep(self):
return False
class TortoiseSubmenu(TortoiseMenu):
def __init__(self, menutext, helptext, menus=[], icon=None):
TortoiseMenu.__init__(self, menutext, helptext, None, icon)
self.menus = menus[:]
def add_menu(self, menutext, helptext, hgcmd, icon=None, state=True):
self.menus.append(TortoiseMenu(menutext, helptext,
hgcmd, icon, state))
def add_sep(self):
self.menus.append(TortoiseMenuSep())
def get_menus(self):
return self.menus
def append(self, entry):
self.menus.append(entry)
def isSubmenu(self):
return True
class TortoiseMenuSep(object):
hgcmd = '----'
def isSubmenu(self):
return False
def isSep(self):
return True
class thg_menu(object):
def __init__(self, ui, promoted, name = "TortoiseHg"):
self.menus = [[]]
self.ui = ui
self.name = name
self.sep = [False]
self.promoted = promoted
def add_menu(self, hgcmd, icon=None, state=True):
if hgcmd in self.promoted:
pos = 0
else:
pos = 1
while len(self.menus) <= pos: #add Submenu
self.menus.append([])
self.sep.append(False)
if self.sep[pos]:
self.sep[pos] = False
self.menus[pos].append(TortoiseMenuSep())
self.menus[pos].append(TortoiseMenu(
thgcmenu[hgcmd]['label']['str'],
thgcmenu[hgcmd]['help']['str'], hgcmd,
thgcmenu[hgcmd]['icon'], state))
def add_sep(self):
self.sep = [True for _s in self.sep]
def get(self):
menu = self.menus[0][:]
for submenu in self.menus[1:]:
menu.append(TortoiseSubmenu(self.name, 'Mercurial', submenu, "hg.ico"))
menu.append(TortoiseMenuSep())
return menu
def __iter__(self):
return iter(self.get())
def open_repo(path):
root = paths.find_root(path)
if root:
try:
repo = hg.repository(ui.ui(), path=root)
return repo
except error.RepoError:
pass
except StandardError, e:
print "error while opening repo %s:" % path
print e
return None
class menuThg:
"""shell extension that adds context menu items"""
def __init__(self, internal=False):
self.name = "TortoiseHg"
promoted = []
pl = ui.ui().config('tortoisehg', 'promoteditems', 'commit,log')
for item in pl.split(','):
item = item.strip()
if item:
promoted.append(item)
if internal:
for item in thgcmenu.keys():
promoted.append(item)
for item in _ALWAYS_DEMOTE_:
if item in promoted:
promoted.remove(item)
self.promoted = promoted
def get_commands_dragdrop(self, srcfiles, destfolder):
"""
Get a list of commands valid for the current selection.
        Commands are instances of TortoiseMenu, TortoiseMenuSep or TortoiseSubmenu
"""
# we can only accept dropping one item
if len(srcfiles) > 1:
return []
# open repo
drag_repo = None
drop_repo = None
drag_path = srcfiles[0]
drag_repo = open_repo(drag_path)
if not drag_repo:
return []
if drag_repo and drag_repo.root != drag_path:
return [] # dragged item must be a hg repo root directory
drop_repo = open_repo(destfolder)
menu = thg_menu(drag_repo.ui, self.promoted, self.name)
menu.add_menu('clone')
if drop_repo:
menu.add_menu('dndsynch')
return menu
def get_norepo_commands(self, cwd, files):
menu = thg_menu(ui.ui(), self.promoted, self.name)
menu.add_menu('clone')
menu.add_menu('init')
menu.add_menu('userconf')
menu.add_sep()
menu.add_menu('about')
menu.add_sep()
return menu
def get_commands(self, repo, cwd, files):
"""
Get a list of commands valid for the current selection.
        Commands are instances of TortoiseMenu, TortoiseMenuSep or TortoiseSubmenu
"""
states = set()
onlyfiles = len(files) > 0
hashgignore = False
for f in files:
if not os.path.isfile(f):
onlyfiles = False
if f.endswith('.hgignore'):
hashgignore = True
states.update(cachethg.get_states(f, repo))
if not files:
states.update(cachethg.get_states(cwd, repo))
if cachethg.ROOT in states and len(states) == 1:
states.add(cachethg.MODIFIED)
changed = bool(states & set([cachethg.ADDED, cachethg.MODIFIED]))
modified = cachethg.MODIFIED in states
clean = cachethg.UNCHANGED in states
tracked = changed or modified or clean
new = bool(states & set([cachethg.UNKNOWN, cachethg.IGNORED]))
menu = thg_menu(repo.ui, self.promoted, self.name)
if changed or cachethg.UNKNOWN in states or 'qtip' in repo['.'].tags():
menu.add_menu('commit')
if hashgignore or new and len(states) == 1:
menu.add_menu('hgignore')
if changed or cachethg.UNKNOWN in states:
menu.add_menu('status')
# Visual Diff (any extdiff command)
has_vdiff = repo.ui.config('tortoisehg', 'vdiff', 'vdiff') != ''
if has_vdiff and modified:
menu.add_menu('vdiff')
if len(files) == 0 and cachethg.UNKNOWN in states:
menu.add_menu('guess')
elif len(files) == 1 and tracked: # needs ico
menu.add_menu('rename')
if files and new:
menu.add_menu('add')
if files and tracked:
menu.add_menu('remove')
if files and changed:
menu.add_menu('revert')
menu.add_sep()
if tracked:
menu.add_menu(files and 'log' or 'workbench')
if len(files) == 0:
menu.add_sep()
menu.add_menu('grep')
menu.add_sep()
menu.add_menu('synch')
menu.add_menu('serve')
menu.add_sep()
menu.add_menu('clone')
if repo.root != cwd:
menu.add_menu('init')
# add common menu items
menu.add_sep()
menu.add_menu('userconf')
if tracked:
menu.add_menu('repoconf')
menu.add_menu('about')
menu.add_sep()
return menu
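# Usage sketch (editor's addition): outside a repository only the generic
# entries are offered:
#
#   builder = menuThg()
#   menu = builder.get_norepo_commands('/tmp', [])
#   for entry in menu:
#       print getattr(entry, 'menutext', '----')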
| gpl-2.0 | 6,529,017,472,903,865,000 | 33.281899 | 83 | 0.515624 | false |
google/transperf | outparser.py | 1 | 14206 | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Processes transperf outputs including pcap files and kernel log files.
"""
import csv
import logging
import re
from scapy.all import PcapReader
LOG = logging.getLogger('transperf/outparser')
class Visitor(object):
"""The abstract base class for all the classes that process output files.
Visitors are added to transperf to process logs and pcap files in one pass,
*hopefully* with O(1) memory.
Visitor methods are called in the following sequence:
1) begin()
2) visit_conn()
3) visit_packet()
4) visit_klog()
5) end()
"""
def begin(self, exp, exp_dir, rcv_ip):
"""Called when the visitor should start a new experiment.
Args:
exp: The experiment object.
exp_dir: The experiment output directory.
rcv_ip: The receiver's IP address.
"""
pass
def end(self):
"""Called when all the output entries are passed to the visitor."""
pass
def visit_conn(self, ip, port, tool, cc, params, start, dur, tput):
"""Called for each connection.
Args:
ip: The ip address of the connection.
port: The port of the connection.
tool: The tool used in the experiment.
cc: The congestion control algorithm.
params: Parameters used for cc.
start: The relative start time in seconds.
dur: The duration of the connection in seconds.
tput: The throughput reported by the benchmarking
application (e.g., netperf).
"""
pass
def visit_klog(self, time, line, match):
"""Called for a new klog line.
The log lines of each connection are sorted by time, but the lines of
different connections can interleave.
Args:
time: Time of the log entry relative to the start time of the
experiment.
line: The raw content of the log file.
match: The dictionary of all grouped regex matches.
"""
pass
def visit_strmr_log(self, time, pline):
"""Called for a new streamer log line.
The log lines of each connection are sorted by time.
Args:
time: Time of the log entry relative to the start time of the
experiment.
            pline: The dictionary of all relevant parsed fields of a log line.
"""
pass
def visit_ss_log(self, time, data):
"""Called for a new ss log entry.
The log entries are sorted by time.
Args:
time: The time of the log entry when run ss command.
            data: The dictionary of all relevant parsed fields of a log line.
"""
pass
def visit_packet(self, time, packet):
"""Called when there is a new packet available to be processed.
The packets of each connection are sorted by time but packets of
different connections can interleave..
Args:
time: Captured time relative to the start time of the experiment.
packet: The packet parsed by scapy.
"""
pass
def visit_metric(self, metric):
"""Called when a metric is available to be processed.
Args:
metric: The metric of type metric.Metric.
"""
pass
class SsLog(object):
"""Parses ss logs and provides the flows of the experiment.
Attributes:
__readers: The ss log file readers.
__entries: The most recent read entry from each log file. We keep this
list to make sure the entries are yielded sorted by time.
"""
def __init__(self, log_paths):
self.__readers = [open(path) for path in log_paths]
self.__times = [0] * len(log_paths)
self.__entries = [None] * len(log_paths)
def __read_sslog(self, i):
"""Read the next entry in file.
Args:
i: The index of the file reader.
Returns:
The next entry in file f. None if there is no entry.
"""
f = self.__readers[i]
if not f:
return None
time = self.__times[i]
line = f.readline()
if not line:
return None
while line.startswith('# '):
self.__times[i] = time = float(line[2:])
f.readline()
line = f.readline()
if not line:
return None
data = {}
port = line.strip()
port = int(port[port.rfind(':') + 1:])
data['port'] = port
line = f.readline()
if not line:
return None
stat = line.strip().split()
for item in stat:
if item.startswith('bytes_acked:'):
data['bytes_acked'] = int(item[item.rfind(':') + 1:])
elif item.startswith('retrans:'):
data['retrans'] = int(item[item.rfind('/') + 1:])
elif item.startswith('data_segs_out:'):
data['data_segs_out'] = int(item[item.rfind(':') + 1:])
elif item.startswith('rtt:'):
data['rtt'] = (
float(item[item.find(':') + 1:item.rfind('/')]) / 1000
)
elif item.startswith('unacked:'):
data['unacked'] = int(item[item.find(':') + 1:])
return time, data
def __next_entry(self):
"""Returns the next entry ordered by time.
Returns:
The next entry. None if there is no entry.
"""
min_time = -1
min_index = -1
for i, entry in enumerate(self.__entries):
# If the reader has finished reading entries, check the next slot.
if not self.__readers[i]:
continue
# Fill the holes.
if not entry:
entry = self.__read_sslog(i)
self.__entries[i] = entry
# If entry is not set, it means that there is no entry in the
# reader. So, we can remove the reader.
if not entry:
self.__readers[i] = None
continue
entry_time = entry[0]
if min_index == -1 or entry_time < min_time:
min_index = i
min_time = entry_time
if min_index == -1:
return None
entry = self.__entries[min_index]
self.__entries[min_index] = None
return entry
def entries(self):
"""Entries stored in the ss log files.
Yields:
A tuple in the form of (relative time in sec, entry).
"""
min_time = -1
while True:
entry = self.__next_entry()
if not entry:
break
if min_time == -1:
min_time = entry[0]
yield (entry[0] - min_time, entry[1])
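# Usage sketch (editor's addition; log paths are hypothetical):
#
#   sslog = SsLog(['exp/ss.0.log', 'exp/ss.1.log'])
#   for reltime, data in sslog.entries():
#       print reltime, data['port'], data.get('rtt')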
class Pcap(object):
"""Parses pcap files and provides the flows of the experiment.
Attributes:
__readers: The pcap readers.
__packets: The most recent read packet from each pcap file. We keep this
list to make sure the packets are yielded sorted by time.
"""
def __init__(self, pcap_paths):
self.__readers = [PcapReader(path) for path in pcap_paths]
self.__packets = [None] * len(pcap_paths)
def __next_packet(self):
"""Returns the next packet ordered by time.
Returns:
The next packet. None if there is no packet.
"""
min_time = -1
min_index = -1
for i, pkt in enumerate(self.__packets):
# If the reader has finished reading packets, check the next slot.
if not self.__readers[i]:
continue
# Fill the holes.
if not pkt:
self.__packets[i] = pkt = self.__readers[i].read_packet()
# If pkt is not set, it means that there is no packet in the reader.
# So, we can remove the reader.
if not pkt:
self.__readers[i] = None
continue
if min_index == -1 or pkt.time < min_time:
min_index = i
min_time = pkt.time
if min_index == -1:
return None
pkt = self.__packets[min_index]
self.__packets[min_index] = None
return pkt
def packets(self):
"""Packets stored in the pcap files.
Yields:
A tuple in the form of (relative time in sec, raw packet, ip, tcp).
"""
min_time = -1
while True:
pkt = self.__next_packet()
if not pkt:
break
if min_time == -1:
min_time = pkt.time
yield (pkt.time - min_time, pkt)
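# Usage sketch (editor's addition; pcap paths are hypothetical):
#
#   pcap = Pcap(['exp/snd.pcap', 'exp/rcv.pcap'])
#   for reltime, pkt in pcap.packets():
#       print reltime, pkt.summary()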
# These are regular expressions to parse congestion control output in
# kern-debug.log.
_LOG_PATTERNS = [
# BBR:
re.compile((
r'\w+\s+\d+\s+\d{2}:\d{2}:\d{2}\s+(\w[\w\d\-]+)\s+kernel:\s+'
r'\[\s*(?P<ts>[\d\.]+)\] BBR '
r'(?P<ip>(\d{1,3}\.){3}\d{1,3}):(?P<port>\d{1,6})\s+'
r'(?P<ack>[\d,]+):(?P<fack>\d+)\s+'
r'(?P<castate>\S)\s+(?P<mode>\S)\s+'
r'(?P<snd_cwnd>\d+)\s+'
r'br\s+(?P<extra_acked>\d+)\s+'
r'cr\s+(?P<crtt>-?\d+)\s+'
r'rtt\s+(?P<rtt>-?\d+)\s+'
r'd\s+(?P<rs_delivered>-?\d+)\s+'
r'i\s+(?P<interval_us>-?\d+)\s+'
r'mrtt\s+(?P<mrtt>-?\d+)\s+'
r'(?P<rs_app_limited>\S)bw\s+(?P<sample_bw>\d+)\s+'
r'bw\s+(?P<bw>\d+)\s+'
r'lb\s+(?P<unused1>\d+)\s+'
r'ib\s+(?P<interval_bw>\d+)\s+'
r'qb\s+(?P<pacing_bw>\d+)\s+'
r'a\s+(?P<acked>\d+)\s+'
r'if\s+(?P<inflight>\d+)\s+'
r'(?P<unused2>\S)\s+'
r'(?P<round_start>\S)\s+'
r'dl\s+(?P<tp_delivered>\d+)\s+'
r'l\s+(?P<tp_loss>\d+)\s+'
r'al\s+(?P<tp_app_limited>\d+)\s+'
r'#\s+(?P<unused3>\d+)\s+'
r't\s+(?P<targetcw>\d+)\s+'
r'(?P<reord_seen>r|\.)\s+'
r'(?P<prev_ca_state>O|D|C|R|L)\s+'
r'lr\s+(?P<lr_x1000>-?\d+)\s+'
r'er\s+(?P<ecn_x1000>-?\d+)\s+'
r'ea\s+(?P<ecn_alpha_x1000>-?\d+)\s+'
r'bwl\s+(?P<bw_lo>-?\d+)\s+'
r'il\s+(?P<inflight_lo>-?\d+)\s+'
r'ih\s+(?P<inflight_hi>-?\d+)\s+'
r'c\s+(?P<bw_probe_up_cnt>-?\d+)\s+'
r'v\s+(?P<version>-?\d+)\s+'
r'(?P<debug_event>[\S])\s+'
r'(?P<cycle_idx>\d+)\s+'
r'(?P<ack_phase>I|R|B|F|A)\s+'
r'(?P<bw_probe_samples>Y|N)'
)),
]
class KernLog(object):
"""Parses kern-debug.log files.
Attributes:
__log_paths: The paths of kernel log files.
"""
def __init__(self, log_paths):
self.__log_paths = log_paths
def lines(self):
"""Yields a tuple for each log entry.
Yields:
Tuples in the form of: (timestamp in sec, raw line, parsed line)
"""
min_ts = {}
for path in self.__log_paths:
f = open(path)
for l in f:
# All log patterns must have "ts" and "port" fields.
m = None
for p in _LOG_PATTERNS:
m = p.match(l.strip())
if m:
break
if not m:
LOG.debug('cannot match log line: %s', l)
continue
mdict = m.groupdict()
if 'ts' not in mdict or 'port' not in mdict:
LOG.debug('no port or timestamp in log line: %s', l)
continue
ts = float(mdict['ts'])
# Make timestamps relative to the timestamp of the first
# entry of this port in the log file.
port = mdict['port']
if port not in min_ts:
min_ts[port] = ts
ts = 0
else:
ts -= min_ts[port]
yield (ts, l, m.groupdict())
class ConnInfo(object):
"""Parses the exp_dir/conn.info file.
This file is dumped by the sender and includes a line per connection.
"""
def __init__(self, cinfo_files):
self.__port_infos = {}
for f in cinfo_files:
lines = open(f).readlines()
for l in lines:
l = l.strip()
port, conn_info = l.split('=', 1)
self.__port_infos[int(port)] = conn_info.split(',', 6)
def conn_info(self, port):
"""Connection information of the given port."""
return self.__port_infos[port]
def ports(self):
"""Ports that exist in the conn.info files."""
return self.__port_infos.keys()
class RecvInfo(object):
"""Parses the recv.info file that is dumped by receiver.
This file only contains the IP address of the receiver.
"""
def __init__(self, rcvinf_file):
f = open(rcvinf_file)
self.ip = f.readlines()[0].strip()
f.close()
class ExpInfo(object):
"""Parses the exp.info file that is dumped by the orchestrator.
This file contains a readable string representation of the experiment.
"""
def __init__(self, expinf_file):
f = open(expinf_file)
self.__lines = f.readlines()
f.close()
def info(self):
"""Returns the lines in the exp.info file."""
return self.__lines
def fields(self):
"""Returns a dictionary of experiment parameters and their values."""
field_dict = {}
for l in self.__lines:
p, v = l.strip().split('=', 1)
field_dict[p] = v
return field_dict
| apache-2.0 | -1,043,250,681,706,200,200 | 29.815618 | 80 | 0.518795 | false |
mnahm5/django-estore | Lib/site-packages/awscli/customizations/ec2/protocolarg.py | 1 | 1400 | # Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""
This customization allows the user to specify the values "tcp", "udp",
or "icmp" as values for the --protocol parameter. The actual Protocol
parameter of the operation accepts only integer protocol numbers.
"""
def _fix_args(params, **kwargs):
key_name = 'Protocol'
if key_name in params:
if params[key_name] == 'tcp':
params[key_name] = '6'
elif params[key_name] == 'udp':
params[key_name] = '17'
elif params[key_name] == 'icmp':
params[key_name] = '1'
elif params[key_name] == 'all':
params[key_name] = '-1'
def register_protocol_args(cli):
cli.register('before-parameter-build.ec2.CreateNetworkAclEntry',
_fix_args)
cli.register('before-parameter-build.ec2.ReplaceNetworkAclEntry',
_fix_args)
| mit | -8,376,659,984,141,722,000 | 36.837838 | 73 | 0.664286 | false |
nce/sedater | sedater/test/test_options.py | 1 | 2109 | # ./sedater/test/test_options.py
# Author: Ulli Goschler <[email protected]>
# Created: Mon, 05.10.2015 - 12:59:56
# Modified: Thu, 10.12.2015 - 19:41:38
import unittest
from sedater.options import CLIParser
class TestCommandLineParameters(unittest.TestCase):
def setUp(self):
self.cli = CLIParser()
def test_default_settings(self):
self.cli.parseForSedater(['foo'])
self.assertFalse(self.cli.args.csv_headers)
self.assertFalse(self.cli.args.left_calibration)
self.assertFalse(self.cli.args.right_calibration)
self.assertFalse(self.cli.args.output_dir)
def test_toggle_csv_header(self):
self.cli.parseForSedater(['foo'])
self.assertFalse(self.cli.args.csv_headers)
self.cli.parseForSedater(['-c', 'foo'])
self.assertTrue(self.cli.args.csv_headers)
self.cli.parseForSedater(['--csv-headers', 'foo'])
self.assertTrue(self.cli.args.csv_headers)
def test_left_calibration_file(self):
ref = res = 'foobar'
self.cli.parseForSedater(['-l', ref, 'barfoo'])
self.assertEquals(self.cli.args.left_calibration, res)
self.cli.parseForSedater(['--left-calibration', ref, 'barfoo'])
self.assertEquals(self.cli.args.left_calibration, res)
def test_right_calibration_file(self):
ref = res = 'foobar'
self.cli.parseForSedater(['-r', ref, 'barfoo'])
self.assertEquals(self.cli.args.right_calibration, res)
self.cli.parseForSedater(['--right-calibration', ref, 'barfoo'])
self.assertEquals(self.cli.args.right_calibration, res)
def test_output_dir(self):
ref = res = 'foobar'
self.cli.parseForSedater(['-o', ref, 'barfoo'])
self.assertEquals(self.cli.args.output_dir, res)
self.cli.parseForSedater(['--output-dir', ref, 'barfoo'])
self.assertEquals(self.cli.args.output_dir, res)
def test_input_source_arguments(self):
ref = res = ['foo', 'bar', 'foobar', 'barfoo']
self.cli.parseForSedater(ref)
self.assertEquals(self.cli.args.inputSource, ref)
| mit | 4,717,097,843,753,090,000 | 41.18 | 72 | 0.655287 | false |
JohnReid/biopsy | Python/site_dpm/check-programs/map_factors.py | 1 | 2225 | #
# Copyright John Reid 2009
#
tp_indices = [
1,
6,
14,
17,
24,
28
]
def go_ids_for_genes(genes):
import biopsy.identifiers.biomart as B
for row in B.quick_query(
dataset='mmusculus_gene_ensembl',
attributes=[
'ensembl_gene_id',
#'go_cellular_component_id',
'go_biological_process_id',
#'go_molecular_function_id'
],
filters=[('ensembl_gene_id', ','.join(genes))],
):
yield row
def genes_for_go_id(go_id):
import biopsy.identifiers.biomart as B
for row in B.quick_query(
dataset='mmusculus_gene_ensembl',
attributes=['ensembl_gene_id'],
filters=[('go', go_id)],
):
yield row[0]
import os
from boost.graph import Graph
class LabelledGraph(Graph):
def __init__(self):
Graph.__init__(self)
self.labels = Graph.add_vertex_property(self, name='label', type='string')
self.vertex_map = {}
def add_labelled_vertex(self, label):
        if label in self.vertex_map:
raise RuntimeError('Vertex for "%s" already in graph' % label)
v = self.add_vertex()
self.labels[v] = label
self.vertex_map[label] = v
return v
def get_vertex(self, label):
if label in self.vertex_map:
return self.vertex_map[label]
else:
return self.add_labelled_vertex(label)
def create_graph(factors, pssm_map):
"""
Create a bipartite graph representing which matrices map onto the factors.
"""
    g = LabelledGraph()
for f in factors:
for matrix, domain in pssm_map.iteritems():
if f in domain:
g.add_edge(g.get_vertex(matrix), g.get_vertex(f))
return g
for tp_index in tp_indices:
tp = transcriptional_programs[tp_index]
print tp_index
print tp.tp_factors
g = create_graph(tp.tp_factors, pssm_map)
graphviz_file = 'tp-%03d-factors.dot' % tp_index
svg_file = 'tp-%03d-factors.svg' % tp_index
g.write_graphviz(graphviz_file)
os.system('dot %s -Tsvg -o %s' % (graphviz_file, svg_file))
| mit | 6,979,182,918,673,666,000 | 24.872093 | 82 | 0.571236 | false |
jeffh/describe | describe/flags.py | 1 | 5303 | """ flags.py - Various constants that have special meaning in describe.
INIT - Represents a matcher be instanciated for initialization purposes only
NO_ARG - Represents no argument. This is Noner than None.
"""
__all__ = (
'NO_ARG', 'NO_ARGS', 'ANY_ARG', 'ANYTHING', 'ANY_ARGS', 'ANY_KWARGS', 'is_flag',
'params_match',
)
class Flag(object):
def __init__(self, name):
self.name = name
    def __call__(self):
        return self
def __repr__(self):
return 'flag(%s)' % self.name
INIT = Flag('INIT')
NO_ARG = Flag('NO_ARG')
NO_KWARGS = NO_ARGS = Flag('NO_ARGS')
# used for argument matching
ANY_ARG = Flag('ANY_ARG')
ANYTHING = Flag('ANYTHING')
ANY_ARGS = Flag('ANY_ARGS')
ANY_KWARGS = Flag('ANY_KWARGS')
class DynamicFlag(object):
def __repr__(self):
return getattr(self, 'name', self.__class__.__name__.lower())
def validate(self, argument):
raise NotImplemented()
class Subclasses(DynamicFlag):
def __init__(self, cls):
self.cls = cls
def validate(self, argument):
try:
return issubclass(argument, self.cls)
except TypeError:
return False
class InstanceOf(DynamicFlag):
def __init__(self, cls):
self.cls = cls
def validate(self, argument):
return isinstance(argument, self.cls)
class Contains(DynamicFlag):
def __init__(self, item):
self.item = item
def validate(self, argument):
try:
return self.item in list(argument)
except TypeError:
return False
class IncludesPairs(DynamicFlag):
def __init__(self, **kwargs):
self.kwargs = kwargs
def validate(self, argument):
for key, value in self.kwargs.items():
try:
if argument[key] != value:
return False
except (IndexError, KeyError, TypeError):
return False
return True
class _Callable(DynamicFlag):
def __call__(self):
return self
def validate(self, argument):
return callable(argument)
Callable = _Callable()
class _AmountCompare(DynamicFlag):
def __init__(self, size):
self.size = size
def validate(self, argument):
try:
return self.cmp(argument, self.size)
except TypeError:
return False
def cmp(self, arg, value):
raise NotImplemented()
class LengthOf(_AmountCompare):
    def cmp(self, arg, value):
        return len(arg) == value
class AtLeast(_AmountCompare):
    def cmp(self, arg, value):
        return arg > value
class AtLeastEqual(_AmountCompare):
    def cmp(self, arg, value):
        return arg >= value
class AtMost(_AmountCompare):
    def cmp(self, arg, value):
        return arg < value
class AtMostEqual(_AmountCompare):
    def cmp(self, arg, value):
        return arg <= value
def is_flag(value):
try:
return issubclass(value, Flag) or issubclass(value, DynamicFlag)
except TypeError:
return isinstance(value, Flag) or isinstance(value, DynamicFlag)
def __arg_is(arg, *flags):
if arg in flags:
return True
try:
tuple(arg)
except TypeError:
return False
if tuple(arg) in set((f,) for f in flags):
return True
return False
def args_match(actual_args, expected_args):
if __arg_is(expected_args, ANYTHING, ANY_ARGS):
return True
if __arg_is(expected_args, NO_ARG, NO_ARGS):
return not list(actual_args)
if len(actual_args) != len(expected_args):
return False
for aarg, earg in zip(actual_args, expected_args):
assert earg not in (ANYTHING, ANY_ARGS, NO_ARG, NO_ARGS), 'expected_args cannot have a list containing any of the following: (ANYTHING, ANY_ARGS, NO_ARG, NO_ARGS)'
if aarg == earg or earg is ANY_ARG:
continue
if isinstance(earg, DynamicFlag):
if earg.validate(aarg):
continue
return False
return True
def kwargs_match(actual_args, expected_args):
if __arg_is(expected_args, ANYTHING, ANY_KWARGS):
return True
if __arg_is(expected_args, NO_ARG, NO_KWARGS):
return not list(actual_args)
if len(actual_args) != len(expected_args):
return False
for (akey, aarg), (ekey, earg) in zip(sorted(actual_args.items()), sorted(expected_args.items())):
assert earg not in (ANYTHING, ANY_ARGS, NO_ARG, NO_ARGS), 'expected_args cannot have a list containing any of the following: (ANYTHING, ANY_ARGS, NO_ARG, NO_ARGS)'
if akey != ekey:
return False
if aarg == earg or earg is ANY_ARG:
continue
if isinstance(earg, DynamicFlag):
if earg.validate(aarg):
continue
return False
return True
def params_match(actual_args, actual_kwargs, expected_args, expected_kwargs):
if __arg_is(expected_args, ANYTHING, ANY_ARGS) or __arg_is(expected_kwargs, ANYTHING, ANY_KWARGS):
return True
return args_match(actual_args, expected_args) and kwargs_match(actual_kwargs, expected_kwargs)
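# Worked examples (editor's addition), following the semantics above:
#
#   args_match((1, 2), (1, ANY_ARG))                  # True
#   args_match((), NO_ARGS)                           # True
#   kwargs_match({'a': 1}, {'a': InstanceOf(int)})    # True
#   params_match((1,), {}, ANY_ARGS, {})              # True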
| mit | 4,358,649,939,190,283,000 | 27.461111 | 171 | 0.593815 | false |
hawkowl/axiom | axiom/dependency.py | 1 | 10496 | # Copright 2008 Divmod, Inc. See LICENSE file for details.
# -*- test-case-name: axiom.test.test_dependency -*-
"""
A dependency management system for items.
"""
import sys, itertools
from zope.interface.advice import addClassAdvisor
from epsilon.structlike import record
from axiom.item import Item
from axiom.attributes import reference, boolean, AND
from axiom.errors import ItemNotFound, DependencyError, UnsatisfiedRequirement
#There is probably a cleaner way to do this.
_globalDependencyMap = {}
def dependentsOf(cls):
deps = _globalDependencyMap.get(cls, None)
if deps is None:
return []
else:
return [d[0] for d in deps]
##Totally ripping off z.i
def dependsOn(itemType, itemCustomizer=None, doc='',
indexed=True, whenDeleted=reference.NULLIFY):
"""
This function behaves like L{axiom.attributes.reference} but with
an extra behaviour: when this item is installed (via
L{axiom.dependency.installOn} on a target item, the
type named here will be instantiated and installed on the target
as well.
For example::
class Foo(Item):
counter = integer()
thingIDependOn = dependsOn(Baz, lambda baz: baz.setup())
@param itemType: The Item class to instantiate and install.
@param itemCustomizer: A callable that accepts the item installed
as a dependency as its first argument. It will be called only if
an item is created to satisfy this dependency.
@return: An L{axiom.attributes.reference} instance.
"""
frame = sys._getframe(1)
locals = frame.f_locals
# Try to make sure we were called from a class def.
if (locals is frame.f_globals) or ('__module__' not in locals):
raise TypeError("dependsOn can be used only from a class definition.")
ref = reference(reftype=itemType, doc=doc, indexed=indexed, allowNone=True,
whenDeleted=whenDeleted)
if "__dependsOn_advice_data__" not in locals:
addClassAdvisor(_dependsOn_advice)
locals.setdefault('__dependsOn_advice_data__', []).append(
(itemType, itemCustomizer, ref))
return ref
def _dependsOn_advice(cls):
if cls in _globalDependencyMap:
print "Double advising of %s. dependency map from first time: %s" % (
cls, _globalDependencyMap[cls])
#bail if we end up here twice, somehow
return cls
for itemType, itemCustomizer, ref in cls.__dict__[
'__dependsOn_advice_data__']:
classDependsOn(cls, itemType, itemCustomizer, ref)
del cls.__dependsOn_advice_data__
return cls
def classDependsOn(cls, itemType, itemCustomizer, ref):
_globalDependencyMap.setdefault(cls, []).append(
(itemType, itemCustomizer, ref))
class _DependencyConnector(Item):
"""
I am a connector between installed items and their targets.
"""
installee = reference(doc="The item installed.")
target = reference(doc="The item installed upon.")
explicitlyInstalled = boolean(doc="Whether this item was installed"
"explicitly (and thus whether or not it"
"should be automatically uninstalled when"
"nothing depends on it)")
def installOn(self, target):
"""
Install this object on the target along with any powerup
interfaces it declares. Also track that the object now depends on
the target, and the object was explicitly installed (and therefore
should not be uninstalled by subsequent uninstallation operations
unless it is explicitly removed).
"""
_installOn(self, target, True)
def _installOn(self, target, __explicitlyInstalled=False):
depBlob = _globalDependencyMap.get(self.__class__, [])
dependencies, itemCustomizers, refs = (map(list, zip(*depBlob))
or ([], [], []))
#See if any of our dependencies have been installed already
for dc in self.store.query(_DependencyConnector,
_DependencyConnector.target == target):
if dc.installee.__class__ in dependencies:
i = dependencies.index(dc.installee.__class__)
refs[i].__set__(self, dc.installee)
del dependencies[i], itemCustomizers[i], refs[i]
if (dc.installee.__class__ == self.__class__
and self.__class__ in set(
itertools.chain([blob[0][0] for blob in
_globalDependencyMap.values()]))):
#Somebody got here before we did... let's punt
raise DependencyError("An instance of %r is already "
"installed on %r." % (self.__class__,
target))
#The rest we'll install
for i, cls in enumerate(dependencies):
it = cls(store=self.store)
if itemCustomizers[i] is not None:
itemCustomizers[i](it)
_installOn(it, target, False)
refs[i].__set__(self, it)
#And now the connector for our own dependency.
dc = self.store.findUnique(
_DependencyConnector,
AND(_DependencyConnector.target==target,
_DependencyConnector.installee==self,
_DependencyConnector.explicitlyInstalled==__explicitlyInstalled),
None)
assert dc is None, "Dependency connector already exists, wtf are you doing?"
_DependencyConnector(store=self.store, target=target,
installee=self,
explicitlyInstalled=__explicitlyInstalled)
target.powerUp(self)
callback = getattr(self, "installed", None)
if callback is not None:
callback()
def uninstallFrom(self, target):
"""
Remove this object from the target, as well as any dependencies
that it automatically installed which were not explicitly
"pinned" by calling "install", and raising an exception if
anything still depends on this.
"""
#did this class powerup on any interfaces? powerdown if so.
target.powerDown(self)
for dc in self.store.query(_DependencyConnector,
_DependencyConnector.target==target):
if dc.installee is self:
dc.deleteFromStore()
for item in installedUniqueRequirements(self, target):
uninstallFrom(item, target)
callback = getattr(self, "uninstalled", None)
if callback is not None:
callback()
def installedOn(self):
"""
If this item is installed on another item, return the install
target. Otherwise return None.
"""
try:
return self.store.findUnique(_DependencyConnector,
_DependencyConnector.installee == self
).target
except ItemNotFound:
return None
def installedDependents(self, target):
"""
Return an iterable of things installed on the target that
require this item.
"""
for dc in self.store.query(_DependencyConnector,
_DependencyConnector.target == target):
depends = dependentsOf(dc.installee.__class__)
if self.__class__ in depends:
yield dc.installee
def installedUniqueRequirements(self, target):
"""
Return an iterable of things installed on the target that this item
requires and are not required by anything else.
"""
myDepends = dependentsOf(self.__class__)
#XXX optimize?
for dc in self.store.query(_DependencyConnector,
_DependencyConnector.target==target):
if dc.installee is self:
#we're checking all the others not ourself
continue
depends = dependentsOf(dc.installee.__class__)
if self.__class__ in depends:
raise DependencyError(
"%r cannot be uninstalled from %r, "
"%r still depends on it" % (self, target, dc.installee))
for cls in myDepends[:]:
#If one of my dependencies is required by somebody
#else, leave it alone
if cls in depends:
myDepends.remove(cls)
for dc in self.store.query(_DependencyConnector,
_DependencyConnector.target==target):
if (dc.installee.__class__ in myDepends
and not dc.explicitlyInstalled):
yield dc.installee
def installedRequirements(self, target):
"""
Return an iterable of things installed on the target that this
item requires.
"""
myDepends = dependentsOf(self.__class__)
for dc in self.store.query(_DependencyConnector,
_DependencyConnector.target == target):
if dc.installee.__class__ in myDepends:
yield dc.installee
def onlyInstallPowerups(self, target):
"""
Deprecated - L{Item.powerUp} now has this functionality.
"""
target.powerUp(self)
class requiresFromSite(
record('powerupInterface defaultFactory siteDefaultFactory',
defaultFactory=None,
siteDefaultFactory=None)):
"""
A read-only descriptor that will return the site store's powerup for a
given item.
@ivar powerupInterface: an L{Interface} describing the powerup that the
site store should be adapted to.
@ivar defaultFactory: a 1-argument callable that takes the site store and
returns a value for this descriptor. This is invoked in cases where the
site store does not provide a default factory of its own, and this
descriptor is retrieved from an item in a store with a parent.
@ivar siteDefaultFactory: a 1-argument callable that takes the site store
and returns a value for this descriptor. This is invoked in cases where
this descriptor is retrieved from an item in a store without a parent.
"""
def _invokeFactory(self, defaultFactory, siteStore):
if defaultFactory is None:
raise UnsatisfiedRequirement()
return defaultFactory(siteStore)
def __get__(self, oself, type=None):
"""
Retrieve the value of this dependency from the site store.
"""
siteStore = oself.store.parent
if siteStore is not None:
pi = self.powerupInterface(siteStore, None)
if pi is None:
pi = self._invokeFactory(self.defaultFactory, siteStore)
else:
pi = self._invokeFactory(self.siteDefaultFactory, oself.store)
return pi
| mit | -1,192,944,152,766,764,800 | 35.318339 | 80 | 0.634909 | false |