# --- corpus-export artifact (commented out so this module parses) ---
# Original dataset table header:
#   code (stringlengths 3..1.05M) | repo_name (stringlengths 5..104) |
#   path (stringlengths 4..251) | language (1 class) | license (15 classes) |
#   size (int64 3..1.05M)
# ---|---|---|---|---|---
from __future__ import division
from sympy import (Basic, Symbol, sin, cos, exp, sqrt, Rational, Float, re, pi,
sympify, Add, Mul, Pow, Mod, I, log, S, Max, symbols, oo, Integer,
sign, im, nan, Dummy, factorial, comp, refine
)
from sympy.core.compatibility import long, range
from sympy.utilities.iterables import cartes
from sympy.utilities.pytest import XFAIL, raises
from sympy.utilities.randtest import verify_numerically
# Shared symbols for the tests below.  ``b`` carries the ``positive``
# assumption so assumption-dependent simplifications can be exercised.
a, c, x, y, z = symbols('a,c,x,y,z')
b = Symbol("b", positive=True)
def same_and_same_prec(a, b):
    """Stricter Float comparison: equal value *and* equal binary precision."""
    values_equal = a == b
    return values_equal and a._prec == b._prec
def test_bug1():
    """Regression: calling ``series`` must not mutate the symbol ``x``."""
    assert re(x) != x
    x.series(x, 0, 1)
    assert re(x) != x
def test_Symbol():
    """Basic Mul/Pow collection on symbols and complex/real/imaginary queries."""
    e = a*b
    assert e == a*b
    assert a*b*b == a*b**2
    assert a*b*b + c == c + a*b**2
    assert a*b*b - c == -c + a*b**2
    x = Symbol('x', complex=True, real=False)
    assert x.is_imaginary is None  # could be I or 1 + I
    x = Symbol('x', complex=True, imaginary=False)
    assert x.is_real is None  # could be 1 or 1 + I
    x = Symbol('x', real=True)
    assert x.is_complex
    x = Symbol('x', imaginary=True)
    assert x.is_complex
    x = Symbol('x', real=False, imaginary=False)
    assert x.is_complex is None  # might be a non-number
def test_arit0():
    """Automatic collection/flattening of Add and Mul with Rational coefficients."""
    p = Rational(5)
    e = a*b
    assert e == a*b
    e = a*b + b*a
    assert e == 2*a*b
    e = a*b + b*a + a*b + p*b*a
    assert e == 8*a*b
    e = a*b + b*a + a*b + p*b*a + a
    assert e == a + 8*a*b
    e = a + a
    assert e == 2*a
    e = a + b + a
    assert e == b + 2*a
    e = a + b*b + a + b*b
    assert e == 2*a + 2*b**2
    e = a + Rational(2) + b*b + a + b*b + p
    assert e == 7 + 2*a + 2*b**2
    e = (a + b*b + a + b*b)*p
    assert e == 5*(2*a + 2*b**2)
    e = (a*b*c + c*b*a + b*a*c)*p
    assert e == 15*a*b*c
    e = (a*b*c + c*b*a + b*a*c)*p - Rational(15)*a*b*c
    assert e == Rational(0)
    e = Rational(50)*(a - a)
    assert e == Rational(0)
    e = b*a - b - a*b + b
    assert e == Rational(0)
    e = a*b + c**p
    assert e == a*b + c**5
    e = a/b
    assert e == a*b**(-1)
    e = a*2*2
    assert e == 4*a
    e = 2 + a*2/2
    assert e == 2 + a
    e = 2 - a - 2
    assert e == -a
    e = 2*a*2
    assert e == 4*a
    e = 2/a/2
    assert e == a**(-1)
    e = 2**a**2
    assert e == 2**(a**2)  # ** is right-associative
    e = -(1 + a)
    assert e == -1 - a
    e = Rational(1, 2)*(1 + a)
    assert e == Rational(1, 2) + a/2
def test_div():
    """Division is represented as multiplication by a negative power."""
    e = a/b
    assert e == a*b**(-1)
    e = a/b + c/2
    assert e == a*b**(-1) + Rational(1)/2*c
    e = (1 - b)/(b - 1)
    # not automatically simplified to -1
    assert e == (1 + -b)*((-1) + b)**(-1)
def test_pow():
    """Power collection, expansion, fractional powers and (-1)** parity rules."""
    n1 = Rational(1)
    n2 = Rational(2)
    n5 = Rational(5)
    e = a*a
    assert e == a**2
    e = a*a*a
    assert e == a**3
    e = a*a*a*a**Rational(6)
    assert e == a**9
    e = a*a*a*a**Rational(6) - a**Rational(9)
    assert e == Rational(0)
    e = a**(b - b)
    assert e == Rational(1)
    e = (a + Rational(1) - a)**b
    assert e == Rational(1)
    e = (a + b + c)**n2
    assert e == (a + b + c)**2
    assert e.expand() == 2*b*c + 2*a*c + 2*a*b + a**2 + c**2 + b**2
    e = (a + b)**n2
    assert e == (a + b)**2
    assert e.expand() == 2*a*b + a**2 + b**2
    e = (a + b)**(n1/n2)
    assert e == sqrt(a + b)
    assert e.expand() == sqrt(a + b)
    n = n5**(n1/n2)
    assert n == sqrt(5)
    e = n*a*b - n*b*a
    assert e == Rational(0)
    e = n*a*b + n*b*a
    assert e == 2*a*b*sqrt(5)
    assert e.diff(a) == 2*b*sqrt(5)
    # repeated on purpose: the second call exercises the cached result
    assert e.diff(a) == 2*b*sqrt(5)
    e = a/b**2
    assert e == a*b**(-2)
    assert sqrt(2*(1 + sqrt(2))) == (2*(1 + 2**Rational(1, 2)))**Rational(1, 2)
    x = Symbol('x')
    y = Symbol('y')
    assert ((x*y)**3).expand() == y**3 * x**3
    assert ((x*y)**-3).expand() == y**-3 * x**-3
    assert (x**5*(3*x)**(3)).expand() == 27 * x**8
    assert (x**5*(-3*x)**(3)).expand() == -27 * x**8
    assert (x**5*(3*x)**(-3)).expand() == Rational(1, 27) * x**2
    assert (x**5*(-3*x)**(-3)).expand() == -Rational(1, 27) * x**2
    # expand_power_exp
    assert (x**(y**(x + exp(x + y)) + z)).expand(deep=False) == \
        x**z*x**(y**(x + exp(x + y)))
    assert (x**(y**(x + exp(x + y)) + z)).expand() == \
        x**z*x**(y**x*y**(exp(x)*exp(y)))
    n = Symbol('n', even=False)
    k = Symbol('k', even=True)
    o = Symbol('o', odd=True)
    assert (-1)**x == (-1)**x
    assert (-1)**n == (-1)**n
    assert (-2)**k == 2**k
    assert (-1)**k == 1
def test_pow2():
    """Fractional powers of negated bases must NOT auto-simplify."""
    # x**(2*y) is always (x**y)**2 but is only (x**2)**y if
    # x.is_positive or y.is_integer
    # let x = 1 to see why the following are not true.
    assert (-x)**Rational(2, 3) != x**Rational(2, 3)
    assert (-x)**Rational(5, 7) != -x**Rational(5, 7)
    assert ((-x)**2)**Rational(1, 3) != ((-x)**Rational(1, 3))**2
    assert sqrt(x**2) != x
def test_pow3():
    """Integer powers of surds are extracted automatically."""
    assert sqrt(2)**3 == 2 * sqrt(2)
    assert sqrt(2)**3 == sqrt(8)
def test_pow_E():
    """Rewriting b**(1/log(b)-style exponents) in terms of E."""
    assert 2**(y/log(2)) == S.Exp1**y
    assert 2**(y/log(2)/3) == S.Exp1**(y/3)
    assert 3**(1/log(-3)) != S.Exp1
    assert (3 + 2*I)**(1/(log(-3 - 2*I) + I*pi)) == S.Exp1
    assert (4 + 2*I)**(1/(log(-4 - 2*I) + I*pi)) == S.Exp1
    assert (3 + 2*I)**(1/(log(-3 - 2*I, 3)/2 + I*pi/log(3)/2)) == 9
    assert (3 + 2*I)**(1/(log(3 + 2*I, 3)/2)) == 9
    # every time tests are run they will affirm with a different random
    # value that this identity holds
    while 1:
        b = x._random()
        r, i = b.as_real_imag()
        if i:  # need a value with a nonzero imaginary part
            break
    assert verify_numerically(b**(1/(log(-b) + sign(i)*I*pi).n()), S.Exp1)
def test_pow_issue_3516():
    """Regression: 4**(1/4) simplifies to sqrt(2)."""
    assert 4**Rational(1, 4) == sqrt(2)
def test_pow_im():
    """Rational powers of (products of) imaginary factors."""
    for m in (-2, -1, 2):
        for d in (3, 4, 5):
            b = m*I
            for i in range(1, 4*d + 1):
                e = Rational(i, d)
                # symbolic power must agree numerically with the float power
                assert (b**e - b.n()**e.n()).n(2, chop=1e-10) == 0
    e = Rational(7, 3)
    assert (2*x*I)**e == 4*2**Rational(1, 3)*(I*x)**e  # same as Wolfram Alpha
    im = symbols('im', imaginary=True)
    assert (2*im*I)**e == 4*2**Rational(1, 3)*(I*im)**e
    args = [I, I, I, I, 2]
    e = Rational(1, 3)
    ans = 2**e
    assert Mul(*args, evaluate=False)**e == ans
    assert Mul(*args)**e == ans
    args = [I, I, I, 2]
    e = Rational(1, 3)
    ans = 2**e*(-I)**e
    assert Mul(*args, evaluate=False)**e == ans
    assert Mul(*args)**e == ans
    args.append(-3)
    ans = (6*I)**e
    assert Mul(*args, evaluate=False)**e == ans
    assert Mul(*args)**e == ans
    args.append(-1)
    ans = (-6*I)**e
    assert Mul(*args, evaluate=False)**e == ans
    assert Mul(*args)**e == ans
    args = [I, I, 2]
    e = Rational(1, 3)
    ans = (-2)**e
    assert Mul(*args, evaluate=False)**e == ans
    assert Mul(*args)**e == ans
    args.append(-3)
    ans = (6)**e
    assert Mul(*args, evaluate=False)**e == ans
    assert Mul(*args)**e == ans
    args.append(-1)
    ans = (-6)**e
    assert Mul(*args, evaluate=False)**e == ans
    assert Mul(*args)**e == ans
    assert Mul(Pow(-1, Rational(3, 2), evaluate=False), I, I) == I
    assert Mul(I*Pow(I, S.Half, evaluate=False)) == (-1)**Rational(3, 4)
def test_real_mul():
    """Float(0) absorbs a product; Float(1) is kept as an explicit factor."""
    assert Float(0) * pi * x == Float(0)
    assert set((Float(1) * pi * x).args) == set([Float(1), pi, x])
def test_ncmul():
    """Non-commutative Mul: order of nc factors is preserved, commutatives float out."""
    A = Symbol("A", commutative=False)
    B = Symbol("B", commutative=False)
    C = Symbol("C", commutative=False)
    assert A*B != B*A
    assert A*B*C != C*B*A
    assert A*b*B*3*C == 3*b*A*B*C
    assert A*b*B*3*C != 3*b*B*A*C
    assert A*b*B*3*C == 3*A*B*C*b
    assert A + B == B + A
    assert (A + B)*C != C*(A + B)
    assert C*(A + B)*C != C*C*(A + B)
    assert A*A == A**2
    assert (A + B)*(A + B) == (A + B)**2
    assert A**-1 * A == 1
    assert A/A == 1
    assert A/(A**2) == 1/A
    assert A/(1 + A) == A/(1 + A)
    assert set((A + B + 2*(A + B)).args) == \
        set([A, B, 2*(A + B)])
def test_ncpow():
    """Powers with non-commutative bases/exponents combine only when safe."""
    x = Symbol('x', commutative=False)
    y = Symbol('y', commutative=False)
    z = Symbol('z', commutative=False)
    a = Symbol('a')
    b = Symbol('b')
    c = Symbol('c')
    assert (x**2)*(y**2) != (y**2)*(x**2)
    assert (x**-2)*y != y*(x**2)
    assert 2**x*2**y != 2**(x + y)
    assert 2**x*2**y*2**z != 2**(x + y + z)
    # same nc symbol in the exponents -> exponents may be added
    assert 2**x*2**(2*x) == 2**(3*x)
    assert 2**x*2**(2*x)*2**x == 2**(4*x)
    assert exp(x)*exp(y) != exp(y)*exp(x)
    assert exp(x)*exp(y)*exp(z) != exp(y)*exp(x)*exp(z)
    assert exp(x)*exp(y)*exp(z) != exp(x + y + z)
    assert x**a*x**b != x**(a + b)
    assert x**a*x**b*x**c != x**(a + b + c)
    assert x**3*x**4 == x**7
    assert x**3*x**4*x**2 == x**9
    assert x**a*x**(4*a) == x**(5*a)
    assert x**a*x**(4*a)*x**a == x**(6*a)
def test_powerbug():
    """(-x)**n equals x**n exactly for even n, differs for odd n."""
    x = Symbol("x")
    assert x**1 != (-x)**1
    assert x**2 == (-x)**2
    assert x**3 != (-x)**3
    assert x**4 == (-x)**4
    assert x**5 != (-x)**5
    assert x**6 == (-x)**6
    assert x**128 == (-x)**128
    assert x**129 != (-x)**129
    assert (2*x)**2 == (-2*x)**2
def test_Mul_doesnt_expand_exp():
    """Mul combines powers only for shared bases or shared exponents."""
    x = Symbol('x')
    y = Symbol('y')
    assert exp(x)*exp(y) == exp(x)*exp(y)
    assert 2**x*2**y == 2**x*2**y
    assert x**2*x**3 == x**5
    assert 2**x*3**x == 6**x
    assert x**(y)*x**(2*y) == x**(3*y)
    assert sqrt(2)*sqrt(2) == 2
    assert 2**x*2**(2*x) == 2**(3*x)
    assert sqrt(2)*2**Rational(1, 4)*5**Rational(3, 4) == 10**Rational(3, 4)
    assert (x**(-log(5)/log(3))*x)/(x*x**( - log(5)/log(3))) == sympify(1)
def test_Add_Mul_is_integer():
    """is_integer propagation through sums and products."""
    x = Symbol('x')
    k = Symbol('k', integer=True)
    n = Symbol('n', integer=True)
    assert (2*k).is_integer is True
    assert (-k).is_integer is True
    assert (k/3).is_integer is None
    assert (x*k*n).is_integer is None
    assert (k + n).is_integer is True
    assert (k + x).is_integer is None
    assert (k + n*x).is_integer is None
    assert (k + n/3).is_integer is None
    # (1+sqrt(3))*(1-sqrt(3)) == -2, so the query must not report False
    assert ((1 + sqrt(3))*(-sqrt(3) + 1)).is_integer is not False
    assert (1 + (1 + sqrt(3))*(-sqrt(3) + 1)).is_integer is not False
def test_Add_Mul_is_finite():
    """is_finite propagation when one factor/term is explicitly infinite."""
    x = Symbol('x', real=True, finite=False)
    assert sin(x).is_finite is True
    assert (x*sin(x)).is_finite is False
    assert (1024*sin(x)).is_finite is True
    assert (sin(x)*exp(x)).is_finite is not True
    assert (sin(x)*cos(x)).is_finite is True
    assert (x*sin(x)*exp(x)).is_finite is not True
    assert (sin(x) - 67).is_finite is True
    assert (sin(x) + exp(x)).is_finite is not True
    assert (1 + x).is_finite is False
    assert (1 + x**2 + (1 + x)*(1 - x)).is_finite is None
    assert (sqrt(2)*(1 + x)).is_finite is False
    assert (sqrt(2)*(1 + x)*(1 - x)).is_finite is False
def test_Mul_is_even_odd():
    """Parity (is_even/is_odd) inference for products."""
    x = Symbol('x', integer=True)
    y = Symbol('y', integer=True)
    k = Symbol('k', odd=True)
    n = Symbol('n', odd=True)
    m = Symbol('m', even=True)
    assert (2*x).is_even is True
    assert (2*x).is_odd is False
    assert (3*x).is_even is None
    assert (3*x).is_odd is None
    assert (k/3).is_integer is None
    assert (k/3).is_even is None
    assert (k/3).is_odd is None
    assert (2*n).is_even is True
    assert (2*n).is_odd is False
    assert (2*m).is_even is True
    assert (2*m).is_odd is False
    assert (-n).is_even is False
    assert (-n).is_odd is True
    assert (k*n).is_even is False
    assert (k*n).is_odd is True
    assert (k*m).is_even is True
    assert (k*m).is_odd is False
    assert (k*n*m).is_even is True
    assert (k*n*m).is_odd is False
    assert (k*m*x).is_even is True
    assert (k*m*x).is_odd is False
    # issue 6791:
    assert (x/2).is_integer is None
    assert (k/2).is_integer is False
    assert (m/2).is_integer is True
    assert (x*y).is_even is None
    assert (x*x).is_even is None
    # x*(x + k) == x**2 + k*x; parity follows because x and x+odd differ in parity
    assert (x*(x + k)).is_even is True
    assert (x*(x + m)).is_even is None
    assert (x*y).is_odd is None
    assert (x*x).is_odd is None
    assert (x*(x + k)).is_odd is False
    assert (x*(x + m)).is_odd is None
@XFAIL
def test_evenness_in_ternary_integer_product_with_odd():
    # Tests that oddness inference is independent of term ordering.
    # Term ordering at the point of testing depends on SymPy's symbol order, so
    # we try to force a different order by modifying symbol names.
    x = Symbol('x', integer=True)
    y = Symbol('y', integer=True)
    k = Symbol('k', odd=True)
    assert (x*y*(y + k)).is_even is True
    assert (y*x*(x + k)).is_even is True
def test_evenness_in_ternary_integer_product_with_even():
    """An even offset gives no parity information about the product."""
    x = Symbol('x', integer=True)
    y = Symbol('y', integer=True)
    m = Symbol('m', even=True)
    assert (x*y*(y + m)).is_even is None
@XFAIL
def test_oddness_in_ternary_integer_product_with_odd():
    # Tests that oddness inference is independent of term ordering.
    # Term ordering at the point of testing depends on SymPy's symbol order, so
    # we try to force a different order by modifying symbol names.
    x = Symbol('x', integer=True)
    y = Symbol('y', integer=True)
    k = Symbol('k', odd=True)
    assert (x*y*(y + k)).is_odd is False
    assert (y*x*(x + k)).is_odd is False
def test_oddness_in_ternary_integer_product_with_even():
    """An even offset gives no oddness information about the product."""
    x = Symbol('x', integer=True)
    y = Symbol('y', integer=True)
    m = Symbol('m', even=True)
    assert (x*y*(y + m)).is_odd is None
def test_Mul_is_rational():
    """is_rational for products, including the zero*imaginary corner case."""
    x = Symbol('x')
    n = Symbol('n', integer=True)
    m = Symbol('m', integer=True, nonzero=True)
    assert (n/m).is_rational is True
    assert (x/pi).is_rational is None
    assert (x/n).is_rational is None
    assert (m/pi).is_rational is False
    r = Symbol('r', rational=True)
    assert (pi*r).is_rational is None
    # issue 8008
    z = Symbol('z', zero=True)
    i = Symbol('i', imaginary=True)
    assert (z*i).is_rational is None
    bi = Symbol('i', imaginary=True, finite=True)
    assert (z*bi).is_zero is True
def test_Add_is_rational():
    """is_rational for sums of rational/irrational/unknown terms."""
    x = Symbol('x')
    n = Symbol('n', rational=True)
    m = Symbol('m', rational=True)
    assert (n + m).is_rational is True
    assert (x + pi).is_rational is None
    assert (x + n).is_rational is None
    assert (n + pi).is_rational is False
def test_Add_is_even_odd():
    """Parity inference for sums of odd/even symbols and integer constants."""
    x = Symbol('x', integer=True)
    k = Symbol('k', odd=True)
    n = Symbol('n', odd=True)
    m = Symbol('m', even=True)
    assert (k + 7).is_even is True
    assert (k + 7).is_odd is False
    assert (-k + 7).is_even is True
    assert (-k + 7).is_odd is False
    assert (k - 12).is_even is False
    assert (k - 12).is_odd is True
    assert (-k - 12).is_even is False
    assert (-k - 12).is_odd is True
    assert (k + n).is_even is True
    assert (k + n).is_odd is False
    assert (k + m).is_even is False
    assert (k + m).is_odd is True
    assert (k + n + m).is_even is True
    assert (k + n + m).is_odd is False
    assert (k + n + x + m).is_even is None
    assert (k + n + x + m).is_odd is None
def test_Mul_is_negative_positive():
    """Sign inference (is_negative/is_positive) for products of signed symbols."""
    x = Symbol('x', real=True)
    y = Symbol('y', real=False, complex=True)
    z = Symbol('z', zero=True)
    e = 2*z
    # a zero product is neither positive nor negative
    assert e.is_Mul and e.is_positive is False and e.is_negative is False
    neg = Symbol('neg', negative=True)
    pos = Symbol('pos', positive=True)
    nneg = Symbol('nneg', nonnegative=True)
    npos = Symbol('npos', nonpositive=True)
    assert neg.is_negative is True
    assert (-neg).is_negative is False
    assert (2*neg).is_negative is True
    assert (2*pos)._eval_is_negative() is False
    assert (2*pos).is_negative is False
    assert pos.is_negative is False
    assert (-pos).is_negative is True
    assert (2*pos).is_negative is False
    assert (pos*neg).is_negative is True
    assert (2*pos*neg).is_negative is True
    assert (-pos*neg).is_negative is False
    assert (pos*neg*y).is_negative is False  # y.is_real=F; !real -> !neg
    assert nneg.is_negative is False
    assert (-nneg).is_negative is None
    assert (2*nneg).is_negative is False
    assert npos.is_negative is None
    assert (-npos).is_negative is False
    assert (2*npos).is_negative is None
    assert (nneg*npos).is_negative is None
    assert (neg*nneg).is_negative is None
    assert (neg*npos).is_negative is False
    assert (pos*nneg).is_negative is False
    assert (pos*npos).is_negative is None
    assert (npos*neg*nneg).is_negative is False
    assert (npos*pos*nneg).is_negative is None
    assert (-npos*neg*nneg).is_negative is None
    assert (-npos*pos*nneg).is_negative is False
    assert (17*npos*neg*nneg).is_negative is False
    assert (17*npos*pos*nneg).is_negative is None
    assert (neg*npos*pos*nneg).is_negative is False
    assert (x*neg).is_negative is None
    assert (nneg*npos*pos*x*neg).is_negative is None
    assert neg.is_positive is False
    assert (-neg).is_positive is True
    assert (2*neg).is_positive is False
    assert pos.is_positive is True
    assert (-pos).is_positive is False
    assert (2*pos).is_positive is True
    assert (pos*neg).is_positive is False
    assert (2*pos*neg).is_positive is False
    assert (-pos*neg).is_positive is True
    assert (-pos*neg*y).is_positive is False  # y.is_real=F; !real -> !neg
    assert nneg.is_positive is None
    assert (-nneg).is_positive is False
    assert (2*nneg).is_positive is None
    assert npos.is_positive is False
    assert (-npos).is_positive is None
    assert (2*npos).is_positive is False
    assert (nneg*npos).is_positive is False
    assert (neg*nneg).is_positive is False
    assert (neg*npos).is_positive is None
    assert (pos*nneg).is_positive is None
    assert (pos*npos).is_positive is False
    assert (npos*neg*nneg).is_positive is None
    assert (npos*pos*nneg).is_positive is False
    assert (-npos*neg*nneg).is_positive is False
    assert (-npos*pos*nneg).is_positive is None
    assert (17*npos*neg*nneg).is_positive is None
    assert (17*npos*pos*nneg).is_positive is False
    assert (neg*npos*pos*nneg).is_positive is None
    assert (x*neg).is_positive is None
    assert (nneg*npos*pos*x*neg).is_positive is None
def test_Mul_is_negative_positive_2():
    """Products of nonnegative/nonpositive symbols: sign known, zero unknown."""
    a = Symbol('a', nonnegative=True)
    b = Symbol('b', nonnegative=True)
    c = Symbol('c', nonpositive=True)
    d = Symbol('d', nonpositive=True)
    assert (a*b).is_nonnegative is True
    assert (a*b).is_negative is False
    assert (a*b).is_zero is None
    assert (a*b).is_positive is None
    assert (c*d).is_nonnegative is True
    assert (c*d).is_negative is False
    assert (c*d).is_zero is None
    assert (c*d).is_positive is None
    assert (a*c).is_nonpositive is True
    assert (a*c).is_positive is False
    assert (a*c).is_zero is None
    assert (a*c).is_negative is None
def test_Mul_is_nonpositive_nonnegative():
    """is_nonpositive/is_nonnegative inference for products."""
    x = Symbol('x', real=True)
    k = Symbol('k', negative=True)
    n = Symbol('n', positive=True)
    u = Symbol('u', nonnegative=True)
    v = Symbol('v', nonpositive=True)
    assert k.is_nonpositive is True
    assert (-k).is_nonpositive is False
    assert (2*k).is_nonpositive is True
    assert n.is_nonpositive is False
    assert (-n).is_nonpositive is True
    assert (2*n).is_nonpositive is False
    assert (n*k).is_nonpositive is True
    assert (2*n*k).is_nonpositive is True
    assert (-n*k).is_nonpositive is False
    assert u.is_nonpositive is None
    assert (-u).is_nonpositive is True
    assert (2*u).is_nonpositive is None
    assert v.is_nonpositive is True
    assert (-v).is_nonpositive is None
    assert (2*v).is_nonpositive is True
    assert (u*v).is_nonpositive is True
    assert (k*u).is_nonpositive is True
    assert (k*v).is_nonpositive is None
    assert (n*u).is_nonpositive is None
    assert (n*v).is_nonpositive is True
    assert (v*k*u).is_nonpositive is None
    assert (v*n*u).is_nonpositive is True
    assert (-v*k*u).is_nonpositive is True
    assert (-v*n*u).is_nonpositive is None
    assert (17*v*k*u).is_nonpositive is None
    assert (17*v*n*u).is_nonpositive is True
    assert (k*v*n*u).is_nonpositive is None
    assert (x*k).is_nonpositive is None
    assert (u*v*n*x*k).is_nonpositive is None
    assert k.is_nonnegative is False
    assert (-k).is_nonnegative is True
    assert (2*k).is_nonnegative is False
    assert n.is_nonnegative is True
    assert (-n).is_nonnegative is False
    assert (2*n).is_nonnegative is True
    assert (n*k).is_nonnegative is False
    assert (2*n*k).is_nonnegative is False
    assert (-n*k).is_nonnegative is True
    assert u.is_nonnegative is True
    assert (-u).is_nonnegative is None
    assert (2*u).is_nonnegative is True
    assert v.is_nonnegative is None
    assert (-v).is_nonnegative is True
    assert (2*v).is_nonnegative is None
    assert (u*v).is_nonnegative is None
    assert (k*u).is_nonnegative is None
    assert (k*v).is_nonnegative is True
    assert (n*u).is_nonnegative is True
    assert (n*v).is_nonnegative is None
    assert (v*k*u).is_nonnegative is True
    assert (v*n*u).is_nonnegative is None
    assert (-v*k*u).is_nonnegative is None
    assert (-v*n*u).is_nonnegative is True
    assert (17*v*k*u).is_nonnegative is True
    assert (17*v*n*u).is_nonnegative is None
    assert (k*v*n*u).is_nonnegative is True
    assert (x*k).is_nonnegative is None
    assert (u*v*n*x*k).is_nonnegative is None
def test_Add_is_negative_positive():
    """Sign inference for sums, plus two exact radical-zero identities."""
    x = Symbol('x', real=True)
    k = Symbol('k', negative=True)
    n = Symbol('n', positive=True)
    u = Symbol('u', nonnegative=True)
    v = Symbol('v', nonpositive=True)
    assert (k - 2).is_negative is True
    assert (k + 17).is_negative is None
    assert (-k - 5).is_negative is None
    assert (-k + 123).is_negative is False
    assert (k - n).is_negative is True
    assert (k + n).is_negative is None
    assert (-k - n).is_negative is None
    assert (-k + n).is_negative is False
    assert (k - n - 2).is_negative is True
    assert (k + n + 17).is_negative is None
    assert (-k - n - 5).is_negative is None
    assert (-k + n + 123).is_negative is False
    assert (-2*k + 123*n + 17).is_negative is False
    assert (k + u).is_negative is None
    assert (k + v).is_negative is True
    assert (n + u).is_negative is False
    assert (n + v).is_negative is None
    assert (u - v).is_negative is False
    assert (u + v).is_negative is None
    assert (-u - v).is_negative is None
    assert (-u + v).is_negative is None
    assert (u - v + n + 2).is_negative is False
    assert (u + v + n + 2).is_negative is None
    assert (-u - v + n + 2).is_negative is None
    assert (-u + v + n + 2).is_negative is None
    assert (k + x).is_negative is None
    assert (k + x - n).is_negative is None
    assert (k - 2).is_positive is False
    assert (k + 17).is_positive is None
    assert (-k - 5).is_positive is None
    assert (-k + 123).is_positive is True
    assert (k - n).is_positive is False
    assert (k + n).is_positive is None
    assert (-k - n).is_positive is None
    assert (-k + n).is_positive is True
    assert (k - n - 2).is_positive is False
    assert (k + n + 17).is_positive is None
    assert (-k - n - 5).is_positive is None
    assert (-k + n + 123).is_positive is True
    assert (-2*k + 123*n + 17).is_positive is True
    assert (k + u).is_positive is None
    assert (k + v).is_positive is False
    assert (n + u).is_positive is True
    assert (n + v).is_positive is None
    assert (u - v).is_positive is None
    assert (u + v).is_positive is None
    assert (-u - v).is_positive is None
    assert (-u + v).is_positive is False
    assert (u - v - n - 2).is_positive is None
    assert (u + v - n - 2).is_positive is None
    assert (-u - v - n - 2).is_positive is None
    assert (-u + v - n - 2).is_positive is False
    assert (n + x).is_positive is None
    assert (n + x - k).is_positive is None
    # nested-radical expressions that are identically zero
    z = (-3 - sqrt(5) + (-sqrt(10)/2 - sqrt(2)/2)**2)
    assert z.is_zero
    z = sqrt(1 + sqrt(3)) + sqrt(3 + 3*sqrt(3)) - sqrt(10 + 6*sqrt(3))
    assert z.is_zero
def test_Add_is_nonpositive_nonnegative():
    """is_nonpositive/is_nonnegative inference for sums."""
    x = Symbol('x', real=True)
    k = Symbol('k', negative=True)
    n = Symbol('n', positive=True)
    u = Symbol('u', nonnegative=True)
    v = Symbol('v', nonpositive=True)
    assert (u - 2).is_nonpositive is None
    assert (u + 17).is_nonpositive is False
    assert (-u - 5).is_nonpositive is True
    assert (-u + 123).is_nonpositive is None
    assert (u - v).is_nonpositive is None
    assert (u + v).is_nonpositive is None
    assert (-u - v).is_nonpositive is None
    assert (-u + v).is_nonpositive is True
    assert (u - v - 2).is_nonpositive is None
    assert (u + v + 17).is_nonpositive is None
    assert (-u - v - 5).is_nonpositive is None
    assert (-u + v - 123).is_nonpositive is True
    assert (-2*u + 123*v - 17).is_nonpositive is True
    assert (k + u).is_nonpositive is None
    assert (k + v).is_nonpositive is True
    assert (n + u).is_nonpositive is False
    assert (n + v).is_nonpositive is None
    assert (k - n).is_nonpositive is True
    assert (k + n).is_nonpositive is None
    assert (-k - n).is_nonpositive is None
    assert (-k + n).is_nonpositive is False
    assert (k - n + u + 2).is_nonpositive is None
    assert (k + n + u + 2).is_nonpositive is None
    assert (-k - n + u + 2).is_nonpositive is None
    assert (-k + n + u + 2).is_nonpositive is False
    assert (u + x).is_nonpositive is None
    assert (v - x - n).is_nonpositive is None
    assert (u - 2).is_nonnegative is None
    assert (u + 17).is_nonnegative is True
    assert (-u - 5).is_nonnegative is False
    assert (-u + 123).is_nonnegative is None
    assert (u - v).is_nonnegative is True
    assert (u + v).is_nonnegative is None
    assert (-u - v).is_nonnegative is None
    assert (-u + v).is_nonnegative is None
    assert (u - v + 2).is_nonnegative is True
    assert (u + v + 17).is_nonnegative is None
    assert (-u - v - 5).is_nonnegative is None
    assert (-u + v - 123).is_nonnegative is False
    assert (2*u - 123*v + 17).is_nonnegative is True
    assert (k + u).is_nonnegative is None
    assert (k + v).is_nonnegative is False
    assert (n + u).is_nonnegative is True
    assert (n + v).is_nonnegative is None
    assert (k - n).is_nonnegative is False
    assert (k + n).is_nonnegative is None
    assert (-k - n).is_nonnegative is None
    assert (-k + n).is_nonnegative is True
    assert (k - n - u - 2).is_nonnegative is False
    assert (k + n - u - 2).is_nonnegative is None
    assert (-k - n - u - 2).is_nonnegative is None
    assert (-k + n - u - 2).is_nonnegative is None
    assert (u - x).is_nonnegative is None
    assert (v + x + n).is_nonnegative is None
def test_Pow_is_integer():
    """is_integer inference for powers, including unevaluated Pow objects."""
    x = Symbol('x')
    k = Symbol('k', integer=True)
    n = Symbol('n', integer=True, nonnegative=True)
    m = Symbol('m', integer=True, positive=True)
    assert (k**2).is_integer is True
    assert (k**(-2)).is_integer is None
    assert ((m + 1)**(-2)).is_integer is False
    assert (m**(-1)).is_integer is None  # issue 8580
    assert (2**k).is_integer is None
    assert (2**(-k)).is_integer is None
    assert (2**n).is_integer is True
    assert (2**(-n)).is_integer is None
    assert (2**m).is_integer is True
    assert (2**(-m)).is_integer is False
    assert (x**2).is_integer is None
    assert (2**x).is_integer is None
    assert (k**n).is_integer is True
    assert (k**(-n)).is_integer is None
    assert (k**x).is_integer is None
    assert (x**k).is_integer is None
    assert (k**(n*m)).is_integer is True
    assert (k**(-n*m)).is_integer is None
    assert sqrt(3).is_integer is False
    assert sqrt(.3).is_integer is False
    assert Pow(3, 2, evaluate=False).is_integer is True
    assert Pow(3, 0, evaluate=False).is_integer is True
    assert Pow(3, -2, evaluate=False).is_integer is False
    assert Pow(S.Half, 3, evaluate=False).is_integer is False
    # decided by re-evaluating
    assert Pow(3, S.Half, evaluate=False).is_integer is False
    assert Pow(3, S.Half, evaluate=False).is_integer is False
    assert Pow(4, S.Half, evaluate=False).is_integer is True
    assert Pow(S.Half, -2, evaluate=False).is_integer is True
    assert ((-1)**k).is_integer
    x = Symbol('x', real=True, integer=False)
    assert (x**2).is_integer is None  # issue 8641
def test_Pow_is_real():
    """is_real/is_imaginary inference for powers with various assumptions."""
    x = Symbol('x', real=True)
    y = Symbol('y', real=True, positive=True)
    assert (x**2).is_real is True
    assert (x**3).is_real is True
    assert (x**x).is_real is None
    assert (y**x).is_real is True
    assert (x**Rational(1, 3)).is_real is None
    assert (y**Rational(1, 3)).is_real is True
    assert sqrt(-1 - sqrt(2)).is_real is False
    i = Symbol('i', imaginary=True)
    assert (i**i).is_real is None
    assert (I**i).is_real is True
    assert ((-I)**i).is_real is True
    assert (2**i).is_real is None  # (2**(pi/log(2) * I)) is real, 2**I is not
    assert (2**I).is_real is False
    assert (2**-I).is_real is False
    assert (i**2).is_real is True
    assert (i**3).is_real is False
    assert (i**x).is_real is None  # could be (-I)**(2/3)
    e = Symbol('e', even=True)
    o = Symbol('o', odd=True)
    k = Symbol('k', integer=True)
    assert (i**e).is_real is True
    assert (i**o).is_real is False
    assert (i**k).is_real is None
    assert (i**(4*k)).is_real is True
    x = Symbol("x", nonnegative=True)
    y = Symbol("y", nonnegative=True)
    assert im(x**y).expand(complex=True) is S.Zero
    assert (x**y).is_real is True
    i = Symbol('i', imaginary=True)
    assert (exp(i)**I).is_real is True
    assert log(exp(i)).is_imaginary is None  # i could be 2*pi*I
    c = Symbol('c', complex=True)
    assert log(c).is_real is None  # c could be 0 or 2, too
    assert log(exp(c)).is_real is None  # log(0), log(E), ...
    n = Symbol('n', negative=False)
    assert log(n).is_real is None
    n = Symbol('n', nonnegative=True)
    assert log(n).is_real is None
    assert sqrt(-I).is_real is False  # issue 7843
def test_real_Pow():
    """k**(I*pi/log(k)) is exp(I*pi) == -1, hence real."""
    k = Symbol('k', integer=True, nonzero=True)
    assert (k**(I*pi/log(k))).is_real
def test_Pow_is_finite():
    """is_finite for powers when the base/exponent may be unbounded or zero."""
    x = Symbol('x', real=True)
    p = Symbol('p', positive=True)
    n = Symbol('n', negative=True)
    assert (x**2).is_finite is None  # x could be oo
    assert (x**x).is_finite is None  # ditto
    assert (p**x).is_finite is None  # ditto
    assert (n**x).is_finite is None  # ditto
    assert (1/S.Pi).is_finite
    assert (sin(x)**2).is_finite is True
    assert (sin(x)**x).is_finite is None
    assert (sin(x)**exp(x)).is_finite is None
    assert (1/sin(x)).is_finite is None  # if zero, no, otherwise yes
    assert (1/exp(x)).is_finite is None  # x could be -oo
def test_Pow_is_even_odd():
    """Parity inference for powers."""
    x = Symbol('x')
    k = Symbol('k', even=True)
    n = Symbol('n', odd=True)
    m = Symbol('m', integer=True, nonnegative=True)
    p = Symbol('p', integer=True, positive=True)
    assert ((-1)**n).is_odd
    assert ((-1)**k).is_odd
    assert ((-1)**(m - p)).is_odd
    assert (k**2).is_even is True
    assert (n**2).is_even is False
    assert (2**k).is_even is None  # k could be zero or negative
    assert (x**2).is_even is None
    assert (k**m).is_even is None  # m could be zero
    assert (n**m).is_even is False
    assert (k**p).is_even is True
    assert (n**p).is_even is False
    assert (m**k).is_even is None
    assert (p**k).is_even is None
    assert (m**n).is_even is None
    assert (p**n).is_even is None
    assert (k**x).is_even is None
    assert (n**x).is_even is None
    assert (k**2).is_odd is False
    assert (n**2).is_odd is True
    assert (3**k).is_odd is None
    assert (k**m).is_odd is None
    assert (n**m).is_odd is True
    assert (k**p).is_odd is False
    assert (n**p).is_odd is True
    assert (m**k).is_odd is None
    assert (p**k).is_odd is None
    assert (m**n).is_odd is None
    assert (p**n).is_odd is None
    assert (k**x).is_odd is None
    assert (n**x).is_odd is None
def test_Pow_is_negative_positive():
    """Sign inference for powers with even/odd exponents and signed bases."""
    r = Symbol('r', real=True)
    k = Symbol('k', integer=True, positive=True)
    n = Symbol('n', even=True)
    m = Symbol('m', odd=True)
    x = Symbol('x')
    assert (2**r).is_positive is True
    assert ((-2)**r).is_positive is None
    assert ((-2)**n).is_positive is True
    assert ((-2)**m).is_positive is False
    assert (k**2).is_positive is True
    assert (k**(-2)).is_positive is True
    assert (k**r).is_positive is True
    assert ((-k)**r).is_positive is None
    assert ((-k)**n).is_positive is True
    assert ((-k)**m).is_positive is False
    assert (2**r).is_negative is False
    assert ((-2)**r).is_negative is None
    assert ((-2)**n).is_negative is False
    assert ((-2)**m).is_negative is True
    assert (k**2).is_negative is False
    assert (k**(-2)).is_negative is False
    assert (k**r).is_negative is False
    assert ((-k)**r).is_negative is None
    assert ((-k)**n).is_negative is False
    assert ((-k)**m).is_negative is True
    assert (2**x).is_positive is None
    assert (2**x).is_negative is None
def test_Pow_is_zero():
    """is_zero for powers, including unevaluated Pow with 0/oo arguments."""
    z = Symbol('z', zero=True)
    e = z**2
    assert e.is_zero
    assert e.is_positive is False
    assert e.is_negative is False
    assert Pow(0, 0, evaluate=False).is_zero is False
    assert Pow(0, 3, evaluate=False).is_zero
    assert Pow(0, oo, evaluate=False).is_zero
    assert Pow(0, -3, evaluate=False).is_zero is False
    assert Pow(0, -oo, evaluate=False).is_zero is False
    assert Pow(2, 2, evaluate=False).is_zero is False
    a = Symbol('a', zero=False)
    assert Pow(a, 3).is_zero is False  # issue 7965
    assert Pow(2, oo, evaluate=False).is_zero is False
    assert Pow(2, -oo, evaluate=False).is_zero
    assert Pow(S.Half, oo, evaluate=False).is_zero
    assert Pow(S.Half, -oo, evaluate=False).is_zero is False
def test_Pow_is_nonpositive_nonnegative():
    """is_nonpositive/is_nonnegative for powers, including imaginary bases."""
    x = Symbol('x', real=True)
    k = Symbol('k', integer=True, nonnegative=True)
    l = Symbol('l', integer=True, positive=True)
    n = Symbol('n', even=True)
    m = Symbol('m', odd=True)
    assert (x**(4*k)).is_nonnegative is True
    assert (2**x).is_nonnegative is True
    assert ((-2)**x).is_nonnegative is None
    assert ((-2)**n).is_nonnegative is True
    assert ((-2)**m).is_nonnegative is False
    assert (k**2).is_nonnegative is True
    assert (k**(-2)).is_nonnegative is None
    assert (k**k).is_nonnegative is True
    assert (k**x).is_nonnegative is None  # NOTE (0**x).is_real = U
    assert (l**x).is_nonnegative is True
    assert (l**x).is_positive is True
    assert ((-k)**x).is_nonnegative is None
    assert ((-k)**m).is_nonnegative is None
    assert (2**x).is_nonpositive is False
    assert ((-2)**x).is_nonpositive is None
    assert ((-2)**n).is_nonpositive is False
    assert ((-2)**m).is_nonpositive is True
    assert (k**2).is_nonpositive is None
    assert (k**(-2)).is_nonpositive is None
    assert (k**x).is_nonpositive is None
    assert ((-k)**x).is_nonpositive is None
    assert ((-k)**n).is_nonpositive is None
    assert (x**2).is_nonnegative is True
    i = symbols('i', imaginary=True)
    assert (i**2).is_nonpositive is True
    assert (i**4).is_nonpositive is False
    assert (i**3).is_nonpositive is False
    assert (I**i).is_nonnegative is True
    assert (exp(I)**i).is_nonnegative is True
    assert ((-k)**n).is_nonnegative is True
    assert ((-k)**m).is_nonpositive is True
def test_Mul_is_imaginary_real():
    """Check is_imaginary/is_real deductions for products of real,
    imaginary, and general complex factors."""
    r = Symbol('r', real=True)
    p = Symbol('p', positive=True)
    i = Symbol('i', imaginary=True)
    ii = Symbol('ii', imaginary=True)
    x = Symbol('x')
    assert I.is_imaginary is True
    assert I.is_real is False
    assert (-I).is_imaginary is True
    assert (-I).is_real is False
    assert (3*I).is_imaginary is True
    assert (3*I).is_real is False
    assert (I*I).is_imaginary is False
    assert (I*I).is_real is True

    e = (p + p*I)
    j = Symbol('j', integer=True, zero=False)
    # for symbolic integer exponents no parity is known, so no deduction
    assert (e**j).is_real is None
    assert (e**(2*j)).is_real is None
    assert (e**j).is_imaginary is None
    assert (e**(2*j)).is_imaginary is None
    # (p + p*I)**n cycles through real/imaginary with period 4
    assert (e**-1).is_imaginary is False
    assert (e**2).is_imaginary
    assert (e**3).is_imaginary is False
    assert (e**4).is_imaginary is False
    assert (e**5).is_imaginary is False
    assert (e**-1).is_real is False
    assert (e**2).is_real is False
    assert (e**3).is_real is False
    assert (e**4).is_real
    assert (e**5).is_real is False
    assert (e**3).is_complex

    assert (r*i).is_imaginary is None
    assert (r*i).is_real is None

    assert (x*i).is_imaginary is None
    assert (x*i).is_real is None

    assert (i*ii).is_imaginary is False
    assert (i*ii).is_real is True

    assert (r*i*ii).is_imaginary is False
    assert (r*i*ii).is_real is True

    # Github's issue 5874:
    nr = Symbol('nr', real=False, complex=True)
    a = Symbol('a', real=True, nonzero=True)
    b = Symbol('b', real=True)
    assert (i*nr).is_real is None
    assert (a*nr).is_real is False
    assert (b*nr).is_real is None
def test_Mul_hermitian_antihermitian():
    """Products involving a non-hermitian factor should give no
    hermitian/antihermitian deduction (None), regardless of factor order."""
    a = Symbol('a', hermitian=True, zero=False)
    b = Symbol('b', hermitian=True)
    c = Symbol('c', hermitian=False)
    d = Symbol('d', antihermitian=True)
    # evaluate=False keeps the factor order as written
    e1 = Mul(a, b, c, evaluate=False)
    e2 = Mul(b, a, c, evaluate=False)
    e3 = Mul(a, b, c, d, evaluate=False)
    e4 = Mul(b, a, c, d, evaluate=False)
    e5 = Mul(a, c, evaluate=False)
    e6 = Mul(a, c, d, evaluate=False)
    assert e1.is_hermitian is None
    assert e2.is_hermitian is None
    assert e1.is_antihermitian is None
    assert e2.is_antihermitian is None
    assert e3.is_antihermitian is None
    assert e4.is_antihermitian is None
    assert e5.is_antihermitian is None
    assert e6.is_antihermitian is None
def test_Add_is_comparable():
    """An Add is comparable only when it is free of symbols."""
    assert (x + y).is_comparable is False
    assert (x + 1).is_comparable is False
    assert (Rational(1, 3) - sqrt(8)).is_comparable is True
def test_Mul_is_comparable():
    """A Mul is comparable only when it is free of symbols."""
    assert (x*y).is_comparable is False
    assert (x*2).is_comparable is False
    assert (sqrt(2)*Rational(1, 3)).is_comparable is True
def test_Pow_is_comparable():
    """A Pow is comparable only when it is free of symbols."""
    assert (x**y).is_comparable is False
    assert (x**2).is_comparable is False
    assert (sqrt(Rational(1, 3))).is_comparable is True
def test_Add_is_positive_2():
    """Sign deductions for numeric Adds with irrational terms."""
    e = Rational(1, 3) - sqrt(8)
    assert e.is_positive is False
    assert e.is_negative is True

    e = pi - 1
    assert e.is_positive is True
    assert e.is_negative is False
def test_Add_is_irrational():
    """irrational + rational stays irrational."""
    i = Symbol('i', irrational=True)

    assert i.is_irrational is True
    assert i.is_rational is False

    assert (i + 1).is_irrational is True
    assert (i + 1).is_rational is False
@XFAIL
def test_issue_3531():
    """Known failure: division should defer to a foreign type's
    __rdiv__/__rtruediv__ (issue 3531)."""
    class MightyNumeric(tuple):
        def __rdiv__(self, other):
            return "something"

        def __rtruediv__(self, other):
            return "something"
    assert sympify(1)/MightyNumeric((1, 2)) == "something"
def test_issue_3531b():
    """Multiplication with a foreign type defining __mul__/__rmul__ should
    commute (both sides return None here, so the equality holds)."""
    class Foo:
        def __init__(self):
            self.field = 1.0

        def __mul__(self, other):
            self.field = self.field * other

        def __rmul__(self, other):
            self.field = other * self.field
    f = Foo()
    x = Symbol("x")
    assert f*x == x*f
def test_bug3():
    """Addition is commutative regardless of term order as written."""
    a = Symbol("a")
    b = Symbol("b", positive=True)
    e = 2*a + b
    f = b + 2*a
    assert e == f
def test_suppressed_evaluation():
    """evaluate=False keeps Add/Mul/Pow unevaluated with their raw args.

    Note: the identity elements (0 for Add, 1 for Mul) are still stripped
    from args even when evaluation is suppressed.
    """
    a = Add(0, 3, 2, evaluate=False)
    b = Mul(1, 3, 2, evaluate=False)
    c = Pow(3, 2, evaluate=False)
    assert a != 6
    assert a.func is Add
    assert a.args == (3, 2)
    assert b != 6
    assert b.func is Mul
    assert b.args == (3, 2)
    assert c != 9
    assert c.func is Pow
    assert c.args == (3, 2)
def test_Add_as_coeff_mul():
    # issue 5524.  These should all be (1, self)
    assert (x + 1).as_coeff_mul() == (1, (x + 1,))
    assert (x + 2).as_coeff_mul() == (1, (x + 2,))
    assert (x + 3).as_coeff_mul() == (1, (x + 3,))

    assert (x - 1).as_coeff_mul() == (1, (x - 1,))
    assert (x - 2).as_coeff_mul() == (1, (x - 2,))
    assert (x - 3).as_coeff_mul() == (1, (x - 3,))

    # same behavior for an integer-assumed symbol
    n = Symbol.__new__(Symbol, 'n') if False else Symbol('n', integer=True)  # noqa: E501 -- keep original simple form below
    n = Symbol('n', integer=True)
    assert (n + 1).as_coeff_mul() == (1, (n + 1,))
    assert (n + 2).as_coeff_mul() == (1, (n + 2,))
    assert (n + 3).as_coeff_mul() == (1, (n + 3,))

    assert (n - 1).as_coeff_mul() == (1, (n - 1,))
    assert (n - 2).as_coeff_mul() == (1, (n - 2,))
    assert (n - 3).as_coeff_mul() == (1, (n - 3,))
def test_Pow_as_coeff_mul_doesnt_expand():
    """as_coeff_mul must not expand exponential sums."""
    assert exp(x + y).as_coeff_mul() == (1, (exp(x + y),))
    assert exp(x + exp(x + y)) != exp(x + exp(x)*exp(y))
def test_issue_3514():
    """Products of square roots with rational coefficients simplify
    consistently regardless of the order of multiplication."""
    assert sqrt(S.Half) * sqrt(6) == 2 * sqrt(3)/2
    assert S(1)/2*sqrt(6)*sqrt(2) == sqrt(3)
    assert sqrt(6)/2*sqrt(2) == sqrt(3)
    assert sqrt(6)*sqrt(2)/2 == sqrt(3)
def test_make_args():
    """Add.make_args/Mul.make_args split only expressions of their own
    class; everything else is returned as a one-element tuple."""
    assert Add.make_args(x) == (x,)
    assert Mul.make_args(x) == (x,)

    assert Add.make_args(x*y*z) == (x*y*z,)
    assert Mul.make_args(x*y*z) == (x*y*z).args

    assert Add.make_args(x + y + z) == (x + y + z).args
    assert Mul.make_args(x + y + z) == (x + y + z,)

    assert Add.make_args((x + y)**z) == ((x + y)**z,)
    assert Mul.make_args((x + y)**z) == ((x + y)**z,)
def test_issue_5126():
    """Bases with unknown sign must not be combined: (-2)**x*(-3)**x is not
    6**x in general, but it is when the exponent is an integer."""
    assert (-2)**x*(-3)**x != 6**x
    # use integer=True (not integer=1) for consistency with the rest of
    # the assumption declarations in this file; truthiness is equivalent
    i = Symbol('i', integer=True)
    assert (-2)**i*(-3)**i == 6**i
def test_Rational_as_content_primitive():
    """as_content_primitive on bare Rationals/Integers returns
    (content, primitive) unchanged."""
    c, p = S(1), S(0)
    assert (c*p).as_content_primitive() == (c, p)
    c, p = S(1)/2, S(1)
    assert (c*p).as_content_primitive() == (c, p)
def test_Add_as_content_primitive():
    """as_content_primitive on Adds factors out the rational GCD of the
    term coefficients; Float coefficients contribute no content."""
    assert (x + 2).as_content_primitive() == (1, x + 2)
    assert (3*x + 2).as_content_primitive() == (1, 3*x + 2)
    assert (3*x + 3).as_content_primitive() == (3, x + 1)
    assert (3*x + 6).as_content_primitive() == (3, x + 2)

    assert (3*x + 2*y).as_content_primitive() == (1, 3*x + 2*y)
    assert (3*x + 3*y).as_content_primitive() == (3, x + y)
    assert (3*x + 6*y).as_content_primitive() == (3, x + 2*y)

    assert (3/x + 2*x*y*z**2).as_content_primitive() == (1, 3/x + 2*x*y*z**2)
    assert (3/x + 3*x*y*z**2).as_content_primitive() == (3, 1/x + x*y*z**2)
    assert (3/x + 6*x*y*z**2).as_content_primitive() == (3, 1/x + 2*x*y*z**2)

    assert (2*x/3 + 4*y/9).as_content_primitive() == \
        (Rational(2, 9), 3*x + 2*y)
    assert (2*x/3 + 2.5*y).as_content_primitive() == \
        (Rational(1, 3), 2*x + 7.5*y)

    # the coefficient may sort to a position other than 0
    p = 3 + x + y
    assert (2*p).expand().as_content_primitive() == (2, p)
    assert (2.0*p).expand().as_content_primitive() == (1, 2.*p)
    p *= -1
    assert (2*p).expand().as_content_primitive() == (2, p)
def test_Mul_as_content_primitive():
    """as_content_primitive on Muls multiplies the contents of the
    factors (including powers of factored Adds)."""
    assert (2*x).as_content_primitive() == (2, x)
    assert (x*(2 + 2*x)).as_content_primitive() == (2, x*(1 + x))
    assert (x*(2 + 2*y)*(3*x + 3)**2).as_content_primitive() == \
        (18, x*(1 + y)*(x + 1)**2)
    assert ((2 + 2*x)**2*(3 + 6*x) + S.Half).as_content_primitive() == \
        (S.Half, 24*(x + 1)**2*(2*x + 1) + 1)
def test_Pow_as_content_primitive():
    """as_content_primitive on Pows extracts content only for integer
    exponents; symbolic exponents keep the unevaluated factored base."""
    assert (x**y).as_content_primitive() == (1, x**y)
    assert ((2*x + 2)**y).as_content_primitive() == \
        (1, (Mul(2, (x + 1), evaluate=False))**y)
    assert ((2*x + 2)**3).as_content_primitive() == (8, (x + 1)**3)
def test_issue_5460():
    """An unevaluated Mul survives as a single arg inside an Add."""
    u = Mul(2, (1 + x), evaluate=False)
    assert (2 + u).args == (2, u)
def test_product_irrational():
    """I*pi is neither irrational nor positive (it is imaginary)."""
    from sympy import I, pi
    assert (I*pi).is_irrational is False
    # The following used to be deduced from the above bug:
    assert (I*pi).is_positive is False
def test_issue_5919():
    # expand() should distribute through the denominator
    assert (x/(y*(1 + y))).expand() == x/(y**2 + y)
def test_Mod():
    """Exercise Mod: sign conventions, Float handling, denesting,
    symbolic simplification, gcd extraction, and parity deductions."""
    assert Mod(x, 1).func is Mod
    assert pi % pi == S.Zero
    # Python-style sign convention: result has the sign of the divisor
    assert Mod(5, 3) == 2
    assert Mod(-5, 3) == 1
    assert Mod(5, -3) == -1
    assert Mod(-5, -3) == -2
    assert type(Mod(3.2, 2, evaluate=False)) == Mod
    assert 5 % x == Mod(5, x)
    assert x % 5 == Mod(x, 5)
    assert x % y == Mod(x, y)
    assert (x % y).subs({x: 5, y: 3}) == 2

    # Float handling
    point3 = Float(3.3) % 1
    assert (x - 3.3) % 1 == Mod(1.*x + 1 - point3, 1)
    assert Mod(-3.3, 1) == 1 - point3
    assert Mod(0.7, 1) == Float(0.7)
    # a Float in either argument makes the result a Float
    e = Mod(1.3, 1)
    assert comp(e, .3) and e.is_Float
    e = Mod(1.3, .7)
    assert comp(e, .6) and e.is_Float
    e = Mod(1.3, Rational(7, 10))
    assert comp(e, .6) and e.is_Float
    e = Mod(Rational(13, 10), 0.7)
    assert comp(e, .6) and e.is_Float
    e = Mod(Rational(13, 10), Rational(7, 10))
    assert comp(e, .6) and e.is_Rational

    # check that sign is right
    r2 = sqrt(2)
    r3 = sqrt(3)
    for i in [-r3, -r2, r2, r3]:
        for j in [-r3, -r2, r2, r3]:
            assert verify_numerically(i % j, i.n() % j.n())
    for _x in range(4):
        for _y in range(9):
            reps = [(x, _x), (y, _y)]
            assert Mod(3*x + y, 9).subs(reps) == (3*_x + _y) % 9

    # denesting
    #   easy case
    assert Mod(Mod(x, y), y) == Mod(x, y)
    #   in case someone attempts more denesting
    for i in [-3, -2, 2, 3]:
        for j in [-3, -2, 2, 3]:
            for k in range(3):
                assert Mod(Mod(k, i), j) == (k % i) % j

    # known difference
    assert Mod(5*sqrt(2), sqrt(5)) == 5*sqrt(2) - 3*sqrt(5)
    p = symbols('p', positive=True)
    assert Mod(p + 1, p + 3) == p + 1
    n = symbols('n', negative=True)
    assert Mod(n - 3, n - 1) == -2
    assert Mod(n - 2*p, n - p) == -p
    assert Mod(p - 2*n, p - n) == -n

    # handling sums
    assert (x + 3) % 1 == Mod(x, 1)
    assert (x + 3.0) % 1 == Mod(1.*x, 1)
    assert (x - S(33)/10) % 1 == Mod(x + S(7)/10, 1)

    a = Mod(.6*x + y, .3*y)
    b = Mod(0.1*y + 0.6*x, 0.3*y)
    # Test that a, b are equal, with 1e-14 accuracy in coefficients
    eps = 1e-14
    assert abs((a.args[0] - b.args[0]).subs({x: 1, y: 1})) < eps
    assert abs((a.args[1] - b.args[1]).subs({x: 1, y: 1})) < eps

    assert (x + 1) % x == 1 % x
    assert (x + y) % x == y % x
    assert (x + y + 2) % x == (y + 2) % x
    assert (a + 3*x + 1) % (2*x) == Mod(a + x + 1, 2*x)
    assert (12*x + 18*y) % (3*x) == 3*Mod(6*y, x)

    # gcd extraction
    assert (-3*x) % (-2*y) == -Mod(3*x, 2*y)
    assert (.6*pi) % (.3*x*pi) == 0.3*pi*Mod(2, x)
    assert (.6*pi) % (.31*x*pi) == pi*Mod(0.6, 0.31*x)
    assert (6*pi) % (.3*x*pi) == 0.3*pi*Mod(20, x)
    assert (6*pi) % (.31*x*pi) == pi*Mod(6, 0.31*x)
    assert (6*pi) % (.42*x*pi) == pi*Mod(6, 0.42*x)
    assert (12*x) % (2*y) == 2*Mod(6*x, y)
    assert (12*x) % (3*5*y) == 3*Mod(4*x, 5*y)
    assert (12*x) % (15*x*y) == 3*x*Mod(4, 5*y)
    assert (-2*pi) % (3*pi) == pi
    assert (2*x + 2) % (x + 1) == 0
    assert (x*(x + 1)) % (x + 1) == (x + 1)*Mod(x, 1)
    assert Mod(5.0*x, 0.1*y) == 0.1*Mod(50*x, y)
    i = Symbol('i', integer=True)
    assert (3*i*x) % (2*i*y) == i*Mod(3*x, 2*y)
    assert Mod(4*i, 4) == 0

    # issue 8677
    n = Symbol('n', integer=True, positive=True)
    assert (factorial(n) % n).equals(0) is not False

    # symbolic with known parity
    n = Symbol('n', even=True)
    assert Mod(n, 2) == 0

    n = Symbol('n', odd=True)
    assert Mod(n, 2) == 1
def test_Mod_is_integer():
    """Mod of integers is an integer only when the divisor is known
    nonzero (division by zero would be undefined)."""
    p = Symbol('p', integer=True)
    q1 = Symbol('q1', integer=True)
    q2 = Symbol('q2', integer=True, nonzero=True)
    assert Mod(x, y).is_integer is None
    assert Mod(p, q1).is_integer is None
    assert Mod(x, q2).is_integer is None
    assert Mod(p, q2).is_integer
def test_Mod_is_nonposneg():
    """The sign of Mod follows the sign of the divisor (when known)."""
    n = Symbol('n', integer=True)
    k = Symbol('k', integer=True, positive=True)
    assert (n%3).is_nonnegative
    assert Mod(n, -3).is_nonpositive
    assert Mod(n, k).is_nonnegative
    assert Mod(n, -k).is_nonpositive
    assert Mod(k, n).is_nonnegative is None
def test_issue_6001():
    """is_commutative must be consistent for an expression, the same
    expression plus a constant, and its noncommutative atoms."""
    A = Symbol("A", commutative=False)
    eq = A + A**2
    # it doesn't matter whether it's True or False; they should
    # just all be the same
    assert (
        eq.is_commutative ==
        (eq + 1).is_commutative ==
        (A + 1).is_commutative)

    B = Symbol("B", commutative=False)
    # Although commutative terms could cancel we return True
    # meaning "there are non-commutative symbols"; after substitution
    # that definition can change, e.g. (A*B).subs(B, A**-1) -> 1
    assert (sqrt(2)*A).is_commutative is False
    assert (sqrt(2)*A*B).is_commutative is False
def test_polar():
    """Polar-number assumption propagation through Mul and Pow."""
    from sympy import polar_lift
    p = Symbol('p', polar=True)
    x = Symbol('x')
    assert p.is_polar
    assert x.is_polar is None
    assert S(1).is_polar is None
    assert (p**x).is_polar is True
    assert (x**p).is_polar is None
    assert ((2*p)**x).is_polar is True
    assert (2*p).is_polar is True
    # -2 is not a polar number, so the product is not known polar
    assert (-2*p).is_polar is not True
    assert (polar_lift(-2)*p).is_polar is True

    q = Symbol('q', polar=True)
    # powers distribute over products of polar numbers
    assert (p*q)**2 == p**2 * q**2
    assert (2*q)**2 == 4 * q**2
    assert ((p*q)**x).expand() == p**x * q**x
def test_issue_6040():
    """An unevaluated Pow(1, 2) must not compare equal to S.One,
    in either direction."""
    a, b = Pow(1, 2, evaluate=False), S.One
    assert a != b
    assert b != a
    assert not (a == b)
    assert not (b == a)
def test_issue_6082():
    # Comparison is symmetric
    assert Basic.compare(Max(x, 1), Max(x, 2)) == \
        - Basic.compare(Max(x, 2), Max(x, 1))
    # Equal expressions compare equal
    assert Basic.compare(Max(x, 1), Max(x, 1)) == 0
    # Basic subtypes (such as Max) compare different than standard types
    assert Basic.compare(Max(1, x), frozenset((1, x))) != 0
def test_issue_6077():
    """Float exponents combine arithmetically without being coerced
    to exact integers."""
    assert x**2.0/x == x**1.0
    assert x/x**2.0 == x**-1.0
    assert x*x**2.0 == x**3.0
    assert x**1.5*x**2.5 == x**4.0

    assert 2**(2.0*x)/2**x == 2**(1.0*x)
    assert 2**x/2**(2.0*x) == 2**(-1.0*x)
    assert 2**x*2**(2.0*x) == 2**(3.0*x)
    assert 2**(1.5*x)*2**(2.5*x) == 2**(4.0*x)
def test_mul_flatten_oo():
    """Sign-aware flattening of products with oo."""
    p = symbols('p', positive=True)
    n, m = symbols('n,m', negative=True)
    x_im = symbols('x_im', imaginary=True)
    assert n*oo == -oo
    assert n*m*oo == oo
    assert p*oo == oo
    assert x_im*oo != I*oo  # i could be +/- 3*I -> +/-oo
def test_add_flatten():
    # see https://github.com/sympy/sympy/issues/2633#issuecomment-29545524
    a = oo + I*oo
    b = oo - I*oo
    assert a + b == nan
    assert a - b == nan
    assert (1/a).simplify() == (1/b).simplify() == 0
def test_issue_5160_6087_6089_6090():
    """Extraction of numeric coefficients from (non)commutative bases
    raised to Float and integer powers."""
    # issue 6087
    assert ((-2*x*y**y)**3.2).n(2) == (2**3.2*(-x*y**y)**3.2).n(2)
    # issue 6089
    A, B, C = symbols('A,B,C', commutative=False)
    assert (2.*B*C)**3 == 8.0*(B*C)**3
    assert (-2.*B*C)**3 == -8.0*(B*C)**3
    assert (-2*B*C)**2 == 4*(B*C)**2
    # issue 5160
    assert sqrt(-1.0*x) == 1.0*sqrt(-x)
    assert sqrt(1.0*x) == 1.0*sqrt(x)
    # issue 6090
    assert (-2*x*y*A*B)**2 == 4*x**2*y**2*(A*B)**2
def test_float_int():
    """Conversions between Float, Integer and Python int, including
    precision limits of empty-precision ('') Floats."""
    assert int(float(sqrt(10))) == int(sqrt(10))
    assert int(pi**1000) % 10 == 2
    assert int(Float('1.123456789012345678901234567890e20', '')) == \
        long(112345678901234567890)
    assert int(Float('1.123456789012345678901234567890e25', '')) == \
        long(11234567890123456789012345)
    # decimal forces float so it's not an exact integer ending in 000000
    assert int(Float('1.123456789012345678901234567890e35', '')) == \
        112345678901234567890123456789000192
    assert int(Float('123456789012345678901234567890e5', '')) == \
        12345678901234567890123456789000000
    assert Integer(Float('1.123456789012345678901234567890e20', '')) == \
        112345678901234567890
    assert Integer(Float('1.123456789012345678901234567890e25', '')) == \
        11234567890123456789012345
    # decimal forces float so it's not an exact integer ending in 000000
    assert Integer(Float('1.123456789012345678901234567890e35', '')) == \
        112345678901234567890123456789000192
    assert Integer(Float('123456789012345678901234567890e5', '')) == \
        12345678901234567890123456789000000
    assert same_and_same_prec(Float('123000e-2',''), Float('1230.00', ''))
    assert same_and_same_prec(Float('123000e2',''), Float('12300000', ''))

    assert int(1 + Rational('.9999999999999999999999999')) == 1
    assert int(pi/1e20) == 0
    assert int(1 + pi/1e20) == 1
    assert int(Add(1.2, -2, evaluate=False)) == int(1.2 - 2)
    assert int(Add(1.2, +2, evaluate=False)) == int(1.2 + 2)
    assert int(Add(1 + Float('.99999999999999999', ''), evaluate=False)) == 1
    # symbols and complex values cannot be converted to float
    raises(TypeError, lambda: float(x))
    raises(TypeError, lambda: float(sqrt(-1)))

    assert int(12345678901234567890 + cos(1)**2 + sin(1)**2) == \
        12345678901234567891
def test_issue_6611a():
    # Mul.flatten should combine the radicals into a rational coefficient
    # times a principal root of -1
    assert Mul.flatten([3**Rational(1, 3),
        Pow(-Rational(1, 9), Rational(2, 3), evaluate=False)]) == \
        ([Rational(1, 3), (-1)**Rational(2, 3)], [], None)
def test_denest_add_mul():
    # when working with evaluated expressions make sure they denest
    eq = x + 1
    eq = Add(eq, 2, evaluate=False)
    eq = Add(eq, 2, evaluate=False)
    assert Add(*eq.args) == x + 5
    eq = x*2
    eq = Mul(eq, 2, evaluate=False)
    eq = Mul(eq, 2, evaluate=False)
    assert Mul(*eq.args) == 8*x
    # but don't let them denest unnecessarily
    eq = Mul(-2, x - 2, evaluate=False)
    assert 2*eq == Mul(-4, x - 2, evaluate=False)
    assert -eq == Mul(2, x - 2, evaluate=False)
def test_mul_coeff():
    # It is important that all Numbers be removed from the seq;
    # This can be tricky when powers combine to produce those numbers
    p = exp(I*pi/3)
    assert p**2*x*p*y*p*x*p**2 == x**2*y
def test_mul_zero_detection():
    """Check zero/real/imaginary deductions for Mul, in particular the
    interaction between a possibly-zero factor and a possibly-infinite
    one (0*oo is nan, so deductions must stay None in that case)."""
    nz = Dummy(real=True, zero=False, finite=True)
    r = Dummy(real=True)
    c = Dummy(real=False, complex=True, finite=True)
    c2 = Dummy(real=False, complex=True, finite=True)
    i = Dummy(imaginary=True, finite=True)
    e = nz*r*c
    assert e.is_imaginary is None
    assert e.is_real is None
    e = nz*c
    assert e.is_imaginary is None
    assert e.is_real is False
    e = nz*i*c
    assert e.is_imaginary is False
    assert e.is_real is None
    # check for more than one complex; it is important to use
    # uniquely named Symbols to ensure that two factors appear
    # e.g. if the symbols have the same name they just become
    # a single factor, a power.
    e = nz*i*c*c2
    assert e.is_imaginary is None
    assert e.is_real is None

    # _eval_is_real and _eval_is_zero both employ trapping of the
    # zero value so args should be tested in both directions and
    # TO AVOID GETTING THE CACHED RESULT, Dummy MUST BE USED

    # real is unknown
    def test(z, b, e):
        # z: possibly-zero factor; b: possibly-finite factor; e: Mul(z, b)
        if z.is_zero and b.is_finite:
            assert e.is_real and e.is_zero
        else:
            assert e.is_real is None
        if b.is_finite:
            if z.is_zero:
                assert e.is_zero
            else:
                assert e.is_zero is None
        elif b.is_finite is False:
            if z.is_zero is None:
                assert e.is_zero is None
            else:
                assert e.is_zero is False

    for iz, ib in cartes(*[[True, False, None]]*2):
        z = Dummy('z', nonzero=iz)
        b = Dummy('f', finite=ib)
        e = Mul(z, b, evaluate=False)
        test(z, b, e)
        z = Dummy('nz', nonzero=iz)
        b = Dummy('f', finite=ib)
        e = Mul(b, z, evaluate=False)
        test(z, b, e)

    # real is True
    def test(z, b, e):
        if z.is_zero and not b.is_finite:
            assert e.is_real is None
        else:
            assert e.is_real

    for iz, ib in cartes(*[[True, False, None]]*2):
        z = Dummy('z', nonzero=iz, real=True)
        b = Dummy('b', finite=ib, real=True)
        e = Mul(z, b, evaluate=False)
        test(z, b, e)
        z = Dummy('z', nonzero=iz, real=True)
        b = Dummy('b', finite=ib, real=True)
        e = Mul(b, z, evaluate=False)
        test(z, b, e)
def test_Mul_with_zero_infinite():
    """0 * (infinite) is nan, so no positivity/hermiticity deduction
    should be made in either argument order."""
    zer = Dummy(zero=True)
    inf = Dummy(finite=False)

    e = Mul(zer, inf, evaluate=False)
    assert e.is_positive is None
    assert e.is_hermitian is None

    e = Mul(inf, zer, evaluate=False)
    assert e.is_positive is None
    assert e.is_hermitian is None
def test_issue_8247_8354():
    """Radical/trig expressions that are actually zero must not be
    deduced positive (and must not hang while deciding)."""
    from sympy import tan
    z = sqrt(1 + sqrt(3)) + sqrt(3 + 3*sqrt(3)) - sqrt(10 + 6*sqrt(3))
    assert z.is_positive is False  # it's 0
    z = S('''-2**(1/3)*(3*sqrt(93) + 29)**2 - 4*(3*sqrt(93) + 29)**(4/3) +
    12*sqrt(93)*(3*sqrt(93) + 29)**(1/3) + 116*(3*sqrt(93) + 29)**(1/3) +
    174*2**(1/3)*sqrt(93) + 1678*2**(1/3)''')
    assert z.is_positive is False  # it's 0
    z = 2*(-3*tan(19*pi/90) + sqrt(3))*cos(11*pi/90)*cos(19*pi/90) - \
        sqrt(3)*(-3 + 4*cos(19*pi/90)**2)
    assert z.is_positive is not True  # it's zero and it shouldn't hang
    z = S('''9*(3*sqrt(93) + 29)**(2/3)*((3*sqrt(93) +
    29)**(1/3)*(-2**(2/3)*(3*sqrt(93) + 29)**(1/3) - 2) - 2*2**(1/3))**3 +
    72*(3*sqrt(93) + 29)**(2/3)*(81*sqrt(93) + 783) + (162*sqrt(93) +
    1566)*((3*sqrt(93) + 29)**(1/3)*(-2**(2/3)*(3*sqrt(93) + 29)**(1/3) -
    2) - 2*2**(1/3))**2''')
    assert z.is_positive is False  # it's 0 (and a single _mexpand isn't enough)
| maniteja123/sympy | sympy/core/tests/test_arit.py | Python | bsd-3-clause | 57,282 |
from django.template.defaultfilters import urlize
from django.test import SimpleTestCase
from django.utils.functional import lazy
from django.utils.safestring import mark_safe
from ..utils import setup
class UrlizeTests(SimpleTestCase):
    """Template-level tests of the ``urlize`` filter, covering the
    interaction with autoescaping (on/off) and ``mark_safe`` input.

    NOTE(review): ``self.engine`` is presumably installed by the ``setup``
    decorator from ``..utils`` -- confirm against that helper.
    """

    @setup({'urlize01': '{% autoescape off %}{{ a|urlize }} {{ b|urlize }}{% endautoescape %}'})
    def test_urlize01(self):
        output = self.engine.render_to_string(
            'urlize01',
            {'a': 'http://example.com/?x=&y=', 'b': mark_safe('http://example.com?x=&y=<2>')},
        )
        self.assertEqual(
            output,
            '<a href="http://example.com/?x=&y=" rel="nofollow">http://example.com/?x=&y=</a> '
            '<a href="http://example.com?x=&y=%3C2%3E" rel="nofollow">http://example.com?x=&y=<2></a>'
        )

    @setup({'urlize02': '{{ a|urlize }} {{ b|urlize }}'})
    def test_urlize02(self):
        output = self.engine.render_to_string(
            'urlize02',
            {'a': "http://example.com/?x=&y=", 'b': mark_safe("http://example.com?x=&y=")},
        )
        self.assertEqual(
            output,
            '<a href="http://example.com/?x=&y=" rel="nofollow">http://example.com/?x=&y=</a> '
            '<a href="http://example.com?x=&y=" rel="nofollow">http://example.com?x=&y=</a>'
        )

    @setup({'urlize03': '{% autoescape off %}{{ a|urlize }}{% endautoescape %}'})
    def test_urlize03(self):
        output = self.engine.render_to_string('urlize03', {'a': mark_safe("a & b")})
        self.assertEqual(output, 'a & b')

    @setup({'urlize04': '{{ a|urlize }}'})
    def test_urlize04(self):
        output = self.engine.render_to_string('urlize04', {'a': mark_safe("a & b")})
        self.assertEqual(output, 'a & b')

    # This will lead to a nonsense result, but at least it won't be
    # exploitable for XSS purposes when auto-escaping is on.
    @setup({'urlize05': '{% autoescape off %}{{ a|urlize }}{% endautoescape %}'})
    def test_urlize05(self):
        output = self.engine.render_to_string('urlize05', {'a': "<script>alert('foo')</script>"})
        self.assertEqual(output, "<script>alert('foo')</script>")

    @setup({'urlize06': '{{ a|urlize }}'})
    def test_urlize06(self):
        output = self.engine.render_to_string('urlize06', {'a': "<script>alert('foo')</script>"})
        self.assertEqual(output, '<script>alert('foo')</script>')

    # mailto: testing for urlize
    @setup({'urlize07': '{{ a|urlize }}'})
    def test_urlize07(self):
        output = self.engine.render_to_string('urlize07', {'a': "Email me at [email protected]"})
        self.assertEqual(
            output,
            'Email me at <a href="mailto:[email protected]">[email protected]</a>',
        )

    @setup({'urlize08': '{{ a|urlize }}'})
    def test_urlize08(self):
        output = self.engine.render_to_string('urlize08', {'a': "Email me at <[email protected]>"})
        self.assertEqual(
            output,
            'Email me at &lt;<a href="mailto:[email protected]">[email protected]</a>&gt;',
        )

    @setup({'urlize09': '{% autoescape off %}{{ a|urlize }}{% endautoescape %}'})
    def test_urlize09(self):
        output = self.engine.render_to_string('urlize09', {'a': "http://example.com/?x=&amp;y=&lt;2&gt;"})
        self.assertEqual(
            output,
            '<a href="http://example.com/?x=&y=%3C2%3E" rel="nofollow">http://example.com/?x=&amp;y=&lt;2&gt;</a>',
        )
class FunctionTests(SimpleTestCase):
    """Direct (non-template) tests of ``django.template.defaultfilters.urlize``:
    URL/email detection, trailing punctuation and bracket balancing, IDN and
    percent-encoding handling, and autoescape behavior."""

    def test_urls(self):
        self.assertEqual(
            urlize('http://google.com'),
            '<a href="http://google.com" rel="nofollow">http://google.com</a>',
        )
        self.assertEqual(
            urlize('http://google.com/'),
            '<a href="http://google.com/" rel="nofollow">http://google.com/</a>',
        )
        self.assertEqual(
            urlize('www.google.com'),
            '<a href="http://www.google.com" rel="nofollow">www.google.com</a>',
        )
        self.assertEqual(
            urlize('djangoproject.org'),
            '<a href="http://djangoproject.org" rel="nofollow">djangoproject.org</a>',
        )
        self.assertEqual(
            urlize('djangoproject.org/'),
            '<a href="http://djangoproject.org/" rel="nofollow">djangoproject.org/</a>',
        )

    def test_url_split_chars(self):
        # Quotes (single and double) and angle brackets shouldn't be considered
        # part of URLs.
        self.assertEqual(
            urlize('www.server.com"abc'),
            '<a href="http://www.server.com" rel="nofollow">www.server.com</a>&quot;abc',
        )
        self.assertEqual(
            urlize('www.server.com\'abc'),
            '<a href="http://www.server.com" rel="nofollow">www.server.com</a>&#39;abc',
        )
        self.assertEqual(
            urlize('www.server.com<abc'),
            '<a href="http://www.server.com" rel="nofollow">www.server.com</a>&lt;abc',
        )
        self.assertEqual(
            urlize('www.server.com>abc'),
            '<a href="http://www.server.com" rel="nofollow">www.server.com</a>&gt;abc',
        )

    def test_email(self):
        self.assertEqual(
            urlize('[email protected]'),
            '<a href="mailto:[email protected]">[email protected]</a>',
        )

    def test_word_with_dot(self):
        # a bare word containing a dot is not a recognized TLD, so no link
        self.assertEqual(urlize('some.organization'), 'some.organization'),

    def test_https(self):
        self.assertEqual(
            urlize('https://google.com'),
            '<a href="https://google.com" rel="nofollow">https://google.com</a>',
        )

    def test_quoting(self):
        """
        #9655 - Check urlize doesn't overquote already quoted urls. The
        teststring is the urlquoted version of 'http://hi.baidu.com/重新开始'
        """
        self.assertEqual(
            urlize('http://hi.baidu.com/%E9%87%8D%E6%96%B0%E5%BC%80%E5%A7%8B'),
            '<a href="http://hi.baidu.com/%E9%87%8D%E6%96%B0%E5%BC%80%E5%A7%8B" rel="nofollow">'
            'http://hi.baidu.com/%E9%87%8D%E6%96%B0%E5%BC%80%E5%A7%8B</a>',
        )

    def test_urlencoded(self):
        self.assertEqual(
            urlize('www.mystore.com/30%OffCoupons!'),
            '<a href="http://www.mystore.com/30%25OffCoupons" rel="nofollow">'
            'www.mystore.com/30%OffCoupons</a>!',
        )
        self.assertEqual(
            urlize('https://en.wikipedia.org/wiki/Caf%C3%A9'),
            '<a href="https://en.wikipedia.org/wiki/Caf%C3%A9" rel="nofollow">'
            'https://en.wikipedia.org/wiki/Caf%C3%A9</a>',
        )

    def test_unicode(self):
        self.assertEqual(
            urlize('https://en.wikipedia.org/wiki/Café'),
            '<a href="https://en.wikipedia.org/wiki/Caf%C3%A9" rel="nofollow">'
            'https://en.wikipedia.org/wiki/Café</a>',
        )

    def test_parenthesis(self):
        """
        #11911 - Check urlize keeps balanced parentheses
        """
        self.assertEqual(
            urlize('https://en.wikipedia.org/wiki/Django_(web_framework)'),
            '<a href="https://en.wikipedia.org/wiki/Django_(web_framework)" rel="nofollow">'
            'https://en.wikipedia.org/wiki/Django_(web_framework)</a>',
        )
        self.assertEqual(
            urlize('(see https://en.wikipedia.org/wiki/Django_(web_framework))'),
            '(see <a href="https://en.wikipedia.org/wiki/Django_(web_framework)" rel="nofollow">'
            'https://en.wikipedia.org/wiki/Django_(web_framework)</a>)',
        )

    def test_nofollow(self):
        """
        #12183 - Check urlize adds nofollow properly - see #12183
        """
        self.assertEqual(
            urlize('[email protected] or www.bar.com'),
            '<a href="mailto:[email protected]">[email protected]</a> or '
            '<a href="http://www.bar.com" rel="nofollow">www.bar.com</a>',
        )

    def test_idn(self):
        """
        #13704 - Check urlize handles IDN correctly
        """
        self.assertEqual(urlize('http://c✶.ws'), '<a href="http://xn--c-lgq.ws" rel="nofollow">http://c✶.ws</a>')
        self.assertEqual(urlize('www.c✶.ws'), '<a href="http://www.xn--c-lgq.ws" rel="nofollow">www.c✶.ws</a>')
        self.assertEqual(urlize('c✶.org'), '<a href="http://xn--c-lgq.org" rel="nofollow">c✶.org</a>')
        self.assertEqual(urlize('info@c✶.org'), '<a href="mailto:[email protected]">info@c✶.org</a>')

    def test_malformed(self):
        """
        #16395 - Check urlize doesn't highlight malformed URIs
        """
        self.assertEqual(urlize('http:///www.google.com'), 'http:///www.google.com')
        self.assertEqual(urlize('http://.google.com'), 'http://.google.com')
        self.assertEqual(urlize('http://@foo.com'), 'http://@foo.com')

    def test_tlds(self):
        """
        #16656 - Check urlize accepts more TLDs
        """
        self.assertEqual(urlize('usa.gov'), '<a href="http://usa.gov" rel="nofollow">usa.gov</a>')

    def test_invalid_email(self):
        """
        #17592 - Check urlize don't crash on invalid email with dot-starting
        domain
        """
        self.assertEqual(urlize('[email protected]'), '[email protected]')

    def test_uppercase(self):
        """
        #18071 - Check urlize accepts uppercased URL schemes
        """
        self.assertEqual(
            urlize('HTTPS://github.com/'),
            '<a href="https://github.com/" rel="nofollow">HTTPS://github.com/</a>',
        )

    def test_trailing_period(self):
        """
        #18644 - Check urlize trims trailing period when followed by parenthesis
        """
        self.assertEqual(
            urlize('(Go to http://www.example.com/foo.)'),
            '(Go to <a href="http://www.example.com/foo" rel="nofollow">http://www.example.com/foo</a>.)',
        )

    def test_trailing_multiple_punctuation(self):
        self.assertEqual(
            urlize('A test http://testing.com/example..'),
            'A test <a href="http://testing.com/example" rel="nofollow">http://testing.com/example</a>..'
        )
        self.assertEqual(
            urlize('A test http://testing.com/example!!'),
            'A test <a href="http://testing.com/example" rel="nofollow">http://testing.com/example</a>!!'
        )
        self.assertEqual(
            urlize('A test http://testing.com/example!!!'),
            'A test <a href="http://testing.com/example" rel="nofollow">http://testing.com/example</a>!!!'
        )
        self.assertEqual(
            urlize('A test http://testing.com/example.,:;)"!'),
            'A test <a href="http://testing.com/example" rel="nofollow">http://testing.com/example</a>.,:;)&quot;!'
        )

    def test_brackets(self):
        """
        #19070 - Check urlize handles brackets properly
        """
        self.assertEqual(
            urlize('[see www.example.com]'),
            '[see <a href="http://www.example.com" rel="nofollow">www.example.com</a>]',
        )
        self.assertEqual(
            urlize('see test[at[example.com'),
            'see <a href="http://test[at[example.com" rel="nofollow">test[at[example.com</a>',
        )
        self.assertEqual(
            urlize('[http://168.192.0.1](http://168.192.0.1)'),
            '[<a href="http://168.192.0.1](http://168.192.0.1)" rel="nofollow">'
            'http://168.192.0.1](http://168.192.0.1)</a>',
        )

    def test_ipv4(self):
        self.assertEqual(
            urlize('http://192.168.0.15/api/9'),
            '<a href="http://192.168.0.15/api/9" rel="nofollow">http://192.168.0.15/api/9</a>',
        )

    def test_ipv6(self):
        self.assertEqual(
            urlize('http://[2001:db8:cafe::2]/api/9'),
            '<a href="http://[2001:db8:cafe::2]/api/9" rel="nofollow">http://[2001:db8:cafe::2]/api/9</a>',
        )

    def test_quotation_marks(self):
        """
        #20364 - Check urlize correctly include quotation marks in links
        """
        self.assertEqual(
            urlize('before "[email protected]" afterwards', autoescape=False),
            'before "<a href="mailto:[email protected]">[email protected]</a>" afterwards',
        )
        self.assertEqual(
            urlize('before [email protected]" afterwards', autoescape=False),
            'before <a href="mailto:[email protected]">[email protected]</a>" afterwards',
        )
        self.assertEqual(
            urlize('before "[email protected] afterwards', autoescape=False),
            'before "<a href="mailto:[email protected]">[email protected]</a> afterwards',
        )
        self.assertEqual(
            urlize('before \'[email protected]\' afterwards', autoescape=False),
            'before \'<a href="mailto:[email protected]">[email protected]</a>\' afterwards',
        )
        self.assertEqual(
            urlize('before [email protected]\' afterwards', autoescape=False),
            'before <a href="mailto:[email protected]">[email protected]</a>\' afterwards',
        )
        self.assertEqual(
            urlize('before \'[email protected] afterwards', autoescape=False),
            'before \'<a href="mailto:[email protected]">[email protected]</a> afterwards',
        )

    def test_quote_commas(self):
        """
        #20364 - Check urlize copes with commas following URLs in quotes
        """
        self.assertEqual(
            urlize('Email us at "[email protected]", or phone us at +xx.yy', autoescape=False),
            'Email us at "<a href="mailto:[email protected]">[email protected]</a>", or phone us at +xx.yy',
        )

    def test_exclamation_marks(self):
        """
        #23715 - Check urlize correctly handles exclamation marks after TLDs
        or query string
        """
        self.assertEqual(
            urlize('Go to djangoproject.com! and enjoy.'),
            'Go to <a href="http://djangoproject.com" rel="nofollow">djangoproject.com</a>! and enjoy.',
        )
        self.assertEqual(
            urlize('Search for google.com/?q=! and see.'),
            'Search for <a href="http://google.com/?q=" rel="nofollow">google.com/?q=</a>! and see.',
        )
        self.assertEqual(
            urlize('Search for google.com/?q=dj!`? and see.'),
            'Search for <a href="http://google.com/?q=dj%21%60%3F" rel="nofollow">google.com/?q=dj!`?</a> and see.',
        )
        self.assertEqual(
            urlize('Search for google.com/?q=dj!`?! and see.'),
            'Search for <a href="http://google.com/?q=dj%21%60%3F" rel="nofollow">google.com/?q=dj!`?</a>! and see.',
        )

    def test_non_string_input(self):
        # non-strings are coerced via str()
        self.assertEqual(urlize(123), '123')

    def test_autoescape(self):
        self.assertEqual(
            urlize('foo<a href=" google.com ">bar</a>buz'),
            'foo&lt;a href=&quot; <a href="http://google.com" rel="nofollow">google.com</a> &quot;&gt;bar&lt;/a&gt;buz'
        )

    def test_autoescape_off(self):
        self.assertEqual(
            urlize('foo<a href=" google.com ">bar</a>buz', autoescape=False),
            'foo<a href=" <a href="http://google.com" rel="nofollow">google.com</a> ">bar</a>buz',
        )

    def test_lazystring(self):
        # urlize must force lazy strings before linkifying
        prepend_www = lazy(lambda url: 'www.' + url, str)
        self.assertEqual(
            urlize(prepend_www('google.com')),
            '<a href="http://www.google.com" rel="nofollow">www.google.com</a>',
        )
| camilonova/django | tests/template_tests/filter_tests/test_urlize.py | Python | bsd-3-clause | 15,405 |
"""print git versioning info from working directory
written by Travis O'Brien, modified by Timothy W. Hilton
"""
import git
def print_cwd_git_version():
    """print git branch and revision hash from current working directory
    """
    try:
        repo = git.Repo(search_parent_directories=True)
        short_sha = repo.git.rev_parse(repo.head.object.hexsha, short=7)
        try:
            branch_str = "On branch {}".format(repo.active_branch)
        except TypeError as err:
            # active_branch raises TypeError when HEAD is detached
            if 'detached' not in str(err):
                raise
            branch_str = "detached head;"
        print("{} at rev {}".format(branch_str, short_sha))
    except git.InvalidGitRepositoryError:
        print("No git repository detected.")
| Timothy-W-Hilton/TimPyUtils | timutils/git_tools.py | Python | mit | 850 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from gitmark.config import GitmarkSettings
import qiniu
# Qiniu cloud-storage credentials and bucket settings, pulled from the
# application-wide GitmarkSettings mapping at import time.
QINIU_ACCESS_KEY = GitmarkSettings['qiniu']['access_key']
QINIU_SECRET_KEY = GitmarkSettings['qiniu']['secret_key']
QINIU_BUCKET_NAME = GitmarkSettings['qiniu']['bucket_name']
QINIU_URL = GitmarkSettings['qiniu']['base_url']  # public base URL prefix for stored keys
def qiniu_fetch_img(img_url, img_name):
    """Ask Qiniu to fetch *img_url* into the configured bucket as *img_name*.

    Returns the public URL of the stored object, or None when the fetch
    request yields no result.
    """
    auth = qiniu.Auth(QINIU_ACCESS_KEY, QINIU_SECRET_KEY)
    auth.upload_token(QINIU_BUCKET_NAME)
    manager = qiniu.BucketManager(auth)
    ret, _info = manager.fetch(img_url, QINIU_BUCKET_NAME, img_name)
    if not ret:
        return None
    return QINIU_URL + ret.get('key')
| GitMarkTeam/gitmark | app/utils/wrap_qiniu.py | Python | gpl-2.0 | 710 |
# Copyright 2016 Casey Jaymes
# This file is part of PySCAP.
#
# PySCAP is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PySCAP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PySCAP. If not, see <http://www.gnu.org/licenses/>.
from scap.model.ocil_2_0.ChoiceQuestionResultType import ChoiceQuestionResultType
import logging
logger = logging.getLogger(__name__)
class ChoiceQuestionResultElement(ChoiceQuestionResultType):
    """Binds ChoiceQuestionResultType to the OCIL <choice_question_result>
    XML element via the declarative MODEL_MAP.
    """
    # 'tag_name' names the XML element this class represents; presumably
    # consumed by the model (de)serialization machinery in the base
    # classes — confirm against scap.model internals.
    MODEL_MAP = {
        'tag_name': 'choice_question_result',
    }
| cjaymes/pyscap | src/scap/model/ocil_2_0/ChoiceQuestionResultElement.py | Python | gpl-3.0 | 947 |
import sys

# Original Brainfuck instruction set and its arrow replacement,
# index-aligned so orig[i] translates to new[i].
orig = ('>', '<', '+', '-', '.', ',', '[', ']')  # Original set of instructions
new = ('→', '←', '↑', '↓', '↘', '↙', '↗', '↖')  # New set of instructions


def translate(code):
    """Return *code* with each Brainfuck instruction replaced by its arrow
    equivalent; characters that are not instructions are dropped."""
    table = dict(zip(orig, new))
    return ''.join(table[ch] for ch in code if ch in table)


def main():
    ### Verify input
    if len(sys.argv) != 3:
        print('''Usage:
python3 bf.py <input.bf> <output.bf>''')
        sys.exit(0)
    in_file = sys.argv[1]
    out_file = sys.argv[2]
    with open(in_file, 'r') as code_file:
        source = code_file.read().replace('\n', '')  # Read input file
    # The original wrapped this write in a try/except that only re-raised;
    # that was a no-op and has been removed.
    with open(out_file, 'w') as code_file:
        code_file.write(translate(source))


# Guard the entry point so importing this module has no side effects.
if __name__ == '__main__':
    main()
| abcsds/Brainfuck | translator.py | Python | gpl-3.0 | 820 |
from .workflow_factory import workflow_factory
from noodles import (
schedule, has_scheduled_methods)
@has_scheduled_methods
class A(object):
    """Test fixture: a callable whose __call__ is a Noodles scheduled
    method, multiplying the stored factor by its argument."""
    def __init__(self, x):
        super().__init__()
        self.x = x  # factor applied on each call
    @schedule
    def __call__(self, y):
        # returns a scheduled promise of self.x * y under Noodles
        return self.x * y
    def __serialize__(self, pack):
        # Noodles serialization hook: only the factor needs to survive.
        return pack(self.x)
    @classmethod
    def __construct__(cls, data):
        # Inverse of __serialize__: rebuild the instance from the factor.
        return cls(data)
@workflow_factory(result=42)
def test_class_methods_00():
    # 7 * 6 == 42, routed through A's scheduled __call__.
    return A(7)(6)
| NLeSC/noodles | test/workflows/class_methods.py | Python | apache-2.0 | 519 |
default_app_config = "Timeline.util" | Lapeth/timeline | Timeline/util/__init__.py | Python | apache-2.0 | 36 |
# -*- coding: utf-8 -*-
# Copyright: Damien Elmes <[email protected]>
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import sre_constants
import cgi
import time
import re
from operator import itemgetter
import anki.hooks
import aqt
import aqt.modelchooser
from anki.lang import ngettext
from anki.notes import Note
from aqt.qt import *
import anki
import aqt.forms
from anki.utils import fmtTimeSpan, ids2str, stripHTMLMedia, isWin, intTime, isMac
from aqt.utils import saveGeom, restoreGeom, saveSplitter, restoreSplitter, \
saveHeader, restoreHeader, saveState, restoreState, applyStyles, getTag, \
showInfo, askUser, tooltip, openHelp, showWarning, shortcut, mungeQA
from anki.hooks import runHook, addHook, remHook
from aqt.webview import AnkiWebView
from aqt.toolbar import Toolbar
from anki.consts import *
from anki.sound import playFromText, clearAudioQueue
COLOUR_SUSPENDED = "#FFFFB2"
COLOUR_MARKED = "#D9B2E9"
# fixme: need to refresh after undo
# Data model
##########################################################################
class DataModel(QAbstractTableModel):
    """Qt table model backing the browser's card list.

    Holds the matching card ids in self.cards plus a cache of loaded Card
    objects in self.cardObjs; self.activeCols (persisted in the collection
    config) names the visible columns.
    """

    def __init__(self, browser):
        QAbstractTableModel.__init__(self)
        self.browser = browser
        self.col = browser.col
        self.sortKey = None
        self.activeCols = self.col.conf.get(
            "activeCols", ["noteFld", "template", "cardDue", "deck"])
        self.cards = []
        self.cardObjs = {}

    def getCard(self, index):
        """Return the Card for *index*, loading and caching on demand."""
        cid = self.cards[index.row()]
        if cid not in self.cardObjs:
            self.cardObjs[cid] = self.col.getCard(cid)
        return self.cardObjs[cid]

    def refreshNote(self, note):
        """Evict cached cards of *note*; tell the view if any were cached."""
        refresh = False
        for c in note.cards():
            if c.id in self.cardObjs:
                del self.cardObjs[c.id]
                refresh = True
        if refresh:
            self.layoutChanged.emit()

    # Model interface
    ######################################################################

    def rowCount(self, index):
        return len(self.cards)

    def columnCount(self, index):
        return len(self.activeCols)

    def data(self, index, role):
        if not index.isValid():
            return
        if role == Qt.FontRole:
            # only question/answer/sort-field columns use the template's
            # browser font settings
            if self.activeCols[index.column()] not in (
                    "question", "answer", "noteFld"):
                return
            f = QFont()
            c = self.getCard(index)
            t = c.template()
            f.setFamily(t.get("bfont", "arial"))
            f.setPixelSize(t.get("bsize", 12))
            return f
        elif role == Qt.TextAlignmentRole:
            align = Qt.AlignVCenter
            if self.activeCols[index.column()] not in (
                    "question", "answer", "template", "deck", "noteFld",
                    "note"):
                align |= Qt.AlignHCenter
            return align
        elif role == Qt.DisplayRole or role == Qt.EditRole:
            return self.columnData(index)
        else:
            return

    def headerData(self, section, orientation, role):
        if orientation == Qt.Vertical:
            return
        elif role == Qt.DisplayRole and section < len(self.activeCols):
            col_type = self.columnType(section)
            txt = None
            for stype, name in self.browser.columns:
                if col_type == stype:
                    txt = name
                    break
            # handle case where extension has set an invalid column type
            if not txt:
                txt = self.browser.columns[0][1]
            return txt
        else:
            return

    def flags(self, index):
        return Qt.ItemFlag(Qt.ItemIsEnabled |
                           Qt.ItemIsSelectable)

    # Filtering
    ######################################################################

    def search(self, txt):
        """Run the find and repopulate the table with matching card ids."""
        self.beginReset()
        # the db progress handler may cause a refresh, so we need to zero out
        # old data first
        self.cards = []
        self.cards = self.col.findCards(txt, order=True)
        self.endReset()

    def reset(self):
        self.beginReset()
        self.endReset()

    # caller must have called editor.saveNow() before calling this or .reset()
    def beginReset(self):
        self.browser.editor.setNote(None, hide=False)
        self.browser.mw.progress.start()
        self.saveSelection()
        self.beginResetModel()
        self.cardObjs = {}

    def endReset(self):
        self.endResetModel()
        self.restoreSelection()
        self.browser.mw.progress.finish()

    def reverse(self):
        self.browser.editor.saveNow(self._reverse)

    def _reverse(self):
        self.beginReset()
        self.cards.reverse()
        self.endReset()

    def saveSelection(self):
        """Remember the selected card ids and the focused card id so the
        selection survives a model reset."""
        cards = self.browser.selectedCards()
        self.selectedCards = {cid: True for cid in cards}
        if getattr(self.browser, 'card', None):
            self.focusedCard = self.browser.card.id
        else:
            self.focusedCard = None

    def restoreSelection(self):
        """Re-apply the selection recorded by saveSelection()."""
        if not self.cards:
            return
        sm = self.browser.form.tableView.selectionModel()
        sm.clear()
        # restore selection
        items = QItemSelection()
        count = 0
        firstIdx = None
        focusedIdx = None
        for row, cid in enumerate(self.cards):
            # if the id matches the focused card, note the index
            if self.focusedCard == cid:
                focusedIdx = self.index(row, 0)
                items.select(focusedIdx, focusedIdx)
                self.focusedCard = None
            # if the card was previously selected, select again
            if cid in self.selectedCards:
                count += 1
                idx = self.index(row, 0)
                items.select(idx, idx)
                # note down the first card of the selection, in case we don't
                # have a focused card
                if not firstIdx:
                    firstIdx = idx
        # focus previously focused or first in selection
        idx = focusedIdx or firstIdx
        tv = self.browser.form.tableView
        if idx:
            tv.selectRow(idx.row())
            # we save and then restore the horizontal scroll position because
            # scrollTo() also scrolls horizontally which is confusing
            h = tv.horizontalScrollBar().value()
            tv.scrollTo(idx, tv.PositionAtCenter)
            tv.horizontalScrollBar().setValue(h)
        if count < 500:
            sm.select(items, QItemSelectionModel.SelectCurrent |
                      QItemSelectionModel.Rows)
        else:
            # selections of 500+ rows are too slow to restore; just select
            # the first row instead
            tv.selectRow(0)

    # Column data
    ######################################################################

    def columnType(self, column):
        return self.activeCols[column]

    def columnData(self, index):
        """Return the display string for one cell."""
        col = index.column()
        ctype = self.columnType(col)
        c = self.getCard(index)
        if ctype == "question":
            return self.question(c)
        elif ctype == "answer":
            return self.answer(c)
        elif ctype == "noteFld":
            f = c.note()
            return self.formatQA(f.fields[self.col.models.sortIdx(f.model())])
        elif ctype == "template":
            t = c.template()['name']
            if c.model()['type'] == MODEL_CLOZE:
                t += " %d" % (c.ord+1)
            return t
        elif ctype == "cardDue":
            # catch invalid dates
            try:
                t = self.nextDue(c, index)
            except:
                t = ""
            if c.queue < 0:
                t = "(" + t + ")"
            return t
        elif ctype == "noteCrt":
            return time.strftime("%Y-%m-%d", time.localtime(c.note().id/1000))
        elif ctype == "noteMod":
            return time.strftime("%Y-%m-%d", time.localtime(c.note().mod))
        elif ctype == "cardMod":
            return time.strftime("%Y-%m-%d", time.localtime(c.mod))
        elif ctype == "cardReps":
            return str(c.reps)
        elif ctype == "cardLapses":
            return str(c.lapses)
        elif ctype == "noteTags":
            return " ".join(c.note().tags)
        elif ctype == "note":
            return c.model()['name']
        elif ctype == "cardIvl":
            if c.type == 0:
                return _("(new)")
            elif c.type == 1:
                return _("(learning)")
            return fmtTimeSpan(c.ivl*86400)
        elif ctype == "cardEase":
            if c.type == 0:
                return _("(new)")
            return "%d%%" % (c.factor/10)
        elif ctype == "deck":
            if c.odid:
                # in a cram deck
                return "%s (%s)" % (
                    self.browser.mw.col.decks.name(c.did),
                    self.browser.mw.col.decks.name(c.odid))
            # normal deck
            return self.browser.mw.col.decks.name(c.did)

    def question(self, c):
        return self.formatQA(c.q(browser=True))

    def answer(self, c):
        if c.template().get('bafmt'):
            # they have provided a template, use it verbatim
            c.q(browser=True)
            return self.formatQA(c.a())
        # need to strip question from answer
        q = self.question(c)
        a = self.formatQA(c.a())
        if a.startswith(q):
            return a[len(q):].strip()
        return a

    def formatQA(self, txt):
        """Flatten rendered card HTML into a single plain-text line."""
        s = txt.replace("<br>", " ")
        s = s.replace("<br />", " ")
        s = s.replace("<div>", " ")
        s = s.replace("\n", " ")
        # raw strings here: the originals used "\[", an invalid escape
        # sequence in ordinary string literals (SyntaxWarning on modern
        # Python)
        s = re.sub(r"\[sound:[^]]+\]", "", s)
        s = re.sub(r"\[\[type:[^]]+\]\]", "", s)
        s = stripHTMLMedia(s)
        s = s.strip()
        return s

    def nextDue(self, c, index):
        """Human-readable due value for card *c* (queue-dependent)."""
        if c.odid:
            return _("(filtered)")
        elif c.queue == 1:
            date = c.due
        elif c.queue == 0 or c.type == 0:
            return str(c.due)
        elif c.queue in (2, 3) or (c.type == 2 and c.queue < 0):
            date = time.time() + ((c.due - self.col.sched.today)*86400)
        else:
            return ""
        return time.strftime("%Y-%m-%d", time.localtime(date))
# Line painter
######################################################################
class StatusDelegate(QItemDelegate):
    """Row painter: marked notes get a purple background, suspended cards
    a yellow one."""

    def __init__(self, browser, model):
        QItemDelegate.__init__(self, browser)
        self.browser = browser
        self.model = model

    def paint(self, painter, option, index):
        # block progress-dialog updates while we touch the collection; the
        # db progress handler could otherwise trigger a refresh mid-paint
        self.browser.mw.progress.blockUpdates = True
        try:
            c = self.model.getCard(index)
        except:
            # in the the middle of a reset; return nothing so this row is not
            # rendered until we have a chance to reset the model
            return
        finally:
            # BUG FIX: the original re-set blockUpdates = True here, so
            # updates stayed blocked forever; restore them instead.
            self.browser.mw.progress.blockUpdates = False
        col = None
        if c.note().hasTag("Marked"):
            col = COLOUR_MARKED
        elif c.queue == -1:
            col = COLOUR_SUSPENDED
        if col:
            brush = QBrush(QColor(col))
            painter.save()
            painter.fillRect(option.rect, brush)
            painter.restore()
        return QItemDelegate.paint(self, painter, option, index)
# Browser window
######################################################################
# fixme: respond to reset+edit hooks
class Browser(QMainWindow):
def __init__(self, mw):
QMainWindow.__init__(self, None, Qt.Window)
applyStyles(self)
self.mw = mw
self.col = self.mw.col
self.lastFilter = ""
self._previewWindow = None
self._closeEventHasCleanedUp = False
self.form = aqt.forms.browser.Ui_Dialog()
self.form.setupUi(self)
restoreGeom(self, "editor", 0)
restoreState(self, "editor")
restoreSplitter(self.form.splitter_2, "editor2")
restoreSplitter(self.form.splitter, "editor3")
self.form.splitter_2.setChildrenCollapsible(False)
self.form.splitter.setChildrenCollapsible(False)
self.card = None
self.setupToolbar()
self.setupColumns()
self.setupTable()
self.setupMenus()
self.setupTree()
self.setupHeaders()
self.setupHooks()
self.setupEditor()
self.updateFont()
self.onUndoState(self.mw.form.actionUndo.isEnabled())
self.setupSearch()
self.show()
def setupToolbar(self):
self.toolbarWeb = AnkiWebView()
self.toolbarWeb.title = "browser toolbar"
self.toolbar = BrowserToolbar(self.mw, self.toolbarWeb, self)
self.form.verticalLayout_3.insertWidget(0, self.toolbarWeb)
self.toolbar.draw()
def setupMenus(self):
# actions
f = self.form
if not isMac:
f.actionClose.setVisible(False)
f.actionReposition.triggered.connect(self.reposition)
f.actionReschedule.triggered.connect(self.reschedule)
f.actionChangeModel.triggered.connect(self.onChangeModel)
# edit
f.actionUndo.triggered.connect(self.mw.onUndo)
f.previewButton.clicked.connect(self.onTogglePreview)
f.previewButton.setToolTip(_("Preview Selected Card (%s)") %
shortcut(_("Ctrl+Shift+P")))
f.actionInvertSelection.triggered.connect(self.invertSelection)
f.actionSelectNotes.triggered.connect(self.selectNotes)
f.actionFindReplace.triggered.connect(self.onFindReplace)
f.actionFindDuplicates.triggered.connect(self.onFindDupes)
# jumps
f.actionPreviousCard.triggered.connect(self.onPreviousCard)
f.actionNextCard.triggered.connect(self.onNextCard)
f.actionFirstCard.triggered.connect(self.onFirstCard)
f.actionLastCard.triggered.connect(self.onLastCard)
f.actionFind.triggered.connect(self.onFind)
f.actionNote.triggered.connect(self.onNote)
f.actionTags.triggered.connect(self.onTags)
f.actionCardList.triggered.connect(self.onCardList)
# help
f.actionGuide.triggered.connect(self.onHelp)
# keyboard shortcut for shift+home/end
self.pgUpCut = QShortcut(QKeySequence("Shift+Home"), self)
self.pgUpCut.activated.connect(self.onFirstCard)
self.pgDownCut = QShortcut(QKeySequence("Shift+End"), self)
self.pgDownCut.activated.connect(self.onLastCard)
# add note
self.addCut = QShortcut(QKeySequence("Ctrl+E"), self)
self.addCut.activated.connect(self.mw.onAddCard)
# card info
self.infoCut = QShortcut(QKeySequence("Ctrl+Shift+I"), self)
self.infoCut.activated.connect(self.showCardInfo)
# set deck
self.changeDeckCut = QShortcut(QKeySequence("Ctrl+D"), self)
self.changeDeckCut.activated.connect(self.setDeck)
# add/remove tags
self.tagCut1 = QShortcut(QKeySequence("Ctrl+Shift+T"), self)
self.tagCut1.activated.connect(self.addTags)
self.tagCut2 = QShortcut(QKeySequence("Ctrl+Alt+T"), self)
self.tagCut2.activated.connect(self.deleteTags)
self.tagCut3 = QShortcut(QKeySequence("Ctrl+K"), self)
self.tagCut3.activated.connect(self.onMark)
# suspending
self.susCut1 = QShortcut(QKeySequence("Ctrl+J"), self)
self.susCut1.activated.connect(self.onSuspend)
# deletion
self.delCut1 = QShortcut(QKeySequence("Delete"), self)
self.delCut1.setAutoRepeat(False)
self.delCut1.activated.connect(self.deleteNotes)
# add-on hook
runHook('browser.setupMenus', self)
self.mw.maybeHideAccelerators(self)
def updateFont(self):
# we can't choose different line heights efficiently, so we need
# to pick a line height big enough for any card template
curmax = 16
for m in self.col.models.all():
for t in m['tmpls']:
bsize = t.get("bsize", 0)
if bsize > curmax:
curmax = bsize
self.form.tableView.verticalHeader().setDefaultSectionSize(
curmax + 6)
def closeEvent(self, evt):
if not self._closeEventHasCleanedUp:
if self.editor.note:
# ignore event for now to allow us to save
self.editor.saveNow(self._closeEventAfterSave)
evt.ignore()
else:
self._closeEventCleanup()
evt.accept()
self.mw.gcWindow(self)
else:
evt.accept()
self.mw.gcWindow(self)
def _closeEventAfterSave(self):
self._closeEventCleanup()
self.close()
def _closeEventCleanup(self):
self.editor.setNote(None)
saveSplitter(self.form.splitter_2, "editor2")
saveSplitter(self.form.splitter, "editor3")
saveGeom(self, "editor")
saveState(self, "editor")
saveHeader(self.form.tableView.horizontalHeader(), "editor")
self.col.conf['activeCols'] = self.model.activeCols
self.col.setMod()
self.teardownHooks()
self.mw.maybeReset()
aqt.dialogs.close("Browser")
self._closeEventHasCleanedUp = True
def canClose(self):
return True
    def keyPressEvent(self, evt):
        """Close the browser on Escape; when the sidebar tree has focus,
        Enter/Return activates the current tree item.

        NOTE(review): the original docstring ("Show answer on RET or
        register answer.") appears copied from the reviewer and does not
        describe this code.
        """
        if evt.key() == Qt.Key_Escape:
            self.close()
        elif self.mw.app.focusWidget() == self.form.tree:
            if evt.key() in (Qt.Key_Return, Qt.Key_Enter):
                item = self.form.tree.currentItem()
                self.onTreeClick(item, 0)
def setupColumns(self):
self.columns = [
('question', _("Question")),
('answer', _("Answer")),
('template', _("Card")),
('deck', _("Deck")),
('noteFld', _("Sort Field")),
('noteCrt', _("Created")),
('noteMod', _("Edited")),
('cardMod', _("Changed")),
('cardDue', _("Due")),
('cardIvl', _("Interval")),
('cardEase', _("Ease")),
('cardReps', _("Reviews")),
('cardLapses', _("Lapses")),
('noteTags', _("Tags")),
('note', _("Note")),
]
self.columns.sort(key=itemgetter(1))
# Searching
######################################################################
def setupSearch(self):
self.form.searchEdit.setLineEdit(FavouritesLineEdit(self.mw, self))
self.form.searchButton.clicked.connect(self.onSearchActivated)
self.form.searchEdit.lineEdit().returnPressed.connect(self.onSearchActivated)
self.form.searchEdit.setCompleter(None)
self.form.searchEdit.addItems(self.mw.pm.profile['searchHistory'])
self._searchPrompt = _("<type here to search; hit enter to show current deck>")
self._lastSearchTxt = "is:current"
self.search()
# then replace text for easily showing the deck
self.form.searchEdit.lineEdit().setText(self._searchPrompt)
self.form.searchEdit.lineEdit().selectAll()
self.form.searchEdit.setFocus()
# search triggered by user
def onSearchActivated(self):
self.editor.saveNow(self._onSearchActivated)
def _onSearchActivated(self):
# convert guide text before we save history
if self.form.searchEdit.lineEdit().text() == self._searchPrompt:
self.form.searchEdit.lineEdit().setText("deck:current ")
# update history
txt = str(self.form.searchEdit.lineEdit().text()).strip()
sh = self.mw.pm.profile['searchHistory']
if txt in sh:
sh.remove(txt)
sh.insert(0, txt)
sh = sh[:30]
self.form.searchEdit.clear()
self.form.searchEdit.addItems(sh)
self.mw.pm.profile['searchHistory'] = sh
# keep track of search string so that we reuse identical search when
# refreshing, rather than whatever is currently in the search field
self._lastSearchTxt = txt
self.search()
# search triggered programmatically. caller must have saved note first.
    def search(self):
        """Re-run the last search string and repopulate the card table.

        "is:current" is special-cased to show only the card currently up
        in the reviewer (or nothing when no card is up).
        """
        if "is:current" in self._lastSearchTxt:
            # show current card if there is one
            c = self.mw.reviewer.card
            nid = c and c.nid or 0
            self.model.search("nid:%d"%nid)
        else:
            self.model.search(self._lastSearchTxt)
        if not self.model.cards:
            # no row change will fire, so clear the editor pane manually
            self._onRowChanged(None, None)
def updateTitle(self):
selected = len(self.form.tableView.selectionModel().selectedRows())
cur = len(self.model.cards)
self.setWindowTitle(ngettext("Browser (%(cur)d card shown; %(sel)s)",
"Browser (%(cur)d cards shown; %(sel)s)",
cur) % {
"cur": cur,
"sel": ngettext("%d selected", "%d selected", selected) % selected
})
return selected
def onReset(self):
self.editor.setNote(None)
self.search()
# Table view & editor
######################################################################
def setupTable(self):
self.model = DataModel(self)
self.form.tableView.setSortingEnabled(True)
self.form.tableView.setModel(self.model)
self.form.tableView.selectionModel()
self.form.tableView.setItemDelegate(StatusDelegate(self, self.model))
self.form.tableView.selectionModel().selectionChanged.connect(self.onRowChanged)
def setupEditor(self):
self.editor = aqt.editor.Editor(
self.mw, self.form.fieldsArea, self)
self.editor.stealFocus = False
def onRowChanged(self, current, previous):
"Update current note and hide/show editor."
self.editor.saveNow(lambda: self._onRowChanged(current, previous))
def _onRowChanged(self, current, previous):
update = self.updateTitle()
show = self.model.cards and update == 1
self.form.splitter.widget(1).setVisible(not not show)
if not show:
self.editor.setNote(None)
self.singleCard = False
else:
self.card = self.model.getCard(
self.form.tableView.selectionModel().currentIndex())
self.editor.setNote(self.card.note(reload=True))
self.editor.card = self.card
self.singleCard = True
self._renderPreview(True)
self.toolbar.update()
def refreshCurrentCard(self, note):
self.model.refreshNote(note)
self._renderPreview(False)
def refreshCurrentCardFilter(self, flag, note, fidx):
self.refreshCurrentCard(note)
return flag
def currentRow(self):
idx = self.form.tableView.selectionModel().currentIndex()
return idx.row()
# Headers & sorting
######################################################################
def setupHeaders(self):
vh = self.form.tableView.verticalHeader()
hh = self.form.tableView.horizontalHeader()
if not isWin:
vh.hide()
hh.show()
restoreHeader(hh, "editor")
hh.setHighlightSections(False)
hh.setMinimumSectionSize(50)
hh.setSectionsMovable(True)
self.setColumnSizes()
hh.setContextMenuPolicy(Qt.CustomContextMenu)
hh.customContextMenuRequested.connect(self.onHeaderContext)
self.setSortIndicator()
hh.sortIndicatorChanged.connect(self.onSortChanged)
hh.sectionMoved.connect(self.onColumnMoved)
def onSortChanged(self, idx, ord):
self.editor.saveNow(lambda: self._onSortChanged(idx, ord))
def _onSortChanged(self, idx, ord):
type = self.model.activeCols[idx]
noSort = ("question", "answer", "template", "deck", "note", "noteTags")
if type in noSort:
if type == "template":
showInfo(_("""\
This column can't be sorted on, but you can search for individual card types, \
such as 'card:1'."""))
elif type == "deck":
showInfo(_("""\
This column can't be sorted on, but you can search for specific decks \
by clicking on one on the left."""))
else:
showInfo(_("Sorting on this column is not supported. Please "
"choose another."))
type = self.col.conf['sortType']
if self.col.conf['sortType'] != type:
self.col.conf['sortType'] = type
# default to descending for non-text fields
if type == "noteFld":
ord = not ord
self.col.conf['sortBackwards'] = ord
self.search()
else:
if self.col.conf['sortBackwards'] != ord:
self.col.conf['sortBackwards'] = ord
self.model.reverse()
self.setSortIndicator()
def setSortIndicator(self):
hh = self.form.tableView.horizontalHeader()
type = self.col.conf['sortType']
if type not in self.model.activeCols:
hh.setSortIndicatorShown(False)
return
idx = self.model.activeCols.index(type)
if self.col.conf['sortBackwards']:
ord = Qt.DescendingOrder
else:
ord = Qt.AscendingOrder
hh.blockSignals(True)
hh.setSortIndicator(idx, ord)
hh.blockSignals(False)
hh.setSortIndicatorShown(True)
def onHeaderContext(self, pos):
gpos = self.form.tableView.mapToGlobal(pos)
m = QMenu()
for type, name in self.columns:
a = m.addAction(name)
a.setCheckable(True)
a.setChecked(type in self.model.activeCols)
a.toggled.connect(lambda b, t=type: self.toggleField(t))
m.exec_(gpos)
def toggleField(self, type):
self.editor.saveNow(lambda: self._toggleField(type))
def _toggleField(self, type):
self.model.beginReset()
if type in self.model.activeCols:
if len(self.model.activeCols) < 2:
return showInfo(_("You must have at least one column."))
self.model.activeCols.remove(type)
adding=False
else:
self.model.activeCols.append(type)
adding=True
# sorted field may have been hidden
self.setSortIndicator()
self.setColumnSizes()
self.model.endReset()
# if we added a column, scroll to it
if adding:
row = self.currentRow()
idx = self.model.index(row, len(self.model.activeCols) - 1)
self.form.tableView.scrollTo(idx)
def setColumnSizes(self):
hh = self.form.tableView.horizontalHeader()
hh.setSectionResizeMode(QHeaderView.Interactive)
hh.setSectionResizeMode(hh.logicalIndex(len(self.model.activeCols)-1),
QHeaderView.Stretch)
# this must be set post-resize or it doesn't work
hh.setCascadingSectionResizes(False)
def onColumnMoved(self, a, b, c):
self.setColumnSizes()
# Filter tree
######################################################################
class CallbackItem(QTreeWidgetItem):
def __init__(self, root, name, onclick, oncollapse=None):
QTreeWidgetItem.__init__(self, root, [name])
self.onclick = onclick
self.oncollapse = oncollapse
def setupTree(self):
self.form.tree.itemClicked.connect(self.onTreeClick)
p = QPalette()
p.setColor(QPalette.Base, QColor("#d6dde0"))
self.form.tree.setPalette(p)
self.buildTree()
self.form.tree.itemExpanded.connect(lambda item: self.onTreeCollapse(item))
self.form.tree.itemCollapsed.connect(lambda item: self.onTreeCollapse(item))
def buildTree(self):
self.form.tree.clear()
root = self.form.tree
self._systemTagTree(root)
self._favTree(root)
self._decksTree(root)
self._modelTree(root)
self._userTagTree(root)
self.form.tree.setIndentation(15)
def onTreeClick(self, item, col):
if getattr(item, 'onclick', None):
item.onclick()
def onTreeCollapse(self, item):
if getattr(item, 'oncollapse', None):
item.oncollapse()
def setFilter(self, *args):
if len(args) == 1:
txt = args[0]
else:
txt = ""
items = []
for c, a in enumerate(args):
if c % 2 == 0:
txt += a + ":"
else:
txt += a
if " " in txt or "(" in txt or ")" in txt:
txt = '"%s"' % txt
items.append(txt)
txt = ""
txt = " ".join(items)
if self.mw.app.keyboardModifiers() & Qt.AltModifier:
txt = "-"+txt
if self.mw.app.keyboardModifiers() & Qt.ControlModifier:
cur = str(self.form.searchEdit.lineEdit().text())
if cur and cur != self._searchPrompt:
txt = cur + " " + txt
elif self.mw.app.keyboardModifiers() & Qt.ShiftModifier:
cur = str(self.form.searchEdit.lineEdit().text())
if cur:
txt = cur + " or " + txt
self.form.searchEdit.lineEdit().setText(txt)
self.onSearchActivated()
def _systemTagTree(self, root):
tags = (
(_("Whole Collection"), "ankibw", ""),
(_("Current Deck"), "deck16", "deck:current"),
(_("Added Today"), "view-pim-calendar.png", "added:1"),
(_("Studied Today"), "view-pim-calendar.png", "rated:1"),
(_("Again Today"), "view-pim-calendar.png", "rated:1:1"),
(_("New"), "plus16.png", "is:new"),
(_("Learning"), "stock_new_template_red.png", "is:learn"),
(_("Review"), "clock16.png", "is:review"),
(_("Due"), "clock16.png", "is:due"),
(_("Marked"), "star16.png", "tag:marked"),
(_("Suspended"), "media-playback-pause.png", "is:suspended"),
(_("Leech"), "emblem-important.png", "tag:leech"))
for name, icon, cmd in tags:
item = self.CallbackItem(
root, name, lambda c=cmd: self.setFilter(c))
item.setIcon(0, QIcon(":/icons/" + icon))
return root
def _favTree(self, root):
saved = self.col.conf.get('savedFilters', [])
if not saved:
# Don't add favourites to tree if none saved
return
root = self.CallbackItem(root, _("My Searches"), None)
root.setExpanded(True)
root.setIcon(0, QIcon(":/icons/emblem-favorite-dark.png"))
for name, filt in sorted(saved.items()):
item = self.CallbackItem(root, name, lambda s=filt: self.setFilter(s))
item.setIcon(0, QIcon(":/icons/emblem-favorite-dark.png"))
def _userTagTree(self, root):
for t in sorted(self.col.tags.all()):
if t.lower() == "marked" or t.lower() == "leech":
continue
item = self.CallbackItem(
root, t, lambda t=t: self.setFilter("tag", t))
item.setIcon(0, QIcon(":/icons/anki-tag.png"))
def _decksTree(self, root):
grps = self.col.sched.deckDueTree()
def fillGroups(root, grps, head=""):
for g in grps:
item = self.CallbackItem(
root, g[0],
lambda g=g: self.setFilter("deck", head+g[0]),
lambda g=g: self.mw.col.decks.collapseBrowser(g[1]))
item.setIcon(0, QIcon(":/icons/deck16.png"))
newhead = head + g[0]+"::"
collapsed = self.mw.col.decks.get(g[1]).get('browserCollapsed', False)
item.setExpanded(not collapsed)
fillGroups(item, g[5], newhead)
fillGroups(root, grps)
def _modelTree(self, root):
for m in sorted(self.col.models.all(), key=itemgetter("name")):
mitem = self.CallbackItem(
root, m['name'], lambda m=m: self.setFilter("mid", str(m['id'])))
mitem.setIcon(0, QIcon(":/icons/product_design.png"))
# for t in m['tmpls']:
# titem = self.CallbackItem(
# t['name'], lambda m=m, t=t: self.setFilter(
# "model", m['name'], "card", t['name']))
# titem.setIcon(0, QIcon(":/icons/stock_new_template.png"))
# mitem.addChild(titem)
# Info
######################################################################
    def showCardInfo(self):
        """Pop up a dialog showing stats and review history for the
        current card; no-op when no card is focused."""
        if not self.card:
            return
        info, cs = self._cardInfoData()
        reps = self._revlogData(cs)
        d = QDialog(self)
        l = QVBoxLayout()
        l.setContentsMargins(0,0,0,0)
        w = AnkiWebView()
        l.addWidget(w)
        w.stdHtml(info + "<p>" + reps)
        bb = QDialogButtonBox(QDialogButtonBox.Close)
        l.addWidget(bb)
        bb.rejected.connect(d.reject)
        d.setLayout(l)
        d.setWindowModality(Qt.WindowModal)
        d.resize(500, 400)
        restoreGeom(d, "revlog")
        d.show()
        # NOTE(review): saveGeom() runs immediately after show(), so it only
        # re-saves the just-restored geometry; later user moves/resizes are
        # not captured here — confirm whether this is intentional.
        saveGeom(d, "revlog")
def _cardInfoData(self):
from anki.stats import CardStats
cs = CardStats(self.col, self.card)
rep = cs.report()
m = self.card.model()
rep = """
<div style='width: 400px; margin: 0 auto 0;
border: 1px solid #000; padding: 3px; '>%s</div>""" % rep
return rep, cs
def _revlogData(self, cs):
entries = self.mw.col.db.all(
"select id/1000.0, ease, ivl, factor, time/1000.0, type "
"from revlog where cid = ?", self.card.id)
if not entries:
return ""
s = "<table width=100%%><tr><th align=left>%s</th>" % _("Date")
s += ("<th align=right>%s</th>" * 5) % (
_("Type"), _("Rating"), _("Interval"), _("Ease"), _("Time"))
cnt = 0
for (date, ease, ivl, factor, taken, type) in reversed(entries):
cnt += 1
s += "<tr><td>%s</td>" % time.strftime(_("<b>%Y-%m-%d</b> @ %H:%M"),
time.localtime(date))
tstr = [_("Learn"), _("Review"), _("Relearn"), _("Filtered"),
_("Resched")][type]
import anki.stats as st
fmt = "<span style='color:%s'>%s</span>"
if type == 0:
tstr = fmt % (st.colLearn, tstr)
elif type == 1:
tstr = fmt % (st.colMature, tstr)
elif type == 2:
tstr = fmt % (st.colRelearn, tstr)
elif type == 3:
tstr = fmt % (st.colCram, tstr)
else:
tstr = fmt % ("#000", tstr)
if ease == 1:
ease = fmt % (st.colRelearn, ease)
if ivl == 0:
ivl = _("0d")
elif ivl > 0:
ivl = fmtTimeSpan(ivl*86400, short=True)
else:
ivl = cs.time(-ivl)
s += ("<td align=right>%s</td>" * 5) % (
tstr,
ease, ivl,
"%d%%" % (factor/10) if factor else "",
cs.time(taken)) + "</tr>"
s += "</table>"
if cnt < self.card.reps:
s += _("""\
Note: Some of the history is missing. For more information, \
please see the browser documentation.""")
return s
# Menu helpers
######################################################################
# Return the card ids for the rows currently selected in the table.
def selectedCards(self):
return [self.model.cards[idx.row()] for idx in
self.form.tableView.selectionModel().selectedRows()]
# Return the distinct note ids behind the selected cards.
def selectedNotes(self):
return self.col.db.list("""
select distinct nid from cards
where id in %s""" % ids2str(
[self.model.cards[idx.row()] for idx in
self.form.tableView.selectionModel().selectedRows()]))
# Return the ids of *all* cards belonging to the selected notes.
def selectedNotesAsCards(self):
return self.col.db.list(
"select id from cards where nid in (%s)" %
",".join([str(s) for s in self.selectedNotes()]))
# Return the selected note ids, or None (with a warning dialog) when the
# selection spans more than one note type.
def oneModelNotes(self):
sf = self.selectedNotes()
if not sf:
return
mods = self.col.db.scalar("""
select count(distinct mid) from notes
where id in %s""" % ids2str(sf))
if mods > 1:
showInfo(_("Please select cards from only one note type."))
return
return sf
# Open the browser section of the manual.
def onHelp(self):
openHelp("browser")
# Misc menu options
######################################################################
# Save any pending edit, then show the change-note-type dialog.
def onChangeModel(self):
self.editor.saveNow(self._onChangeModel)
def _onChangeModel(self):
# only possible when the selection covers exactly one note type
nids = self.oneModelNotes()
if nids:
ChangeModel(self.mw.col, nids, parent=self).exec_()
# Preview
######################################################################
# Toggle the preview window open/closed.
def onTogglePreview(self):
if self._previewWindow:
self._closePreview()
else:self._openPreview()
# Build the preview window: a web view plus replay/prev/next buttons.
def _openPreview(self):
self._previewState = "question"
self._previewWindow = QDialog(None, Qt.Window)
self._previewWindow.setWindowTitle(_("Preview"))
self._previewWindow.finished.connect(self._onPreviewFinished)
vbox = QVBoxLayout()
vbox.setContentsMargins(0,0,0,0)
self._previewWeb = AnkiWebView()
vbox.addWidget(self._previewWeb)
bbox = QDialogButtonBox()
self._previewReplay = bbox.addButton(_("Replay Audio"), QDialogButtonBox.ActionRole)
self._previewReplay.setAutoDefault(False)
self._previewReplay.setShortcut(QKeySequence("R"))
self._previewReplay.setToolTip(_("Shortcut key: %s" % "R"))
self._previewPrev = bbox.addButton("<", QDialogButtonBox.ActionRole)
self._previewPrev.setAutoDefault(False)
self._previewPrev.setShortcut(QKeySequence("Left"))
self._previewPrev.setToolTip(_("Shortcut key: Left arrow"))
self._previewNext = bbox.addButton(">", QDialogButtonBox.ActionRole)
self._previewNext.setAutoDefault(True)
self._previewNext.setShortcut(QKeySequence("Right"))
self._previewNext.setToolTip(_("Shortcut key: Right arrow or Enter"))
self._previewPrev.clicked.connect(self._onPreviewPrev)
self._previewNext.clicked.connect(self._onPreviewNext)
self._previewReplay.clicked.connect(self._onReplayAudio)
vbox.addWidget(bbox)
self._previewWindow.setLayout(vbox)
restoreGeom(self._previewWindow, "preview")
self._previewWindow.show()
self._renderPreview(True)
# Persist geometry and tear down shortly after the dialog closes.
def _onPreviewFinished(self, ok):
saveGeom(self._previewWindow, "preview")
self.mw.progress.timer(100, self._onClosePreview, False)
self.form.previewButton.setChecked(False)
# When showing the question, flip to the answer; otherwise move to the
# previous card. NOTE(review): prev flips question->answer just like next
# does — confirm this is intended.
def _onPreviewPrev(self):
if self._previewState == "question":
self._previewState = "answer"
self._renderPreview()
else:
self.onPreviousCard()
self._updatePreviewButtons()
# When showing the question, flip to the answer; otherwise next card.
def _onPreviewNext(self):
if self._previewState == "question":
self._previewState = "answer"
self._renderPreview()
else:
self.onNextCard()
self._updatePreviewButtons()
def _onReplayAudio(self):
self.mw.reviewer.replayAudio(self)
# Enable/disable prev/next according to list position and q/a state.
def _updatePreviewButtons(self):
if not self._previewWindow:
return
canBack = self.currentRow() > 0 or self._previewState == "question"
self._previewPrev.setEnabled(not not (self.singleCard and canBack))
canForward = self.currentRow() < self.model.rowCount(None) - 1 or \
self._previewState == "question"
self._previewNext.setEnabled(not not (self.singleCard and canForward))
def _closePreview(self):
if self._previewWindow:
self._previewWindow.close()
self._onClosePreview()
# Drop all references so onTogglePreview() knows the window is gone.
def _onClosePreview(self):
self._previewWindow = self._previewPrev = self._previewNext = None
def _renderPreview(self, cardChanged=False):
    """Render the focused card's question or answer into the preview window.

    cardChanged -- True when a different card was selected, forcing the
    preview back to the question side and a template re-render.
    """
    if not self._previewWindow:
        return
    c = self.card
    if not c:
        txt = _("(please select 1 card)")
        self._previewWeb.stdHtml(txt)
        self._updatePreviewButtons()
        return
    self._updatePreviewButtons()
    if cardChanged:
        self._previewState = "question"
    # need to force reload even if answer
    txt = c.q(reload=True)
    if self._previewState == "answer":
        txt = c.a()
    # strip type-answer placeholders; raw string avoids the invalid
    # escape-sequence warning the previous non-raw pattern produced
    txt = re.sub(r"\[\[type:[^]]+\]\]", "", txt)
    ti = lambda x: x
    base = self.mw.baseHTML()
    self._previewWeb.stdHtml(
        ti(mungeQA(self.col, txt)), self.mw.reviewer._styles(),
        bodyClass="card card%d" % (c.ord+1), head=base,
        js=anki.js.browserSel)
    clearAudioQueue()
    if self.mw.reviewer.autoplay(c):
        playFromText(txt)
# Card deletion
######################################################################
# Save pending edits, then delete the selected notes.
def deleteNotes(self):
self.editor.saveNow(self._deleteNotes)
def _deleteNotes(self):
nids = self.selectedNotes()
if not nids:
return
self.mw.checkpoint(_("Delete Notes"))
self.model.beginReset()
# figure out where to place the cursor after the deletion
curRow = self.form.tableView.selectionModel().currentIndex().row()
selectedRows = [i.row() for i in
self.form.tableView.selectionModel().selectedRows()]
if min(selectedRows) < curRow < max(selectedRows):
# last selection in middle; place one below last selected item
move = sum(1 for i in selectedRows if i > curRow)
newRow = curRow - move
elif max(selectedRows) <= curRow:
# last selection at bottom; place one below bottommost selection
newRow = max(selectedRows) - len(nids) + 1
else:
# last selection at top; place one above topmost selection
newRow = min(selectedRows) - 1
self.col.remNotes(nids)
self.search()
if len(self.model.cards):
# clamp the computed position to the remaining rows
newRow = min(newRow, len(self.model.cards) - 1)
newRow = max(newRow, 0)
self.model.focusedCard = self.model.cards[newRow]
self.model.endReset()
self.mw.requireReset()
tooltip(ngettext("%d note deleted.", "%d notes deleted.", len(nids)) % len(nids))
# Deck change
######################################################################
# Save pending edits, then move the selected cards to another deck.
def setDeck(self):
self.editor.saveNow(self._setDeck)
def _setDeck(self):
from aqt.studydeck import StudyDeck
cids = self.selectedCards()
if not cids:
return
# default the deck chooser to the first selected card's deck
did = self.mw.col.db.scalar(
"select did from cards where id = ?", cids[0])
current=self.mw.col.decks.get(did)['name']
ret = StudyDeck(
self.mw, current=current, accept=_("Move Cards"),
title=_("Change Deck"), help="browse", parent=self)
if not ret.name:
return
did = self.col.decks.id(ret.name)
deck = self.col.decks.get(did)
if deck['dyn']:
showWarning(_("Cards can't be manually moved into a filtered deck."))
return
self.model.beginReset()
self.mw.checkpoint(_("Change Deck"))
mod = intTime()
usn = self.col.usn()
# normal cards
scids = ids2str(cids)
# remove any cards from filtered deck first
self.col.sched.remFromDyn(cids)
# then move into new deck
self.col.db.execute("""
update cards set usn=?, mod=?, did=? where id in """ + scids,
usn, mod, did)
self.model.endReset()
self.mw.requireReset()
# Tags
######################################################################
# Save pending edits, then bulk-add tags to the selected notes.
# tags/label/prompt/func default to the "add" flavour; deleteTags() reuses
# this method with the removal function.
def addTags(self, tags=None, label=None, prompt=None, func=None):
self.editor.saveNow(lambda: self._addTags(tags, label, prompt, func))
def _addTags(self, tags, label, prompt, func):
if prompt is None:
prompt = _("Enter tags to add:")
if tags is None:
# ask the user; r is False when the dialog was cancelled
(tags, r) = getTag(self, self.col, prompt)
else:
r = True
if not r:
return
if func is None:
func = self.col.tags.bulkAdd
if label is None:
label = _("Add Tags")
if label:
# label=False skips the undo checkpoint (used by onMark)
self.mw.checkpoint(label)
self.model.beginReset()
func(self.selectedNotes(), tags)
self.model.endReset()
self.mw.requireReset()
# Bulk-remove tags: delegates to addTags with the removal function.
def deleteTags(self, tags=None, label=None):
if label is None:
label = _("Delete Tags")
self.addTags(tags, label, _("Enter tags to delete:"),
func=self.col.tags.bulkRem)
# Suspending and marking
######################################################################
def isSuspended(self):
    """Return True when a card is focused and it is suspended (queue == -1)."""
    card = self.card
    return bool(card and card.queue == -1)
# Save pending edits, then toggle suspension on the selected cards.
def onSuspend(self):
self.editor.saveNow(self._onSuspend)
def _onSuspend(self):
# suspend unless the focused card is already suspended
sus = not self.isSuspended()
c = self.selectedCards()
if sus:
self.col.sched.suspendCards(c)
else:
self.col.sched.unsuspendCards(c)
self.model.reset()
self.mw.requireReset()
def isMarked(self):
    """Return True when a card is focused and its note carries the 'Marked' tag."""
    card = self.card
    return bool(card and card.note().hasTag("Marked"))
# Set or toggle the 'marked' tag on the selected notes.
def onMark(self, mark=None):
if mark is None:
mark = not self.isMarked()
if mark:
# label=False: no undo checkpoint for mark toggles
self.addTags(tags="marked", label=False)
else:
self.deleteTags(tags="marked", label=False)
# Repositioning
######################################################################
# Save pending edits, then reposition the selected new cards in the queue.
def reposition(self):
self.editor.saveNow(self._reposition)
def _reposition(self):
cids = self.selectedCards()
# only new cards (type = 0) have a queue position
cids2 = self.col.db.list(
"select id from cards where type = 0 and id in " + ids2str(cids))
if not cids2:
return showInfo(_("Only new cards can be repositioned."))
d = QDialog(self)
d.setWindowModality(Qt.WindowModal)
frm = aqt.forms.reposition.Ui_Dialog()
frm.setupUi(d)
# show the current min/max due positions of the new-card queue
(pmin, pmax) = self.col.db.first(
"select min(due), max(due) from cards where type=0 and odid=0")
pmin = pmin or 0
pmax = pmax or 0
txt = _("Queue top: %d") % pmin
txt += "\n" + _("Queue bottom: %d") % pmax
frm.label.setText(txt)
if not d.exec_():
return
self.model.beginReset()
self.mw.checkpoint(_("Reposition"))
self.col.sched.sortCards(
cids, start=frm.start.value(), step=frm.step.value(),
shuffle=frm.randomize.isChecked(), shift=frm.shift.isChecked())
self.search()
self.mw.requireReset()
self.model.endReset()
# Rescheduling
######################################################################
# Save pending edits, then reschedule the selected cards.
def reschedule(self):
self.editor.saveNow(self._reschedule)
def _reschedule(self):
d = QDialog(self)
d.setWindowModality(Qt.WindowModal)
frm = aqt.forms.reschedule.Ui_Dialog()
frm.setupUi(d)
if not d.exec_():
return
self.model.beginReset()
self.mw.checkpoint(_("Reschedule"))
if frm.asNew.isChecked():
# put the cards back into the new-card queue
self.col.sched.forgetCards(self.selectedCards())
else:
# reschedule with an interval between fmin and fmax days
fmin = frm.min.value()
fmax = frm.max.value()
fmax = max(fmin, fmax)
self.col.sched.reschedCards(
self.selectedCards(), fmin, fmax)
self.search()
self.mw.requireReset()
self.model.endReset()
# Edit: selection
######################################################################
# Save pending edits, then widen the selection to whole notes by
# re-searching on the selected note ids.
def selectNotes(self):
self.editor.saveNow(self._selectNotes)
def _selectNotes(self):
nids = self.selectedNotes()
# bypass search history
self._lastSearchTxt = "nid:"+",".join([str(x) for x in nids])
self.form.searchEdit.lineEdit().setText(self._lastSearchTxt)
# clear the selection so we don't waste energy preserving it
tv = self.form.tableView
tv.selectionModel().clear()
self.search()
tv.selectAll()
# Select every row that was unselected, and vice versa.
def invertSelection(self):
sm = self.form.tableView.selectionModel()
items = sm.selection()
self.form.tableView.selectAll()
sm.select(items, QItemSelectionModel.Deselect | QItemSelectionModel.Rows)
# Edit: undo
######################################################################
# Register the hooks this window listens to; mirrored by teardownHooks.
def setupHooks(self):
addHook("undoState", self.onUndoState)
addHook("reset", self.onReset)
addHook("editTimer", self.refreshCurrentCard)
addHook("editFocusLost", self.refreshCurrentCardFilter)
for t in "newTag", "newModel", "newDeck":
addHook(t, self.buildTree)
addHook("change_note_type_start", self.on_start_change_note_type)
addHook("change_note_type_end", self.on_end_change_note_type)
# Remove exactly the hooks added in setupHooks.
def teardownHooks(self):
remHook("reset", self.onReset)
remHook("editTimer", self.refreshCurrentCard)
remHook("editFocusLost", self.refreshCurrentCardFilter)
remHook("undoState", self.onUndoState)
for t in "newTag", "newModel", "newDeck":
remHook(t, self.buildTree)
remHook("change_note_type_start", self.on_start_change_note_type)
remHook("change_note_type_end", self.on_end_change_note_type)
# Mirror the main window's undo action state in this window.
def onUndoState(self, on):
self.form.actionUndo.setEnabled(on)
if on:
self.form.actionUndo.setText(self.mw.form.actionUndo.text())
# Edit: replacing
######################################################################
# Save pending edits, then run find & replace over the selected notes.
def onFindReplace(self):
self.editor.saveNow(self._onFindReplace)
def _onFindReplace(self):
sf = self.selectedNotes()
if not sf:
return
import anki.find
fields = sorted(anki.find.fieldNames(self.col, downcase=False))
d = QDialog(self)
frm = aqt.forms.findreplace.Ui_Dialog()
frm.setupUi(d)
d.setWindowModality(Qt.WindowModal)
frm.field.addItems([_("All Fields")] + fields)
frm.buttonBox.helpRequested.connect(self.onFindReplaceHelp)
restoreGeom(d, "findreplace")
r = d.exec_()
saveGeom(d, "findreplace")
if not r:
return
# index 0 is the synthetic "All Fields" entry
if frm.field.currentIndex() == 0:
field = None
else:
field = fields[frm.field.currentIndex()-1]
self.mw.checkpoint(_("Find and Replace"))
self.mw.progress.start()
self.model.beginReset()
try:
changed = self.col.findReplace(sf,
str(frm.find.text()),
str(frm.replace.text()),
frm.re.isChecked(),
field,
frm.ignoreCase.isChecked())
except sre_constants.error:
# the user typed a broken regex; nothing was changed
showInfo(_("Invalid regular expression."), parent=self)
return
else:
self.search()
self.mw.requireReset()
finally:
self.model.endReset()
self.mw.progress.finish()
showInfo(ngettext(
"%(a)d of %(b)d note updated",
"%(a)d of %(b)d notes updated", len(sf)) % {
'a': changed,
'b': len(sf),
})
def onFindReplaceHelp(self):
openHelp("findreplace")
# Edit: finding dupes
######################################################################
# Save pending edits, then open the duplicate-finder dialog.
def onFindDupes(self):
self.editor.saveNow(self._onFindDupes)
def _onFindDupes(self):
d = QDialog(self)
self.mw.setupDialogGC(d)
frm = aqt.forms.finddupes.Ui_Dialog()
frm.setupUi(d)
restoreGeom(d, "findDupes")
fields = sorted(anki.find.fieldNames(self.col, downcase=False))
frm.fields.addItems(fields)
# the "Tag Duplicates" button is added lazily by duplicatesReport()
self._dupesButton = None
# links
frm.webView.onBridgeCmd = self.dupeLinkClicked
def onFin(code):
saveGeom(d, "findDupes")
d.finished.connect(onFin)
def onClick():
field = fields[frm.fields.currentIndex()]
self.duplicatesReport(frm.webView, field, frm.search.text(), frm)
search = frm.buttonBox.addButton(
_("Search"), QDialogButtonBox.ActionRole)
search.clicked.connect(onClick)
d.show()
def duplicatesReport(self, web, fname, search, frm):
    """Search for duplicate notes on field fname and render an HTML report
    into web; lazily adds a "Tag Duplicates" button to frm."""
    self.mw.progress.start()
    res = self.mw.col.findDupes(fname, search)
    # keep the latest results where the button handler can see them;
    # the old closure captured only the *first* search's results
    self._dupesResults = res
    if not self._dupesButton:
        self._dupesButton = b = frm.buttonBox.addButton(
            _("Tag Duplicates"), QDialogButtonBox.ActionRole)
        b.clicked.connect(lambda: self._onTagDupes(self._dupesResults))
    t = "<html><body>"
    groups = len(res)
    notes = sum(len(r[1]) for r in res)
    part1 = ngettext("%d group", "%d groups", groups) % groups
    part2 = ngettext("%d note", "%d notes", notes) % notes
    t += _("Found %(a)s across %(b)s.") % dict(a=part1, b=part2)
    t += "<p><ol>"
    for val, nids in res:
        # fixed: the previous template emitted a stray trailing </a>
        t += '''<li><a href=# onclick="pycmd('%s')">%s</a>: %s</li>''' % (
            "nid:" + ",".join(str(id) for id in nids),
            ngettext("%d note", "%d notes", len(nids)) % len(nids),
            cgi.escape(val))
    t += "</ol>"
    t += "</body></html>"
    web.setHtml(t)
    self.mw.progress.finish()
# Tag every note that appeared in a duplicates search result.
def _onTagDupes(self, res):
if not res:
return
self.model.beginReset()
self.mw.checkpoint(_("Tag Duplicates"))
nids = set()
for s, nidlist in res:
nids.update(nidlist)
self.col.tags.bulkAdd(nids, _("duplicate"))
self.mw.progress.finish()
self.model.endReset()
self.mw.requireReset()
tooltip(_("Notes tagged."))
# A duplicate-group link was clicked: search for its note ids.
def dupeLinkClicked(self, link):
self.form.searchEdit.lineEdit().setText(link)
# manually, because we've already saved
self._lastSearchTxt = link
self.search()
self.onNote()
# Jumping
######################################################################
# Move the table cursor, either by direction (dir) or to an explicit index.
def _moveCur(self, dir=None, idx=None):
if not self.model.cards:
return
tv = self.form.tableView
if idx is None:
idx = tv.moveCursor(dir, self.mw.app.keyboardModifiers())
tv.selectionModel().clear()
tv.setCurrentIndex(idx)
# Save edits, move one row up, and restore focus to the same field.
def onPreviousCard(self):
self.editor.saveNow(self._onPreviousCard)
def _onPreviousCard(self):
# NOTE(review): currentField may be None, which would break the
# focusField(%d) format below — confirm callers guarantee a field
f = self.editor.currentField
self._moveCur(QAbstractItemView.MoveUp)
self.editor.web.setFocus()
self.editor.web.eval("focusField(%d)" % f)
# Save edits, move one row down, and restore focus to the same field.
def onNextCard(self):
self.editor.saveNow(self._onNextCard)
def _onNextCard(self):
f = self.editor.currentField
self._moveCur(QAbstractItemView.MoveDown)
self.editor.web.setFocus()
self.editor.web.eval("focusField(%d)" % f)
# Jump to the first row; extend the selection when shift is held.
def onFirstCard(self):
sm = self.form.tableView.selectionModel()
idx = sm.currentIndex()
self._moveCur(None, self.model.index(0, 0))
if not self.mw.app.keyboardModifiers() & Qt.ShiftModifier:
return
idx2 = sm.currentIndex()
item = QItemSelection(idx2, idx)
sm.select(item, QItemSelectionModel.SelectCurrent|
QItemSelectionModel.Rows)
# Jump to the last row; extend the selection when shift is held.
def onLastCard(self):
sm = self.form.tableView.selectionModel()
idx = sm.currentIndex()
self._moveCur(
None, self.model.index(len(self.model.cards) - 1, 0))
if not self.mw.app.keyboardModifiers() & Qt.ShiftModifier:
return
idx2 = sm.currentIndex()
item = QItemSelection(idx, idx2)
sm.select(item, QItemSelectionModel.SelectCurrent|
QItemSelectionModel.Rows)
# Focus the search box and select its contents.
def onFind(self):
self.form.searchEdit.setFocus()
self.form.searchEdit.lineEdit().selectAll()
# Focus the first field of the note editor.
def onNote(self):
self.editor.focus()
self.editor.web.setFocus()
self.editor.web.eval("focusField(0);")
def onTags(self):
self.form.tree.setFocus()
def onCardList(self):
self.form.tableView.setFocus()
def focusCid(self, cid):
    """Select the table row showing card id cid; no-op when the card is
    not in the current result set."""
    try:
        row = self.model.cards.index(cid)
    except ValueError:
        # narrowed from a bare except: list.index raises ValueError when
        # the id is absent; anything else should surface
        return
    self.form.tableView.selectRow(row)
# Note update:
######################################################################
# Hook: a note-type change is starting; checkpoint and freeze the view.
def on_start_change_note_type(self):
self.mw.checkpoint(_("Change Note Type"))
self.mw.col.modSchema(check=True)
self.mw.progress.start()
self.model.beginReset()
# Hook: the note-type change finished; refresh and thaw the view.
def on_end_change_note_type(self):
self.search()
self.model.endReset()
self.mw.progress.finish()
self.mw.reset()
# Change model dialog
######################################################################
class ChangeModel(QDialog):
    """
    Dialog that allows the user to create field and template maps from one
    note model to another, then apply the conversion to the selected notes.
    """
    def __init__(self, collection, note_id_list, old_model=None, parent=None):
        """collection: anki collection; note_id_list: ids of the notes to
        convert; old_model: source model, defaulting to the model of the
        first note in note_id_list."""
        QDialog.__init__(self, parent)
        self.collection = collection
        self.nids = note_id_list
        self.oldModel = old_model
        if self.oldModel is None:
            first_note = Note(collection, id=note_id_list[0])
            self.oldModel = first_note.model()
        self.form = aqt.forms.changemodel.Ui_Dialog()
        self.form.setupUi(self)
        self.setWindowModality(Qt.WindowModal)
        self.setup()
        # guards onComboChanged against reacting to its own updates
        self.pauseUpdate = False
        self.modelChanged(self.collection.models.current())
        aqt.utils.restoreGeom(self, "changeModel")
        anki.hooks.addHook("reset", self.onReset)
        anki.hooks.addHook("currentModelChanged", self.onReset)

    def setup(self):
        """Build the field/template map layouts and the model chooser."""
        # maps
        self.flayout = QHBoxLayout()
        self.flayout.setContentsMargins(0, 0, 0, 0)
        self.fwidg = None
        self.form.fieldMap.setLayout(self.flayout)
        self.tlayout = QHBoxLayout()
        self.tlayout.setContentsMargins(0, 0, 0, 0)
        self.twidg = None
        self.form.templateMap.setLayout(self.tlayout)
        if self.style().objectName() == "gtk+":
            # gtk+ requires margins in inner layout
            self.form.verticalLayout_2.setContentsMargins(0, 11, 0, 0)
            self.form.verticalLayout_3.setContentsMargins(0, 11, 0, 0)
        # model chooser
        import aqt.modelchooser
        self.form.oldModelLabel.setText(self.oldModel['name'])
        self.modelChooser = aqt.modelchooser.ModelChooser(
            aqt.mw, self.form.modelChooserWidget, label=False)
        self.modelChooser.models.setFocus()
        self.form.buttonBox.helpRequested.connect(self.onHelp)

    def onReset(self):
        """Re-sync the dialog when the collection or current model changes."""
        self.modelChanged(self.collection.models.current())

    def modelChanged(self, model):
        """Set the target model and rebuild both mapping widgets."""
        self.targetModel = model
        self.rebuildTemplateMap()
        self.rebuildFieldMap()

    def rebuildTemplateMap(self, key=None, attr=None):
        """Rebuild the combo-box mapping widget: templates by default
        (key='t', attr='tmpls'), fields via rebuildFieldMap ('f'/'flds')."""
        if not key:
            key = "t"
            attr = "tmpls"
        map_widget = getattr(self, key + "widg")
        layout = getattr(self, key + "layout")
        src = self.oldModel[attr]
        dst = self.targetModel[attr]
        if map_widget:
            layout.removeWidget(map_widget)
            map_widget.deleteLater()
            # fixed: previously set an unused '<key>MapWidget' attribute
            setattr(self, key + "widg", None)
        map_widget = QWidget()
        map_widget_layout = QGridLayout()
        combos = []
        targets = [entity['name'] for entity in dst] + [_("Nothing")]
        indices = {}
        for i, entity in enumerate(src):
            map_widget_layout.addWidget(
                QLabel(_("Change %s to:") % entity['name']), i, 0)
            combo_box = QComboBox()
            combo_box.addItems(targets)
            idx = min(i, len(targets) - 1)
            combo_box.setCurrentIndex(idx)
            indices[combo_box] = idx
            # bind combo_box as a default argument: the previous plain
            # closure late-bound the loop variable, so every signal
            # reported the *last* combo box in the loop
            combo_box.currentIndexChanged.connect(
                lambda entry_id, combo_box=combo_box: self.onComboChanged(
                    entry_id, combo_box, key))
            combos.append(combo_box)
            map_widget_layout.addWidget(combo_box, i, 1)
        map_widget.setLayout(map_widget_layout)
        layout.addWidget(map_widget)
        setattr(self, key + "widg", map_widget)
        setattr(self, key + "layout", layout)
        setattr(self, key + "combos", combos)
        setattr(self, key + "indices", indices)

    def rebuildFieldMap(self):
        """Rebuild the field mapping widget."""
        return self.rebuildTemplateMap(key="f", attr="flds")

    def onComboChanged(self, combo_box_index, combo_box, key):
        """Keep mappings unique: when a combo is set to a target already
        used by another combo, swap the other combo to this one's old
        value. 'Nothing' may be shared freely."""
        indices = getattr(self, key + "indices")
        if self.pauseUpdate:
            # change triggered by our own setCurrentIndex below
            indices[combo_box] = combo_box_index
            return
        combos = getattr(self, key + "combos")
        if combo_box_index == combo_box.count() - 1:
            # set to 'nothing'
            return
        # find another combo with same index
        for c in combos:
            if c == combo_box:
                continue
            if c.currentIndex() == combo_box_index:
                self.pauseUpdate = True
                c.setCurrentIndex(indices[combo_box])
                self.pauseUpdate = False
                break
        indices[combo_box] = combo_box_index

    def getTemplateMap(self, old=None, combos=None, new=None):
        """Return {old ordinal -> new ordinal or None} for templates by
        default, or for whatever old/combos/new triple is passed in."""
        if not old:
            old = self.oldModel['tmpls']
            combos = self.tcombos
            new = self.targetModel['tmpls']
        model_map = {}
        for i, f in enumerate(old):
            idx = combos[i].currentIndex()
            if idx == len(new):
                # mapped to 'Nothing': cards of this template are dropped
                model_map[f['ord']] = None
            else:
                f2 = new[idx]
                model_map[f['ord']] = f2['ord']
        return model_map

    def getFieldMap(self):
        """Return the ordinal map for fields."""
        return self.getTemplateMap(
            old=self.oldModel['flds'],
            combos=self.fcombos,
            new=self.targetModel['flds'])

    def cleanup(self):
        """Remove hooks and persist window geometry."""
        anki.hooks.remHook("reset", self.onReset)
        anki.hooks.remHook("currentModelChanged", self.onReset)
        self.modelChooser.cleanup()
        aqt.utils.saveGeom(self, "changeModel")

    def reject(self):
        self.cleanup()
        return QDialog.reject(self)

    def accept(self):
        """Apply the conversion after confirming destructive mappings."""
        # check maps
        field_map = self.getFieldMap()
        templates_map = self.getTemplateMap()
        if any(True for template in list(templates_map.values())
               if template is None):
            if not aqt.utils.askUser(_(
                    "Any cards mapped to nothing will be deleted. "
                    "If a note has no remaining cards, it will be lost. "
                    "Are you sure you want to continue?")):
                return
        self.collection.models.change(
            self.oldModel, self.nids, self.targetModel, field_map,
            templates_map)
        self.cleanup()
        QDialog.accept(self)

    @staticmethod
    def onHelp():
        aqt.utils.openHelp("browsermisc")
# Toolbar
######################################################################
# Toolbar shown at the top of the browser window.
class BrowserToolbar(Toolbar):
def __init__(self, mw, web, browser):
self.browser = browser
Toolbar.__init__(self, mw, web)
# Render the toolbar HTML; update() is deferred until the page loads.
def draw(self):
self._loaded = False
self.web.onBridgeCmd = self._linkHandler
self.web.onLoadFinished = self.onLoaded
self.web.stdHtml(self.html(), self.css())
def onLoaded(self):
super().onLoaded()
self._loaded = True
self.update()
# Highlight the mark/pause buttons to mirror the focused card's state.
def update(self):
if not self._loaded:
return
for link, enabled in (
("mark", self.browser.isMarked()),
("pause", self.browser.isSuspended())):
if enabled:
self.web.eval("$('#%s').addClass('buttonOn')" % link)
else:
self.web.eval("$('#%s').removeClass('buttonOn')" % link)
# Build the right-aligned row of toolbar buttons.
def html(self):
def borderImg(link, icon, title, tooltip=None):
fmt = '''\
<a class=hitem title="%s" href=# onclick="pycmd('%s')"><img id=%s valign=bottom src="qrc:/icons/%s.png"> %s</a>'''
return fmt % (tooltip or title, link, link, icon, title)
right = "<div>"
right += borderImg("add", "add16", _("Add"),
shortcut(_("Add Note (Ctrl+E)")))
right += borderImg("info", "info", _("Info"),
shortcut(_("Card Info (Ctrl+Shift+I)")))
right += borderImg("mark", "star16", _("Mark"),
shortcut(_("Mark Note (Ctrl+K)")))
right += borderImg("pause", "pause16", _("Suspend"),
shortcut(_("Suspend Card (Ctrl+J)")))
right += borderImg("setDeck", "deck16", _("Change Deck"),
shortcut(_("Move To Deck (Ctrl+D)")))
right += borderImg("addtag", "addtag16", _("Add Tags"),
shortcut(_("Bulk Add Tags (Ctrl+Shift+T)")))
right += borderImg("deletetag", "deletetag16", _("Remove Tags"), shortcut(_(
"Bulk Remove Tags (Ctrl+Alt+T)")))
right += borderImg("delete", "delete16", _("Delete"))
right += "</div>"
return self._body % ("", right, "")
def css(self):
return self._css + """
#header { font-weight: normal; }
a { margin-right: 1em; }
.hitem { overflow: hidden; white-space: nowrap; }
.hitem img { padding: 1px; }
.buttonOn { border: 1px solid #aaa; padding: 0px !important; }
"""
# Link handling
######################################################################
# Dispatch toolbar button clicks to the matching browser action.
def _linkHandler(self, l):
if l == "anki":
self.showMenu()
elif l == "add":
self.browser.mw.onAddCard()
elif l == "delete":
self.browser.deleteNotes()
elif l == "setDeck":
self.browser.setDeck()
# icons
elif l == "info":
self.browser.showCardInfo()
elif l == "mark":
self.browser.onMark()
elif l == "pause":
self.browser.onSuspend()
elif l == "addtag":
self.browser.addTags()
elif l == "deletetag":
self.browser.deleteTags()
# Favourites button
######################################################################
class FavouritesLineEdit(QLineEdit):
    """Search line edit with an embedded button that saves the current
    search as a sidebar favourite, or deletes it when already saved."""
    buttonClicked = pyqtSignal(bool)

    def __init__(self, mw, browser, parent=None):
        super(FavouritesLineEdit, self).__init__(parent)
        self.mw = mw
        self.browser = browser
        # add conf if missing
        if 'savedFilters' not in self.mw.col.conf:
            self.mw.col.conf['savedFilters'] = {}
        self.button = QToolButton(self)
        self.button.setStyleSheet('border: 0px;')
        self.button.setCursor(Qt.ArrowCursor)
        self.button.clicked.connect(self.buttonClicked.emit)
        self.setIcon(':/icons/emblem-favorite-off.png')
        # flag to raise save or delete dialog on button click
        self.doSave = True
        # name of current saved filter (if query matches)
        self.name = None
        self.buttonClicked.connect(self.onClicked)
        self.textChanged.connect(self.updateButton)

    def resizeEvent(self, event):
        """Keep the button glued to the right edge of the line edit."""
        buttonSize = self.button.sizeHint()
        frameWidth = self.style().pixelMetric(QStyle.PM_DefaultFrameWidth)
        # Qt geometry APIs require ints; the old code passed floats
        # (true division and * 1.5), which newer PyQt rejects
        self.button.move(self.rect().right() - frameWidth - buttonSize.width(),
                         (self.rect().bottom() - buttonSize.height() + 1) // 2)
        self.setTextMargins(0, 0, int(buttonSize.width() * 1.5), 0)
        super(FavouritesLineEdit, self).resizeEvent(event)

    def setIcon(self, path):
        self.button.setIcon(QIcon(path))

    def setText(self, txt):
        super(FavouritesLineEdit, self).setText(txt)
        self.updateButton()

    def updateButton(self, reset=True):
        """Switch between save and delete mode depending on whether the
        current text matches a saved query."""
        txt = str(self.text()).strip()
        for key, value in list(self.mw.col.conf['savedFilters'].items()):
            if txt == value:
                self.doSave = False
                self.name = key
                self.setIcon(QIcon(":/icons/emblem-favorite.png"))
                return
        self.doSave = True
        self.setIcon(QIcon(":/icons/emblem-favorite-off.png"))

    def onClicked(self):
        if self.doSave:
            self.saveClicked()
        else:
            self.deleteClicked()

    def saveClicked(self):
        """Prompt for a name and store the current search string."""
        txt = str(self.text()).strip()
        dlg = QInputDialog(self)
        dlg.setInputMode(QInputDialog.TextInput)
        dlg.setLabelText(_("The current search terms will be added as a new "
                           "item in the sidebar.\n"
                           "Search name:"))
        dlg.setWindowTitle(_("Save search"))
        ok = dlg.exec_()
        name = dlg.textValue()
        if ok:
            self.mw.col.conf['savedFilters'][name] = txt
            self.mw.col.setMod()
        self.updateButton()
        self.browser.buildTree()

    def deleteClicked(self):
        """Confirm and remove the saved search matching the current text."""
        msg = _('Remove "%s" from your saved searches?') % self.name
        ok = QMessageBox.question(self, _('Remove search'),
                                  msg, QMessageBox.Yes, QMessageBox.No)
        if ok == QMessageBox.Yes:
            self.mw.col.conf['savedFilters'].pop(self.name, None)
            self.mw.col.setMod()
        self.updateButton()
        self.browser.buildTree()
| Stvad/anki | aqt/browser.py | Python | agpl-3.0 | 70,604 |
from pyqrllib.pyqrllib import bin2hstr
from qrl.core.State import State
from qrl.core.StateContainer import StateContainer
from qrl.core.VoteStats import VoteStats
from qrl.core.OptimizedAddressState import OptimizedAddressState
from qrl.core.MultiSigAddressState import MultiSigAddressState
from qrl.core.misc import logger
from qrl.core.txs.Transaction import Transaction
from qrl.crypto.misc import sha256
class MultiSigSpend(Transaction):
"""
MultiSigSpend for the transaction of QRL from a multi sig wallet to another wallet.
"""
# Thin wrapper: all state lives in the underlying protobuf message.
def __init__(self, protobuf_transaction=None):
super(MultiSigSpend, self).__init__(protobuf_transaction)
# Source multi-sig wallet address (bytes).
@property
def multi_sig_address(self):
return self._data.multi_sig_spend.multi_sig_address
# Destination addresses (repeated bytes), parallel to `amounts`.
@property
def addrs_to(self):
return self._data.multi_sig_spend.addrs_to
@property
def total_amount(self):
    """Sum of all output amounts."""
    return sum(self.amounts)
# Output amounts, parallel to `addrs_to`.
@property
def amounts(self):
return self._data.multi_sig_spend.amounts
# Block height after which this spend proposal expires.
@property
def expiry_block_number(self):
return self._data.multi_sig_spend.expiry_block_number
def get_data_hash(self):
    """Serialize the spend fields deterministically and return their sha256."""
    chunks = [
        self.master_addr,
        self.fee.to_bytes(8, byteorder='big', signed=False),
        self.multi_sig_address,
        self.expiry_block_number.to_bytes(8, byteorder='big', signed=False),
    ]
    for idx in range(len(self.addrs_to)):
        chunks.append(self.addrs_to[idx])
        chunks.append(self.amounts[idx].to_bytes(8, byteorder='big', signed=False))
    return sha256(b''.join(chunks))
@staticmethod
def create(multi_sig_address: bytes,
           addrs_to: list,
           amounts: list,
           expiry_block_number: int,
           fee: int,
           xmss_pk,
           master_addr: bytes = None):
    """Build a MultiSigSpend, populate its protobuf fields, and run
    stateless validation before returning it."""
    tx = MultiSigSpend()
    if master_addr:
        tx._data.master_addr = master_addr
    tx._data.public_key = bytes(xmss_pk)
    spend = tx._data.multi_sig_spend
    spend.multi_sig_address = multi_sig_address
    spend.addrs_to.extend(addrs_to)
    spend.amounts.extend(amounts)
    spend.expiry_block_number = expiry_block_number
    tx._data.fee = int(fee)
    tx.validate_or_raise(verify_signature=False)
    return tx
def _validate_custom(self):
    """Stateless validation of the spend fields.

    Returns False (after logging a warning) on the first failed check,
    True when every check passes.
    """
    for amount in self.amounts:
        if amount == 0:
            logger.warning('Amount cannot be 0 - %s', self.amounts)
            # fixed: this previously logged "Invalid TransferTransaction",
            # a copy-paste from the transfer transaction class
            logger.warning('Invalid MultiSigSpend Transaction')
            return False
    if self.fee < 0:
        logger.warning('MultiSigSpend [%s] Invalid Fee = %d', bin2hstr(self.txhash), self.fee)
        return False
    if len(self.addrs_to) == 0:
        logger.warning('[MultiSigSpend] No addrs_to found')
        return False
    if len(self.addrs_to) != len(self.amounts):
        logger.warning('[MultiSigSpend] Mismatch number of addresses to & amounts')
        logger.warning('>> Length of addrs_to %s', len(self.addrs_to))
        logger.warning('>> Length of amounts %s', len(self.amounts))
        return False
    if not MultiSigAddressState.address_is_valid(self.multi_sig_address):
        logger.warning('[MultiSigSpend] Invalid MultiSig Address')
        logger.warning('Multi Sig Address %s', self.multi_sig_address)
        return False
    if not OptimizedAddressState.address_is_valid(self.addr_from):
        logger.warning('[MultiSigSpend] Invalid address addr_from: %s', bin2hstr(self.addr_from))
        return False
    # destinations may be either plain or multi-sig addresses
    for addr_to in self.addrs_to:
        if not (OptimizedAddressState.address_is_valid(addr_to) or MultiSigAddressState.address_is_valid(addr_to)):
            logger.warning('[MultiSigSpend] Invalid address addr_to: %s', bin2hstr(addr_to))
            return False
    return True
def _validate_extended(self, state_container: StateContainer):
    """Stateful validation against the current chain state.

    Checks hard-fork activation, output limits, address-state sanity,
    balances (fee paid by the proposer, total amount covered by the
    multi-sig wallet), expiry and signatory membership.
    """
    if state_container.block_number < state_container.current_dev_config.hard_fork_heights[0]:
        logger.warning("[MultiSigSpend] Hard Fork Feature not yet activated")
        return False
    if len(self.addrs_to) > state_container.current_dev_config.transaction_multi_output_limit:
        logger.warning('[MultiSigSpend] Number of addresses exceeds max limit')
        logger.warning('Number of addresses %s', len(self.addrs_to))
        logger.warning('Number of amounts %s', len(self.amounts))
        return False
    addr_from_state = state_container.addresses_state[self.addr_from]
    if self.multi_sig_address not in state_container.addresses_state:
        logger.error("[MultiSigSpend] Multi Sig address state not found in state_container %s",
                     self.multi_sig_address)
        return False
    multi_sig_address_state = state_container.addresses_state[self.multi_sig_address]
    block_number = state_container.block_number
    if addr_from_state.address != self.addr_from:
        logger.error("[MultiSigSpend] Unexpected addr_from_state")
        logger.error("Expecting State for address %s, but got state for address %s",
                     bin2hstr(self.addr_from),
                     bin2hstr(addr_from_state.address))
        return False
    if multi_sig_address_state.address != self.multi_sig_address:
        logger.error("[MultiSigSpend] Unexpected multi sig address state")
        logger.error("Expecting State for address %s, but got state for address %s",
                     bin2hstr(self.multi_sig_address),
                     bin2hstr(multi_sig_address_state.address))
        return False
    tx_balance = addr_from_state.balance
    total_amount = self.total_amount
    # the proposer pays the fee from their own balance
    if tx_balance < self.fee:
        logger.info('[MultiSigSpend] State validation failed for %s because: Insufficient funds',
                    bin2hstr(self.txhash))
        logger.info('address: %s, balance: %s, fee: %s', bin2hstr(self.addr_from), tx_balance, self.fee)
        return False
    # the multi-sig wallet must cover the total spend amount
    if multi_sig_address_state.balance < total_amount:
        logger.info('[MultiSigSpend] State validation failed for %s because: Insufficient funds',
                    bin2hstr(self.txhash))
        # fixed: this previously logged the proposer's balance and the fee
        # instead of the wallet balance and the amount actually checked
        logger.info('address: %s, balance: %s, amount: %s',
                    bin2hstr(self.multi_sig_address),
                    multi_sig_address_state.balance,
                    total_amount)
        return False
    # Multi Sig Spend considered to be expired after block having block number equals to
    # self.expiry_block_number gets added into the main chain
    if self.expiry_block_number <= block_number:
        logger.info('[MultiSigSpend] State validation failed for %s due to invalid expiry_block_number',
                    bin2hstr(self.txhash))
        logger.info('Chain Height: %s, Expiry Block Number: %s',
                    block_number,
                    self.expiry_block_number)
        return False
    if self.addr_from not in multi_sig_address_state.signatories:
        logger.info('[MultiSigSpend] Address is not in the signatories list: %s',
                    bin2hstr(self.addr_from))
        return False
    return True
def set_affected_address(self, addresses_set: set):
    """Collect every address whose state this transaction touches.

    Extends the base-class set (addr_from / PK-related addresses) with the
    multi-sig wallet address itself and every destination address.
    """
    super().set_affected_address(addresses_set)
    addresses_set.add(self.multi_sig_address)
    # All spend destinations are affected as well.
    addresses_set.update(self.addrs_to)
def apply(self,
          state: State,
          state_container: StateContainer) -> bool:
    """Apply this MultiSigSpend transaction to the in-memory state.

    Deducts the fee from the initiator, registers the tx hash in the
    relevant paginated indexes, records the spend proposal and creates the
    VoteStats object that signatories will later vote on.  Returns the
    result of the public-key bookkeeping (nonce / OTS state changes).
    """
    address_state = state_container.addresses_state[self.addr_from]
    # Initiator pays only the fee here; the spend amount leaves the
    # multi-sig address later, once the vote threshold is reached.
    address_state.update_balance(state_container, self.fee, subtract=True)
    state_container.paginated_tx_hash.insert(address_state, self.txhash)
    # TODO: Following line might not be needed
    state_container.multi_sig_spend_txs[self.txhash] = self
    multi_sig_address_state = state_container.addresses_state[self.multi_sig_address]
    # Make the pending spend visible to every signatory's paginated index.
    for signatory_address in multi_sig_address_state.signatories:
        signatory_address_state = state_container.addresses_state[signatory_address]
        state_container.paginated_multi_sig_spend.insert(signatory_address_state, self.txhash)
    state_container.paginated_tx_hash.insert(multi_sig_address_state, self.txhash)
    state_container.paginated_multi_sig_spend.insert(multi_sig_address_state, self.txhash)
    # Fresh vote tally for this proposal; expires at expiry_block_number.
    vote_stats = VoteStats.create(self.multi_sig_address,
                                  self.txhash,
                                  state_container.addresses_state[self.multi_sig_address].signatories,
                                  self.expiry_block_number)
    state_container.votes_stats[self.txhash] = vote_stats
    return self._apply_state_changes_for_PK(state_container)
def revert(self,
           state: State,
           state_container: StateContainer) -> bool:
    """Undo :meth:`apply` when the containing block is rolled back.

    Refunds the fee, removes every paginated-index entry inserted by
    ``apply`` and deletes the associated VoteStats (both the persisted
    copy and the in-memory one), then reverts the PK bookkeeping.
    """
    address_state = state_container.addresses_state[self.addr_from]
    # Refund the fee taken in apply().
    address_state.update_balance(state_container, self.fee)
    state_container.paginated_tx_hash.remove(address_state, self.txhash)
    multi_sig_address_state = state_container.addresses_state[self.multi_sig_address]
    for signatory_address in multi_sig_address_state.signatories:
        signatory_address_state = state_container.addresses_state[signatory_address]
        state_container.paginated_multi_sig_spend.remove(signatory_address_state, self.txhash)
    state_container.paginated_tx_hash.remove(multi_sig_address_state, self.txhash)
    state_container.paginated_multi_sig_spend.remove(multi_sig_address_state, self.txhash)
    # Remove the persisted vote tally as well as the cached one.
    VoteStats.delete_state(state, self.txhash, state_container.batch)
    if self.txhash in state_container.votes_stats:
        del state_container.votes_stats[self.txhash]
    return self._revert_state_changes_for_PK(state_container)
| theQRL/QRL | src/qrl/core/txs/multisig/MultiSigSpend.py | Python | mit | 10,504 |
"""
Provide a mock switch platform.
Call init before using it in your tests to ensure clean test data.
"""
from homeassistant.const import STATE_ON, STATE_OFF
from tests.common import MockToggleDevice
DEVICES = []
def init(empty=False):
    """(Re)build the module-level mock devices.

    With ``empty=True`` the platform starts without any devices.
    """
    global DEVICES
    if empty:
        DEVICES = []
    else:
        DEVICES = [
            MockToggleDevice('Ceiling', STATE_ON),
            MockToggleDevice('Ceiling', STATE_OFF),
            MockToggleDevice(None, STATE_OFF),
        ]
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
    """Return mock devices."""
    # Hand the pre-built module-level mocks to Home Assistant; call init()
    # beforehand in tests to control which devices are present.
    add_devices_callback(DEVICES)
| MungoRae/home-assistant | tests/testing_config/custom_components/light/test.py | Python | apache-2.0 | 630 |
# -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsAggregateCalculator.
From build dir, run: ctest -R PyQgsAggregateCalculator -V
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Nyall Dawson'
__date__ = '16/05/2016'
__copyright__ = 'Copyright 2016, The QGIS Project'
import qgis # NOQA
from qgis.core import (QgsAggregateCalculator,
QgsVectorLayer,
QgsFeature,
QgsInterval,
QgsExpressionContext,
QgsExpressionContextScope,
QgsGeometry,
QgsFeatureRequest,
NULL
)
from qgis.PyQt.QtCore import QDateTime, QDate, QTime
from qgis.testing import unittest, start_app
from utilities import compareWkt
start_app()
class TestQgsAggregateCalculator(unittest.TestCase):
    """Unit tests for QgsAggregateCalculator over memory layers."""

    def testLayer(self):
        """ Test setting/retrieving layer """
        a = QgsAggregateCalculator(None)
        self.assertEqual(a.layer(), None)
        # should not crash
        val, ok = a.calculate(QgsAggregateCalculator.Sum, 'field')
        self.assertFalse(ok)
        layer = QgsVectorLayer("Point?field=fldint:integer&field=flddbl:double",
                               "layer", "memory")
        a = QgsAggregateCalculator(layer)
        self.assertEqual(a.layer(), layer)

    def testParameters(self):
        """ Test setting parameters"""
        a = QgsAggregateCalculator(None)
        params = QgsAggregateCalculator.AggregateParameters()
        params.filter = 'string filter'
        params.delimiter = 'delim'
        a.setParameters(params)
        self.assertEqual(a.filter(), 'string filter')
        self.assertEqual(a.delimiter(), 'delim')

    def testGeometry(self):
        """ Test calculation of aggregates on geometry expressions """
        layer = QgsVectorLayer("Point?",
                               "layer", "memory")
        pr = layer.dataProvider()
        # must be same length:
        geometry_values = [QgsGeometry.fromWkt("Point ( 0 0 )"), QgsGeometry.fromWkt("Point ( 1 1 )"), QgsGeometry.fromWkt("Point ( 2 2 )")]
        features = []
        for i in range(len(geometry_values)):
            f = QgsFeature()
            f.setGeometry(geometry_values[i])
            features.append(f)
        self.assertTrue(pr.addFeatures(features))
        agg = QgsAggregateCalculator(layer)
        val, ok = agg.calculate(QgsAggregateCalculator.GeometryCollect, '$geometry')
        self.assertTrue(ok)
        expwkt = "MultiPoint ((0 0), (1 1), (2 2))"
        wkt = val.asWkt()
        self.assertTrue(compareWkt(expwkt, wkt), "Expected:\n%s\nGot:\n%s\n" % (expwkt, wkt))

    def testNumeric(self):
        """ Test calculation of aggregates on numeric fields"""
        layer = QgsVectorLayer("Point?field=fldint:integer&field=flddbl:double",
                               "layer", "memory")
        pr = layer.dataProvider()
        # must be same length:
        int_values = [4, 2, 3, 2, 5, None, 8]
        dbl_values = [5.5, 3.5, 7.5, 5, 9, None, 7]
        self.assertEqual(len(int_values), len(dbl_values))
        features = []
        for i in range(len(int_values)):
            f = QgsFeature()
            f.setFields(layer.fields())
            f.setAttributes([int_values[i], dbl_values[i]])
            features.append(f)
        assert pr.addFeatures(features)
        tests = [[QgsAggregateCalculator.Count, 'fldint', 6],
                 [QgsAggregateCalculator.Count, 'flddbl', 6],
                 [QgsAggregateCalculator.Sum, 'fldint', 24],
                 [QgsAggregateCalculator.Sum, 'flddbl', 37.5],
                 [QgsAggregateCalculator.Mean, 'fldint', 4],
                 [QgsAggregateCalculator.Mean, 'flddbl', 6.25],
                 [QgsAggregateCalculator.StDev, 'fldint', 2.0816],
                 [QgsAggregateCalculator.StDev, 'flddbl', 1.7969],
                 [QgsAggregateCalculator.StDevSample, 'fldint', 2.2803],
                 [QgsAggregateCalculator.StDevSample, 'flddbl', 1.9685],
                 [QgsAggregateCalculator.Min, 'fldint', 2],
                 [QgsAggregateCalculator.Min, 'flddbl', 3.5],
                 [QgsAggregateCalculator.Max, 'fldint', 8],
                 [QgsAggregateCalculator.Max, 'flddbl', 9],
                 [QgsAggregateCalculator.Range, 'fldint', 6],
                 [QgsAggregateCalculator.Range, 'flddbl', 5.5],
                 [QgsAggregateCalculator.Median, 'fldint', 3.5],
                 [QgsAggregateCalculator.Median, 'flddbl', 6.25],
                 [QgsAggregateCalculator.CountDistinct, 'fldint', 5],
                 [QgsAggregateCalculator.CountDistinct, 'flddbl', 6],
                 [QgsAggregateCalculator.CountMissing, 'fldint', 1],
                 [QgsAggregateCalculator.CountMissing, 'flddbl', 1],
                 [QgsAggregateCalculator.FirstQuartile, 'fldint', 2],
                 [QgsAggregateCalculator.FirstQuartile, 'flddbl', 5.0],
                 [QgsAggregateCalculator.ThirdQuartile, 'fldint', 5.0],
                 [QgsAggregateCalculator.ThirdQuartile, 'flddbl', 7.5],
                 [QgsAggregateCalculator.InterQuartileRange, 'fldint', 3.0],
                 [QgsAggregateCalculator.InterQuartileRange, 'flddbl', 2.5],
                 [QgsAggregateCalculator.ArrayAggregate, 'fldint', int_values],
                 [QgsAggregateCalculator.ArrayAggregate, 'flddbl', dbl_values],
                 ]
        agg = QgsAggregateCalculator(layer)
        for t in tests:
            val, ok = agg.calculate(t[0], t[1])
            self.assertTrue(ok)
            if isinstance(t[2], (int, list)):
                self.assertEqual(val, t[2])
            else:
                self.assertAlmostEqual(val, t[2], 3)
        # bad tests - the following stats should not be calculatable for numeric fields
        for t in [QgsAggregateCalculator.StringMinimumLength,
                  QgsAggregateCalculator.StringMaximumLength]:
            val, ok = agg.calculate(t, 'fldint')
            self.assertFalse(ok)
            val, ok = agg.calculate(t, 'flddbl')
            self.assertFalse(ok)
        # with order by
        agg = QgsAggregateCalculator(layer)
        val, ok = agg.calculate(QgsAggregateCalculator.ArrayAggregate, 'fldint')
        self.assertEqual(val, [4, 2, 3, 2, 5, NULL, 8])
        params = QgsAggregateCalculator.AggregateParameters()
        params.orderBy = QgsFeatureRequest.OrderBy([QgsFeatureRequest.OrderByClause('fldint')])
        agg.setParameters(params)
        val, ok = agg.calculate(QgsAggregateCalculator.ArrayAggregate, 'fldint')
        self.assertEqual(val, [2, 2, 3, 4, 5, 8, NULL])
        params.orderBy = QgsFeatureRequest.OrderBy([QgsFeatureRequest.OrderByClause('flddbl')])
        agg.setParameters(params)
        val, ok = agg.calculate(QgsAggregateCalculator.ArrayAggregate, 'fldint')
        self.assertEqual(val, [2, 2, 4, 8, 3, 5, NULL])

    def testString(self):
        """ Test calculation of aggregates on string fields"""
        layer = QgsVectorLayer("Point?field=fldstring:string", "layer", "memory")
        pr = layer.dataProvider()
        values = ['cc', 'aaaa', 'bbbbbbbb', 'aaaa', 'eeee', '', 'eeee', '', 'dddd']
        features = []
        for v in values:
            f = QgsFeature()
            f.setFields(layer.fields())
            f.setAttributes([v])
            features.append(f)
        assert pr.addFeatures(features)
        tests = [[QgsAggregateCalculator.Count, 'fldstring', 9],
                 [QgsAggregateCalculator.CountDistinct, 'fldstring', 6],
                 [QgsAggregateCalculator.CountMissing, 'fldstring', 2],
                 [QgsAggregateCalculator.Min, 'fldstring', 'aaaa'],
                 [QgsAggregateCalculator.Max, 'fldstring', 'eeee'],
                 [QgsAggregateCalculator.StringMinimumLength, 'fldstring', 0],
                 [QgsAggregateCalculator.StringMaximumLength, 'fldstring', 8],
                 [QgsAggregateCalculator.ArrayAggregate, 'fldstring', values],
                 ]
        agg = QgsAggregateCalculator(layer)
        for t in tests:
            val, ok = agg.calculate(t[0], t[1])
            self.assertTrue(ok)
            self.assertEqual(val, t[2])
        # test string concatenation
        agg.setDelimiter(',')
        self.assertEqual(agg.delimiter(), ',')
        val, ok = agg.calculate(QgsAggregateCalculator.StringConcatenate, 'fldstring')
        self.assertTrue(ok)
        self.assertEqual(val, 'cc,aaaa,bbbbbbbb,aaaa,eeee,,eeee,,dddd')
        val, ok = agg.calculate(QgsAggregateCalculator.StringConcatenateUnique, 'fldstring')
        self.assertTrue(ok)
        self.assertEqual(val, 'cc,aaaa,bbbbbbbb,eeee,,dddd')
        # bad tests - the following stats should not be calculatable for string fields
        for t in [QgsAggregateCalculator.Sum,
                  QgsAggregateCalculator.Mean,
                  QgsAggregateCalculator.Median,
                  QgsAggregateCalculator.StDev,
                  QgsAggregateCalculator.StDevSample,
                  QgsAggregateCalculator.Range,
                  QgsAggregateCalculator.FirstQuartile,
                  QgsAggregateCalculator.ThirdQuartile,
                  QgsAggregateCalculator.InterQuartileRange
                  ]:
            val, ok = agg.calculate(t, 'fldstring')
            self.assertFalse(ok)
        # with order by
        agg = QgsAggregateCalculator(layer)
        val, ok = agg.calculate(QgsAggregateCalculator.ArrayAggregate, 'fldstring')
        self.assertEqual(val, ['cc', 'aaaa', 'bbbbbbbb', 'aaaa', 'eeee', '', 'eeee', '', 'dddd'])
        params = QgsAggregateCalculator.AggregateParameters()
        params.orderBy = QgsFeatureRequest.OrderBy([QgsFeatureRequest.OrderByClause('fldstring')])
        agg.setParameters(params)
        val, ok = agg.calculate(QgsAggregateCalculator.ArrayAggregate, 'fldstring')
        self.assertEqual(val, ['', '', 'aaaa', 'aaaa', 'bbbbbbbb', 'cc', 'dddd', 'eeee', 'eeee'])
        val, ok = agg.calculate(QgsAggregateCalculator.StringConcatenate, 'fldstring')
        self.assertEqual(val, 'aaaaaaaabbbbbbbbccddddeeeeeeee')
        val, ok = agg.calculate(QgsAggregateCalculator.Minority, 'fldstring')
        self.assertEqual(val, 'bbbbbbbb')
        val, ok = agg.calculate(QgsAggregateCalculator.Majority, 'fldstring')
        self.assertEqual(val, '')

    def testDateTime(self):
        """ Test calculation of aggregates on date/datetime fields"""
        layer = QgsVectorLayer("Point?field=flddate:date&field=flddatetime:datetime", "layer", "memory")
        pr = layer.dataProvider()
        # must be same length:
        datetime_values = [QDateTime(QDate(2015, 3, 4), QTime(11, 10, 54)),
                           QDateTime(QDate(2011, 1, 5), QTime(15, 3, 1)),
                           QDateTime(QDate(2015, 3, 4), QTime(11, 10, 54)),
                           QDateTime(QDate(2015, 3, 4), QTime(11, 10, 54)),
                           QDateTime(QDate(2019, 12, 28), QTime(23, 10, 1)),
                           QDateTime(),
                           QDateTime(QDate(1998, 1, 2), QTime(1, 10, 54)),
                           QDateTime(),
                           QDateTime(QDate(2011, 1, 5), QTime(11, 10, 54))]
        date_values = [QDate(2015, 3, 4),
                       QDate(2015, 3, 4),
                       QDate(2019, 12, 28),
                       QDate(),
                       QDate(1998, 1, 2),
                       QDate(),
                       QDate(2011, 1, 5),
                       QDate(2011, 1, 5),
                       QDate(2011, 1, 5)]
        self.assertEqual(len(datetime_values), len(date_values))
        features = []
        for i in range(len(datetime_values)):
            f = QgsFeature()
            f.setFields(layer.fields())
            f.setAttributes([date_values[i], datetime_values[i]])
            features.append(f)
        assert pr.addFeatures(features)
        tests = [[QgsAggregateCalculator.Count, 'flddatetime', 9],
                 [QgsAggregateCalculator.Count, 'flddate', 9],
                 [QgsAggregateCalculator.CountDistinct, 'flddatetime', 6],
                 [QgsAggregateCalculator.CountDistinct, 'flddate', 5],
                 [QgsAggregateCalculator.CountMissing, 'flddatetime', 2],
                 [QgsAggregateCalculator.CountMissing, 'flddate', 2],
                 [QgsAggregateCalculator.Min, 'flddatetime', QDateTime(QDate(1998, 1, 2), QTime(1, 10, 54))],
                 [QgsAggregateCalculator.Min, 'flddate', QDateTime(QDate(1998, 1, 2), QTime(0, 0, 0))],
                 [QgsAggregateCalculator.Max, 'flddatetime', QDateTime(QDate(2019, 12, 28), QTime(23, 10, 1))],
                 [QgsAggregateCalculator.Max, 'flddate', QDateTime(QDate(2019, 12, 28), QTime(0, 0, 0))],
                 [QgsAggregateCalculator.Range, 'flddatetime', QgsInterval(693871147)],
                 [QgsAggregateCalculator.Range, 'flddate', QgsInterval(693792000)],
                 [QgsAggregateCalculator.ArrayAggregate, 'flddatetime', [None if v.isNull() else v for v in datetime_values]],
                 [QgsAggregateCalculator.ArrayAggregate, 'flddate', [None if v.isNull() else v for v in date_values]],
                 ]
        agg = QgsAggregateCalculator(layer)
        for t in tests:
            val, ok = agg.calculate(t[0], t[1])
            self.assertTrue(ok)
            self.assertEqual(val, t[2])
        # bad tests - the following stats should not be calculatable for string fields
        for t in [QgsAggregateCalculator.Sum,
                  QgsAggregateCalculator.Mean,
                  QgsAggregateCalculator.Median,
                  QgsAggregateCalculator.StDev,
                  QgsAggregateCalculator.StDevSample,
                  QgsAggregateCalculator.Minority,
                  QgsAggregateCalculator.Majority,
                  QgsAggregateCalculator.FirstQuartile,
                  QgsAggregateCalculator.ThirdQuartile,
                  QgsAggregateCalculator.InterQuartileRange,
                  QgsAggregateCalculator.StringMinimumLength,
                  QgsAggregateCalculator.StringMaximumLength,
                  ]:
            val, ok = agg.calculate(t, 'flddatetime')
            self.assertFalse(ok)

    def testFilter(self):
        """ test calculating aggregate with filter """
        layer = QgsVectorLayer("Point?field=fldint:integer", "layer", "memory")
        pr = layer.dataProvider()
        int_values = [4, 2, 3, 2, 5, None, 8]
        features = []
        for v in int_values:
            f = QgsFeature()
            f.setFields(layer.fields())
            f.setAttributes([v])
            features.append(f)
        assert pr.addFeatures(features)
        agg = QgsAggregateCalculator(layer)
        filter_string = "fldint > 2"
        agg.setFilter(filter_string)
        self.assertEqual(agg.filter(), filter_string)
        val, ok = agg.calculate(QgsAggregateCalculator.Sum, 'fldint')
        self.assertTrue(ok)
        self.assertEqual(val, 20)
        # remove filter and retest
        agg.setFilter(None)
        val, ok = agg.calculate(QgsAggregateCalculator.Sum, 'fldint')
        self.assertTrue(ok)
        self.assertEqual(val, 24)

    def testExpression(self):
        """ test aggregate calculation using an expression """
        # numeric
        layer = QgsVectorLayer("Point?field=fldint:integer", "layer", "memory")
        pr = layer.dataProvider()
        int_values = [4, 2, 3, 2, 5, None, 8]
        features = []
        for v in int_values:
            f = QgsFeature()
            f.setFields(layer.fields())
            f.setAttributes([v])
            features.append(f)
        assert pr.addFeatures(features)
        # int
        agg = QgsAggregateCalculator(layer)
        val, ok = agg.calculate(QgsAggregateCalculator.Sum, 'fldint * 2')
        self.assertTrue(ok)
        self.assertEqual(val, 48)
        # double
        val, ok = agg.calculate(QgsAggregateCalculator.Sum, 'fldint * 1.5')
        self.assertTrue(ok)
        self.assertEqual(val, 36)
        # datetime
        val, ok = agg.calculate(QgsAggregateCalculator.Max, "to_date('2012-05-04') + to_interval( fldint || ' day' )")
        self.assertTrue(ok)
        self.assertEqual(val, QDateTime(QDate(2012, 5, 12), QTime(0, 0, 0)))
        # date
        val, ok = agg.calculate(QgsAggregateCalculator.Min, "to_date(to_date('2012-05-04') + to_interval( fldint || ' day' ))")
        self.assertTrue(ok)
        self.assertEqual(val, QDateTime(QDate(2012, 5, 6), QTime(0, 0, 0)))
        # string
        val, ok = agg.calculate(QgsAggregateCalculator.Max, "fldint || ' oranges'")
        self.assertTrue(ok)
        self.assertEqual(val, '8 oranges')
        # geometry
        val, ok = agg.calculate(QgsAggregateCalculator.GeometryCollect, "make_point( coalesce(fldint,0), 2 )")
        self.assertTrue(ok)
        # BUGFIX: the old `self.assertTrue(val.asWkt(), '...')` passed the
        # expected WKT as assertTrue's *msg* argument, so it succeeded for
        # ANY non-empty WKT. Compare the geometry properly, as testGeometry
        # does.
        expwkt = 'MultiPoint ((4 2),(2 2),(3 2),(2 2),(5 2),(0 2),(8 2))'
        wkt = val.asWkt()
        self.assertTrue(compareWkt(expwkt, wkt), "Expected:\n%s\nGot:\n%s\n" % (expwkt, wkt))
        # try a bad expression
        val, ok = agg.calculate(QgsAggregateCalculator.Max, "not_a_field || ' oranges'")
        self.assertFalse(ok)
        val, ok = agg.calculate(QgsAggregateCalculator.Max, "5+")
        self.assertFalse(ok)
        # test expression context
        # check default context first
        # should have layer variables:
        val, ok = agg.calculate(QgsAggregateCalculator.Min, "@layer_name")
        self.assertTrue(ok)
        self.assertEqual(val, 'layer')
        # but not custom variables:
        val, ok = agg.calculate(QgsAggregateCalculator.Min, "@my_var")
        self.assertTrue(ok)
        self.assertEqual(val, NULL)
        # test with manual expression context
        scope = QgsExpressionContextScope()
        scope.setVariable('my_var', 5)
        context = QgsExpressionContext()
        context.appendScope(scope)
        val, ok = agg.calculate(QgsAggregateCalculator.Min, "@my_var", context)
        self.assertTrue(ok)
        self.assertEqual(val, 5)
        # test with subset
        agg = QgsAggregateCalculator(layer)  # reset to remove expression filter
        agg.setFidsFilter([1, 2])
        val, ok = agg.calculate(QgsAggregateCalculator.Sum, 'fldint')
        self.assertTrue(ok)
        self.assertEqual(val, 6.0)
        # test with empty subset
        agg.setFidsFilter(list())
        val, ok = agg.calculate(QgsAggregateCalculator.Sum, 'fldint')
        self.assertTrue(ok)
        self.assertEqual(val, 0.0)

    def testExpressionNullValuesAtStart(self):
        """ test aggregate calculation using an expression which returns null values at first """
        # numeric
        layer = QgsVectorLayer("Point?field=fldstr:string", "layer", "memory")
        pr = layer.dataProvider()
        values = [None, None, None, None, None, None, None, None, None, None, '2', '3', '5']
        features = []
        for v in values:
            f = QgsFeature()
            f.setFields(layer.fields())
            f.setAttributes([v])
            features.append(f)
        assert pr.addFeatures(features)
        # number aggregation
        agg = QgsAggregateCalculator(layer)
        val, ok = agg.calculate(QgsAggregateCalculator.Sum, 'to_int(fldstr)')
        self.assertTrue(ok)
        self.assertEqual(val, 10)
        # string aggregation
        agg.setDelimiter(',')
        val, ok = agg.calculate(QgsAggregateCalculator.StringConcatenate, 'fldstr || \'suffix\'')
        self.assertTrue(ok)
        self.assertEqual(val, ',,,,,,,,,,2suffix,3suffix,5suffix')

    def testExpressionNoMatch(self):
        """ test aggregate calculation using an expression with no features """
        # no features
        layer = QgsVectorLayer("Point?field=fldint:integer", "layer", "memory")
        # sum
        agg = QgsAggregateCalculator(layer)
        val, ok = agg.calculate(QgsAggregateCalculator.Sum, 'fldint * 2')
        self.assertTrue(ok)
        self.assertEqual(val, None)
        # count
        agg = QgsAggregateCalculator(layer)
        val, ok = agg.calculate(QgsAggregateCalculator.Count, 'fldint * 2')
        self.assertTrue(ok)
        self.assertEqual(val, 0)
        # count distinct
        agg = QgsAggregateCalculator(layer)
        val, ok = agg.calculate(QgsAggregateCalculator.CountDistinct, 'fldint * 2')
        self.assertTrue(ok)
        self.assertEqual(val, 0)
        # count missing
        agg = QgsAggregateCalculator(layer)
        val, ok = agg.calculate(QgsAggregateCalculator.CountMissing, 'fldint * 2')
        self.assertTrue(ok)
        self.assertEqual(val, 0)
        # min
        agg = QgsAggregateCalculator(layer)
        val, ok = agg.calculate(QgsAggregateCalculator.Min, 'fldint * 2')
        self.assertTrue(ok)
        self.assertEqual(val, None)
        # max
        agg = QgsAggregateCalculator(layer)
        val, ok = agg.calculate(QgsAggregateCalculator.Max, 'fldint * 2')
        self.assertTrue(ok)
        self.assertEqual(val, None)
        # array_agg
        agg = QgsAggregateCalculator(layer)
        val, ok = agg.calculate(QgsAggregateCalculator.ArrayAggregate, 'fldint * 2')
        self.assertTrue(ok)
        self.assertEqual(val, [])

    def testStringToAggregate(self):
        """ test converting strings to aggregate types """
        tests = [[QgsAggregateCalculator.Count, ' cOUnT '],
                 [QgsAggregateCalculator.CountDistinct, ' count_distinct '],
                 [QgsAggregateCalculator.CountMissing, 'COUNT_MISSING'],
                 [QgsAggregateCalculator.Min, ' MiN'],
                 [QgsAggregateCalculator.Max, 'mAX'],
                 [QgsAggregateCalculator.Sum, 'sum'],
                 [QgsAggregateCalculator.Mean, 'MEAn '],
                 [QgsAggregateCalculator.Median, 'median'],
                 [QgsAggregateCalculator.StDev, 'stdev'],
                 [QgsAggregateCalculator.StDevSample, 'stdevsample'],
                 [QgsAggregateCalculator.Range, 'range'],
                 [QgsAggregateCalculator.Minority, 'minority'],
                 [QgsAggregateCalculator.Majority, 'majority'],
                 [QgsAggregateCalculator.FirstQuartile, 'q1'],
                 [QgsAggregateCalculator.ThirdQuartile, 'q3'],
                 [QgsAggregateCalculator.InterQuartileRange, 'iqr'],
                 [QgsAggregateCalculator.StringMinimumLength, 'min_length'],
                 [QgsAggregateCalculator.StringMaximumLength, 'max_length'],
                 [QgsAggregateCalculator.StringConcatenate, 'concatenate'],
                 [QgsAggregateCalculator.StringConcatenateUnique, 'concatenate_unique'],
                 [QgsAggregateCalculator.GeometryCollect, 'collect']]
        for t in tests:
            agg, ok = QgsAggregateCalculator.stringToAggregate(t[1])
            self.assertTrue(ok)
            self.assertEqual(agg, t[0])
        # test some bad values
        agg, ok = QgsAggregateCalculator.stringToAggregate('')
        self.assertFalse(ok)
        agg, ok = QgsAggregateCalculator.stringToAggregate('bad')
        self.assertFalse(ok)
# Allow running this module directly (outside the ctest harness).
if __name__ == "__main__":
    unittest.main()
| PeterPetrik/QGIS | tests/src/python/test_qgsaggregatecalculator.py | Python | gpl-2.0 | 23,395 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4 nu
import os
import datetime
from path import Path
from reportlab.lib import colors
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.lib.pagesizes import A4
from reportlab.platypus import (Paragraph, Table, TableStyle, Image,
SimpleDocTemplate)
from ramed.app_logging import logger
from ramed.tools.ramed_instance import RamedInstance
from ramed.tools import create_shortcut
BLANK = "néant"
def gen_pdf_export(export_folder, instance):
    """Render one RAMED social-survey *instance* as a PDF file.

    Builds a ReportLab "story" reproducing the paper "FICHE D'ENQUETE
    SOCIALE" form (headers, respondent identity, family composition,
    income/expenses, medical-social history, signature block), writes it to
    ``<export_folder>/<instance.folder_name>/<instance.name>.pdf`` and adds
    a shortcut under ``<export_folder>/PDF``.

    Returns a ``(fname, fpath)`` tuple of the generated file name and path.
    """
    story = []
    styles = getSampleStyleSheet()
    b_style = styles["BodyText"]
    h3 = styles["Heading3"]
    h4 = styles["Heading4"]
    output_folder = os.path.join(export_folder, instance.folder_name)
    Path(output_folder).makedirs_p()
    fname = "{name}.pdf".format(name=instance.name)
    fpath = os.path.join(output_folder, fname)

    # --- helpers for pulling data out of the form dict -------------------

    def format_location(parts):
        # Join the non-empty location components with " / ".
        return " / ".join([part for part in parts if part])

    def concat(parts, sep=" / "):
        # Generic non-empty join with a configurable separator.
        return sep.join([part for part in parts if part])

    def get_lieu_naissance(data, key, village=False):
        # Birth place as (region, cercle, commune, formatted string).
        # NOTE(review): the *village* parameter is never used here — confirm
        # whether village was meant to be included in the formatted value.
        region = data.get('{}region'.format(key))
        cercle = data.get('{}cercle'.format(key))
        commune = data.get('{}commune'.format(key))
        lieu_naissance = format_location([commune, cercle, region])
        return region, cercle, commune, lieu_naissance

    def get_lieu(data, key):
        # Full location including the village level.
        region, cercle, commune, _ = get_lieu_naissance(data, key)
        village = data.get('{}village'.format(key))
        lieu = format_location([village, commune, cercle, region])
        return region, cercle, commune, village, lieu

    def get_other(data, key):
        # Fields with an "other" choice store the free text under
        # '<key>_other'; prefer it when 'other' was selected.
        profession = data.get(key)
        profession_other = data.get('{}_other'.format(key))
        return profession_other if profession == 'other' else profession

    def get_int(data, key, default=0):
        try:
            return int(data.get(key, default))
        except:
            return default

    def get_date(data, key):
        # Parse an ISO 'YYYY-MM-DD' string; None when absent/invalid.
        try:
            return datetime.date(*[int(x) for x in data.get(key).split('-')])
        except:
            return None

    def get_dob(data, key, female=False):
        # Date of birth may be exact ('ddn') or approximate ('ne-vers');
        # returns the raw pieces plus a human-readable French sentence.
        type_naissance = data.get('{}type-naissance'.format(key), 'ne-vers')
        annee_naissance = get_int(data, '{}annee-naissance'.format(key), None)
        ddn = get_date(data, '{}ddn'.format(key))
        human = "Né{f} ".format(f="e" if female else "")
        if type_naissance == 'ddn':
            human += "le {}".format(ddn.strftime("%d-%m-%Y"))
        else:
            human += "vers {}".format(annee_naissance)
        return type_naissance, annee_naissance, ddn, human

    def get_bool(data, key, default='non'):
        # Returns (bool, original 'oui'/'non' text).
        text = data.get(key, default)
        return text == 'oui', text

    def get_nom(data, p='', s=''):
        # Last name / first names / combined display name, with the form's
        # optional prefix *p* and suffix *s* on the field keys.
        nom = RamedInstance.clean_lastname(
            data.get('{p}nom{s}'.format(p=p, s=s)))
        prenoms = RamedInstance.clean_firstnames(data.get('{p}prenoms{s}'
                                                 .format(p=p, s=s)))
        name = RamedInstance.clean_name(nom, prenoms)
        return nom, prenoms, name

    def draw_String_title(text):
        # Centered bold paragraph markup for titles.
        return """<para align=center spaceb=5><b><font size=11>{}</font>
        </b></para>""".format(text)

    def draw_String(label, text):
        # "label: text" paragraph markup with an underlined label.
        # if len(text) == 0:
        #     text = BLANK
        return """<para align=left spaceb=5><font size=9><u>{label}</u></font>
        : {text}</para>""".format(label=label, text=text)

    # --- extract form values ---------------------------------------------

    # location (not printed on the paper form)
    lieu_region, lieu_cercle, lieu_commune, lieu_village, lieu = get_lieu(
        instance, 'lieu_')
    numero_enquete = instance.get('numero') or ""
    objet_enquete = instance.get('objet') or instance.get('objet_other')
    identifiant_enqueteur = instance.get('enqueteur') or BLANK
    demandeur = instance.get('demandeur') or BLANK
    # respondent (enquêté)
    nom, prenoms, name = get_nom(instance)
    sexe = instance.get('sexe') or 'masculin'
    is_female = sexe == 'feminin'
    type_naissance, annee_naissance, ddn, naissance = get_dob(
        instance, '', is_female)
    region_naissance, cercle_naissance, commune_naissance, lieu_naissance = get_lieu_naissance(
        instance, '')
    # respondent's parents and civil status
    nom_pere, prenoms_pere, name_pere = get_nom(instance, s='-pere')
    nom_mere, prenoms_mere, name_mere = get_nom(instance, s='-mere')
    situation_matrioniale = instance.get('situation-matrimoniale', BLANK)
    profession = get_other(instance, 'profession')
    adresse = instance.get('adresse') or ""
    nina_text = instance.get('nina_text') or ""
    telephones = [str(tel.get('numero'))
                  for tel in instance.get('telephones', [])]
    nb_epouses = get_int(instance, 'nb_epouses', 0)
    # children
    logger.info("enfants")
    nb_enfants = get_int(instance, 'nb_enfants')
    nb_enfants_handicapes = get_int(instance, 'nb_enfants_handicapes')
    nb_enfants_acharge = get_int(instance, 'nb_enfants_acharge')
    # income
    salaire = get_int(instance, 'salaire')
    pension = get_int(instance, 'pension')
    allocations = get_int(instance, 'allocations')
    has_autres_revenus = get_bool(instance, 'autres-sources-revenu')
    autres_revenus = [
        (revenu.get('source-revenu'), get_int(revenu, 'montant-revenu'))
        for revenu in instance.get('autres_revenus', [])]
    total_autres_revenus = get_int(instance, 'total_autres_revenus')
    # expenses
    loyer = get_int(instance, 'loyer')
    impot = get_int(instance, 'impot')
    dettes = get_int(instance, 'dettes')
    aliments = get_int(instance, 'aliments')
    sante = get_int(instance, 'sante')
    autres_charges = get_int(instance, 'autres_charges')
    # housing
    type_habitat = get_other(instance, 'type')
    materiau_habitat = get_other(instance, 'materiau')
    # medical/social history
    antecedents_personnels = instance.get('personnels')
    antecedents_personnels_details = instance.get(
        'personnels-details') or BLANK
    antecedents_familiaux = instance.get('familiaux')
    antecedents_familiaux_details = instance.get('familiaux-details') or BLANK
    antecedents_sociaux = instance.get('sociaux')
    antecedents_sociaux_details = instance.get('sociaux-details') or BLANK
    situation_actuelle = instance.get('situation-actuelle') or BLANK
    diagnostic = instance.get('diagnostic') or BLANK
    diagnostic_details = instance.get('diagnostic-details') or BLANK
    # NOTE(review): get_bool() returns a (bool, text) tuple which is always
    # truthy, so `or BLANK` can never trigger; the variable is also unused
    # below — confirm intended behaviour.
    recommande_assistance = get_bool(instance, 'observation') or BLANK

    # --- build the PDF story ---------------------------------------------

    doc = SimpleDocTemplate(fpath, pagesize=A4, fontsize=3)
    logger.info("Headers")
    headers = [["MINISTÈRE DE LA SOLIDARITÉ", "", " REPUBLIQUE DU MALI"],
               ["DE L’ACTION HUMANITAIRE", "", "UN PEUPLE UN BUT UNE FOI"],
               ["ET DE LA RECONSTRUCTION DU NORD", "", ""],
               ["AGENCE NATIONALE D’ASSISTANCE MEDICALE (ANAM)", "", ""]]
    headers_t = Table(headers, colWidths=150, rowHeights=11)
    story.append(headers_t)
    # NOTE(review): SPAN coordinates (1, 30)-(1, 13) are outside this
    # 4-row table — likely a typo; confirm the intended span.
    headers_t.setStyle(TableStyle([('SPAN', (1, 30), (1, 13)),
                                   ('ALIGN', (0, 0), (-1, -1), 'LEFT'), ]))
    story.append(Paragraph(draw_String_title("CONFIDENTIEL"), styles["Title"]))
    numero_enquete_t = Table([["FICHE D’ENQUETE SOCIALE N°.............../{year}"
                               .format(year=datetime.datetime.now().year), ]],)
    numero_enquete_t.setStyle(TableStyle(
        [('BOX', (0, 0), (-1, -1), 0.25, colors.black), ]))
    story.append(numero_enquete_t)
    # NOTE(review): this line labels the value "Identifiant enquêteur" but
    # renders *numero_enquete*, while *identifiant_enqueteur* (computed
    # above) is never used — the two look swapped; confirm.
    story.append(Paragraph(draw_String("Identifiant enquêteur", numero_enquete),
                           b_style))
    story.append(Paragraph(draw_String("Objet de l’enquête", objet_enquete),
                           b_style))
    story.append(Paragraph(draw_String("Enquête demandée par", demandeur),
                           b_style))
    story.append(Paragraph("Enquêté", h3))
    logger.info("Enquêté")
    story.append(Paragraph(draw_String("Concernant", concat(
        [name, sexe, situation_matrioniale])), b_style))
    story.append(Paragraph(draw_String(
        naissance, "à {}".format(lieu_naissance)), b_style))
    logger.info("Parent")
    story.append(Paragraph(draw_String("Père", name_pere), b_style))
    story.append(Paragraph(draw_String("Mère", name_mere), b_style))
    story.append(Paragraph(draw_String("Profession", profession), b_style))
    story.append(Paragraph(draw_String("Adresse", adresse), b_style))
    logger.info("NINA CARD")
    story.append(Paragraph(draw_String("N° NINA", nina_text), b_style))
    story.append(Paragraph(draw_String(
        "Téléphones", concat(telephones)), b_style))
    story.append(Paragraph("COMPOSITION DE LA FAMILLE", h3))
    story.append(Paragraph("Situation des Epouses", h4))
    # wives
    epouses = instance.get('epouses', [])
    logger.info("Epouses")
    if epouses == []:
        story.append(Paragraph(BLANK, b_style))
    for nb, epouse in enumerate(epouses):
        nom_epouse, prenoms_epouse, name_epouse = get_nom(epouse, p='e_')
        nom_pere_epouse, prenoms_pere_epouse, name_pere_epouse = get_nom(
            epouse, p='e_p_')
        nom_mere_epouse, prenoms_mere_epouse, name_mere_epouse = get_nom(
            epouse, p='e_m_')
        region_epouse, cercle_epouse, commune_epouse, lieu_naissance_epouse = get_lieu_naissance(
            epouse, 'e_')
        type_naissance_epouse, annee_naissance_epouse, \
            ddn_epouse, naissance_epouse = get_dob(epouse, 'e_', True)
        profession_epouse = get_other(epouse, 'e_profession')
        nb_enfants_epouse = get_int(epouse, 'e_nb_enfants', 0)
        story.append(Paragraph(draw_String(
            "EPOUSE", "{}".format(nb + 1)), b_style))
        epouses = concat([name_epouse, str(nb_enfants_epouse) +
                          " enfant{p}".format(p="s" if nb_enfants_epouse > 1 else "")])
        story.append(Paragraph(epouses, b_style))
        dob = "{naissance} à {lieu_naissance}".format(
            naissance=naissance_epouse, lieu_naissance=lieu_naissance_epouse)
        story.append(Paragraph(dob, b_style))
        story.append(Paragraph(draw_String("Père", name_pere_epouse), b_style))
        story.append(Paragraph(draw_String("Mère", name_mere_epouse), b_style))
        story.append(Paragraph(draw_String(
            "Profession", profession_epouse), b_style))
    story.append(Paragraph("Situation des Enfants", h4))
    # children
    logger.debug("Child")
    enfants = instance.get('enfants', [])
    if enfants == []:
        story.append(Paragraph(BLANK, b_style))
    for nb, enfant in enumerate(enfants):
        nom_enfant, prenoms_enfant, name_enfant = get_nom(
            enfant, p='enfant_')
        nom_autre_parent, prenoms_autre_parent, name_autre_parent = get_nom(
            instance, s='-autre-parent')
        region_enfant, cercle_enfant, commune_enfant, \
            lieu_naissance_enfant = get_lieu_naissance(enfant, 'enfant_')
        type_naissance_enfant, annee_naissance_enfant, \
            ddn_enfant, naissance_enfant = get_dob(enfant, 'enfant_')
        # child situation flags
        scolarise, scolarise_text = get_bool(enfant, 'scolarise')
        handicape, handicape_text = get_bool(enfant, 'handicape')
        acharge, acharge_text = get_bool(enfant, 'acharge')
        nb_enfant_handicap = get_int(enfant, 'nb_enfant_handicap')
        nb_enfant_acharge = get_int(enfant, 'nb_enfant_acharge')
        story.append(Paragraph("{nb}. {enfant}".format(
            nb=nb + 1, enfant=concat([name_enfant or BLANK, naissance_enfant,
                                      "à {lieu}".format(
                                          lieu=lieu_naissance_enfant),
                                      scolarise_text, handicape_text,
                                      name_autre_parent])), b_style))
    story.append(Paragraph("AUTRES PERSONNES à la charge de l’enquêté", h4))
    # other dependants
    autres = instance.get('autres', [])
    if autres == []:
        story.append(Paragraph(BLANK, b_style))
    logger.debug("Other")
    for nb, autre in enumerate(autres):
        nom_autre, prenoms_autre, name_autre = get_nom(autre, p='autre_')
        region_autre, cercle_autre, commune_autre, \
            lieu_naissance_autre = get_lieu_naissance(autre, 'autre_')
        type_naissance_autre, annee_naissance_autre, \
            ddn_autre, naissance_autre = get_dob(autre, 'autre_')
        parente_autre = get_other(autre, 'autre_parente')
        profession_autre = get_other(autre, 'autre_profession')
        story.append(Paragraph("{nb}. {enfant}".format(nb=nb + 1, enfant=concat(
            [name_autre or BLANK, naissance_autre, "à {lieu}".format(lieu=lieu_naissance_autre), parente_autre, profession_autre])), b_style))
    # income section
    logger.debug("Ressources")
    story.append(
        Paragraph("RESSOURCES ET CONDITIONS DE VIE DE L’ENQUETE (E)", h4))
    story.append(Paragraph(
        concat(["Salaire : {}/mois".format(salaire),
                "Pension : {}/mois".format(pension),
                "Allocations : {}/mois".format(allocations)], sep=". "), b_style))
    autres_revenus_f = ["[{}/{}]".format(source_revenu, montant_revenu)
                        for source_revenu, montant_revenu in autres_revenus]
    story.append(Paragraph(draw_String("Autre", concat(
        autres_revenus_f, sep=". ")), b_style))
    story.append(Paragraph(
        "LES CHARGES DE L’ENQUETE (Préciser le montant et la période)", h4))
    story.append(Paragraph(concat(["Loyer : {}".format(loyer), "Impot : {}".format(impot), "Dettes : {}".format(dettes), "Aliments : {}".format(aliments),
                                   "Santé : {}".format(sante), ], sep=". "), b_style))
    story.append(Paragraph(draw_String(
        "Autres Charges", autres_charges), b_style))
    story.append(Paragraph(draw_String("HABITAT", concat(
        [type_habitat, materiau_habitat])), b_style))
    story.append(Paragraph("EXPOSER DETAILLE DES FAITS", h4))
    # history section
    logger.debug("Antecedents")
    story.append(Paragraph(draw_String("Antécédents personnels",
                                       concat(antecedents_personnels)), b_style))
    story.append(Paragraph(draw_String("Détails Antécédents personnels",
                                       antecedents_personnels_details), b_style))
    story.append(Paragraph(draw_String("Antécédents familiaux",
                                       antecedents_familiaux), b_style))
    story.append(Paragraph(draw_String("Détails Antécédents familiaux",
                                       antecedents_familiaux_details), b_style))
    story.append(Paragraph(draw_String("Antécédents sociaux",
                                       antecedents_sociaux), b_style))
    story.append(Paragraph(draw_String("Détails Antécédents sociaux",
                                       antecedents_sociaux_details), b_style))
    story.append(Paragraph(draw_String("Situation actuelle",
                                       concat(situation_actuelle)), b_style))
    story.append(Paragraph(draw_String("Diagnostic", diagnostic), b_style))
    story.append(Paragraph(draw_String(
        "Diagnostic details", diagnostic_details), b_style))
    # signature block: surveyor's captured signature image (when present)
    # next to the social-service chief's visa caption
    signature_dict = instance.get("signature")
    img = ""
    if signature_dict:
        dir_media = os.path.join(output_folder, "signature_{}".format(
            signature_dict.get("filename")))
        img = Image(dir_media, width=80, height=82)
    signature = [["SIGNATURE DE L’ENQUÊTEUR", "",
                  "VISA DU CHEF DU SERVICE SOCIAL"], [img, ""]]
    signature_t = Table(signature, colWidths=150, rowHeights=90)
    signature_t.setStyle(TableStyle([('FONTSIZE', (0, 0), (-1, -1), 8), ]))
    story.append(signature_t)
    doc.build(story)
    logger.info("Save")
    # create shortcut
    shortcut_folder = os.path.join(export_folder, "PDF")
    Path(shortcut_folder).makedirs_p()
    shortcut_fname = "{}.lnk".format(instance.folder_name)
    create_shortcut(fpath, os.path.join(shortcut_folder, shortcut_fname))
    return fname, fpath
| yeleman/ramed-desktop | ramed/tools/ramed_form_pdf_export.py | Python | mit | 16,484 |
# -*- coding: utf-8 -*-
#
# Django documentation build configuration file, created by
# sphinx-quickstart on Thu Mar 27 09:06:53 2008.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "_ext")))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ["djangodocs", "sphinx.ext.intersphinx"]
# Add any paths that contain templates here, relative to this directory.
# templates_path = []
# The suffix of source filenames.
source_suffix = '.txt'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'contents'
# General substitutions.
project = 'Django'
copyright = 'Django Software Foundation and contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.4.13'
# The full version, including alpha/beta/rc tags.
release = '1.4.13'
# The next version to be released
django_next_version = '1.5'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = False
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'trac'
# Sphinx will recurse into subversion configuration folders and try to read
# any document file within. These should be ignored.
# Note: exclude_dirnames is new in Sphinx 0.5
exclude_dirnames = ['.svn']
# Links to Python's docs should reference the most recent version of the 2.x
# branch, which is located at this URL.
intersphinx_mapping = {
'python': ('http://docs.python.org/2.7', None),
'sphinx': ('http://sphinx.pocoo.org/', None),
}
# Python's docs don't change every week.
intersphinx_cache_limit = 90 # days
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "djangodocs"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ["_theme"]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ["_static"]
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
html_use_smartypants = True
# HTML translator class for the builder
html_translator_class = "djangodocs.DjangoHTMLTranslator"
# Content template for the index page.
#html_index = ''
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Djangodoc'
modindex_common_prefix = ["django."]
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
#latex_documents = []
latex_documents = [
('contents', 'django.tex', u'Django Documentation',
u'Django Software Foundation', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('contents', 'django', 'Django Documentation', ['Django Software Foundation'], 1)
]
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'Django'
epub_author = u'Django Software Foundation'
epub_publisher = u'Django Software Foundation'
epub_copyright = u'2010, Django Software Foundation'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
| dcroc16/skunk_works | google_appengine/lib/django-1.4/docs/conf.py | Python | mit | 9,063 |
'''Arsenal client hardware_profile command line helpers.
These functions are called directly by args.func() to invoke the
appropriate action. They also handle output formatting to the commmand
line.
'''
#
# Copyright 2015 CityGrid Media, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
import sys
import logging
from arsenalclient.cli.common import (
ask_yes_no,
check_resp,
parse_cli_args,
print_results,
update_object_fields,
)
LOG = logging.getLogger(__name__)
def search_hardware_profiles(args, client):
    '''Search for hardware_profiles and perform optional updates.

    When either rack update flag is present the search is widened to all
    fields, exactly one matching profile is required, and the caller is
    asked to confirm before the update request is sent.
    '''
    LOG.debug('action_command is: {0}'.format(args.action_command))
    LOG.debug('object_type is: {0}'.format(args.object_type))

    update_fields = [
        'hardware_profiles_rack_color',
        'hardware_profiles_rack_u',
    ]
    # args is never mutated below, so this can safely be computed once.
    updating = any(getattr(args, field) for field in update_fields)

    search_fields = 'all' if updating else args.fields
    params = parse_cli_args(args.search, search_fields, args.exact_get, args.exclude)
    resp = client.hardware_profiles.search(params)
    if not resp.get('results'):
        return resp

    results = resp['results']
    if args.audit_history:
        results = client.hardware_profiles.get_audit_history(results)

    if not updating:
        print_results(args, results)
    else:
        # An update only makes sense against exactly one profile.
        if len(results) > 1:
            LOG.error('Expected 1 result, exiting.')
            sys.exit(1)
        hardware_profile = results[0]
        LOG.debug('HARDWARE_PROFILE: {0}'.format(hardware_profile))
        msg = 'We are ready to update the following hardware_profile: \n ' \
              '{0}\nContinue?'.format(hardware_profile['name'])
        if ask_yes_no(msg, args.answer_yes):
            profile_update = update_object_fields(args,
                                                  'hardware_profiles',
                                                  hardware_profile,
                                                  update_fields)
            resp = client.hardware_profiles.update(profile_update)

    if resp:
        check_resp(resp)
    LOG.debug('Complete.')
| CityGrid/arsenal | client/arsenalclient/cli/hardware_profile.py | Python | apache-2.0 | 2,867 |
# -*- coding: utf-8 -*-
import os
"""
Illustration of a tutorial exercise showing the time evolution of the
probability density for an equal-weight superposition of the n=1 state
and an arbitrary state n (to be chosen) of the infinite square well.
For simplicity the parameters are (supposedly) arranged so that E_1/hbar = 1.

NOTE(review): because `import os` precedes it, this string is NOT the module
docstring -- it is a no-op string statement kept purely as documentation.
"""
import numpy as np  # numerical toolbox
import matplotlib.pyplot as plt  # plotting toolbox
from matplotlib import animation  # for the progressive animation
# Second state n to observe (to be chosen)
n = 2
# Set (almost) every parameter to 1
t0 = 0    # initial time
dt = 0.1  # time advanced per animation frame
L = 1     # width of the well
hbar = 1
h = hbar * 2 * np.pi
m = (2 * np.pi)**2
E1 = h**2 / (8 * m * L**2)  # ground-state energy of the infinite well
# NOTE(review): for the infinite well E_n = n**2 * E1; `n * E1` looks like a
# physics bug (it changes the beat frequency) -- confirm whether intended.
En = n * E1
x = np.linspace(0, L, 1000)  # spatial grid across the well
def psi1(x, t):
    # Time-dependent ground-state (n=1) wavefunction, unnormalised.
    return np.sin(np.pi * x / L) * np.exp(1j * E1 * t / hbar)
def psin(x, t):
    # Time-dependent n-th state wavefunction, unnormalised.
    return np.sin(n * np.pi * x / L) * np.exp(1j * En * t / hbar)
def psi(x, t):
    # Equal-weight superposition of states 1 and n (prefactor 1/sqrt(L)).
    return 1 / L**0.5 * (psi1(x, t) + psin(x, t))
fig = plt.figure()
line, = plt.plot(x, abs(psi(x, t0))**2)  # curve updated by each frame
plt.title('$t={}$'.format(t0))
plt.ylabel('$|\psi(x,t)|^2$')
plt.xlabel('$x$')
plt.plot(x, abs(psi1(x, t0))**2, '--', label='$|\psi_1|^2$')
plt.plot(x, abs(psin(x, t0))**2, '--', label='$|\psi_{}|^2$'.format(n))
plt.legend()
def init():
    # No-op initialiser (defined but not passed to FuncAnimation).
    pass
def animate(i):
    # Advance to frame i and redraw the probability density.
    t = i * dt + t0
    line.set_ydata(abs(psi(x, t))**2)
    plt.title('$t={}$'.format(t))
anim = animation.FuncAnimation(fig, animate, frames=1000, interval=20)
plt.show()
os.system("pause")  # NOTE(review): "pause" is a Windows shell builtin; no-op on Unix
| NicovincX2/Python-3.5 | Physique/Physique quantique/Mécanique quantique/principe_de_superposition_lineaire.py | Python | gpl-3.0 | 1,519 |
from scipy.optimize import minimize, fmin_cobyla, fmin_tnc, fmin_slsqp, fmin_l_bfgs_b
from path_planning_analysis_cost_function import calibration_cost_function
from scipy.optimize import differential_evolution
# Initial guess for the two calibration parameters.
x0 = [0.23, -0.08]
# print minimize(calibration_cost_function,x0, method='Nelder-Mead')
def c1(x):
    """Inequality constraint for fmin_cobyla: feasible when x[0] < 0.4."""
    upper_bound = 0.4
    return upper_bound - x[0]
def c2(x):
    """Inequality constraint for fmin_cobyla: feasible when x[0] > 0."""
    first_param = x[0]
    return first_param
def c3(x):
    """Inequality constraint for fmin_cobyla: feasible when x[1] > -0.1."""
    margin = 0.1
    return margin + x[1]
def c4(x):
    """Inequality constraint for fmin_cobyla: feasible when x[1] < 0.1."""
    ceiling = 0.1
    return ceiling - x[1]
# return [c1,c2,c3,c4]
# Constraint dicts for scipy.optimize.minimize.
# NOTE(review): currently unused (the minimize() calls below are commented
# out), and these bounds (-0.4 <= x0 <= 0.1, -0.2 <= x1 <= 0.2) do not match
# the c1-c4 constraints above -- confirm which set is intended.
cons = ({'type': 'ineq', 'fun': lambda x: x[0] + .4},
        {'type': 'ineq', 'fun': lambda x: .1 - x[0]},
        {'type': 'ineq', 'fun': lambda x: x[1] + .2},
        {'type': 'ineq', 'fun': lambda x: .2 - x[1]},)
# bfgs_options = {'epa':.01}
# print minimize(calibration_cost_function,x0,method = 'L-BFGS-B')
# cobyla_options = {'rhobeg':.005}
# print minimize(calibration_cost_function,x0,constraints = cons,options =
# cobyla_options)
# NOTE: Python 2 print statements below -- this script will not run on Python 3.
# COBYLA with the c1-c4 constraint functions; rhobeg is the initial step size.
print fmin_cobyla(calibration_cost_function, x0, [c1, c2, c3, c4],
                  rhobeg=.01)
# Box bounds for the bounded optimisers: 0 <= x0 <= 0.3, -0.1 <= x1 <= 0.1.
b = [(0, .3), (-.1, .1)]
print differential_evolution(calibration_cost_function, b)
print fmin_tnc(calibration_cost_function, x0, approx_grad=True, bounds=b, epsilon=.01)
print fmin_slsqp(calibration_cost_function, x0, bounds=b, epsilon=.01)
| NASLab/GroundROS | src/experimental_results/robot_calibration.py | Python | mit | 1,278 |
#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import npyscreen as np
import os
import sys
import MySQLdb
import _mysql_exceptions
from subprocess import Popen
__author__ = 'Shamal Faily'
class CAIRISConfigurationForm(np.ActionForm):
  """npyscreen form collecting CAIRIS database/web settings; on OK it
  provisions the MySQL database and user, loads the schema, and writes
  the cairis.cnf configuration file.

  NOTE(review): Python 2 only (`os.urandom(...).encode('hex')` and tuple
  unpacking of exception instances in createDatabase).
  """
  def create(self):
    # Build all input widgets with their defaults.
    self.findRootDir()
    self.name = "Configure CAIRIS"
    self.theHost = self.add(np.TitleText, name = "Database host:", value = "localhost")
    self.thePort = self.add(np.TitleText, name = "Database port:", value = "3306")
    self.theRootPassword = self.add(np.TitlePassword, name = "Database root password:", value = "")
    self.theDbName = self.add(np.TitleText, name = "Database name (created if non-existent):", value = "cairis_default")
    self.theUser = self.add(np.TitleText, name = "Database user (created if non-existent):", value = "cairisuser")
    # Random hex default so an unattended install still gets a password.
    defaultUserPassword = os.urandom(10).encode('hex')
    self.thePassword = self.add(np.TitlePassword, name = "Database user password:", value = defaultUserPassword)
    self.theTmpDir = self.add(np.TitleText, name = "Temp directory:", value = "/tmp")
    self.theRootDir = self.add(np.TitleText, name = "Root directory:", value = self.defaultRootDir)
    self.theImageDir = self.add(np.TitleText, name = "Default image directory:", value = ".")
    self.theFileName = self.add(np.TitleText, name = "CAIRIS configuration file name:", value = os.environ.get("HOME") + "/cairis.cnf")
    self.theWebPort = self.add(np.TitleText,name = "Web port:", value = "7071")
    self.theLogLevel = self.add(np.TitleText,name = "Log level:", value = "warning");
    self.theStaticDir = self.add(np.TitleText,name = "Static directory:", value = os.path.join(self.defaultRootDir, "web"))
    self.theUploadDir = self.add(np.TitleText,name = "Upload directory:", value = "/tmp")
    # Secrets written verbatim into cairis.cnf by createCairisCnf().
    self.theSecretKey = os.urandom(16).encode('hex')
    self.theSalt = os.urandom(16).encode('hex')
  def findRootDir(self):
    # Locate the installed cairis package; prefer an .egg on sys.path,
    # otherwise fall back to the dist-packages default.
    self.defaultRootDir = "/usr/local/lib/python2.7/dist-packages/cairis"
    for cpath in sys.path:
      if "/dist-packages/cairis-" in cpath and cpath.endswith(".egg"):
        self.defaultRootDir = os.path.join(cpath, "cairis")
        break
  def on_ok(self):
    # ActionForm OK handler: run the full provisioning sequence.
    self.createDatabase()
    self.initialiseDatabase()
    self.createCairisCnf()
  def on_cancel(self):
    # ActionForm Cancel handler: leave without writing anything.
    self.parentApp.setNextForm(None)
  def createDatabase(self):
    # Connect as MySQL root and provision the CAIRIS database and user.
    # NOTE(review): the SQL below is built by string concatenation from
    # form input -- vulnerable to SQL injection / breakage on quotes.
    # NOTE(review): every except block builds `exceptionText` but never
    # logs, raises or displays it, so database errors are silently
    # swallowed and the sequence carries on regardless.
    rootConn = MySQLdb.connect(host=self.theHost.value,port=int(self.thePort.value),user='root',passwd=self.theRootPassword.value)
    rootCursor = rootConn.cursor()
    try:
      grantUsageSql = "grant usage on *.* to '" + self.theUser.value + "'@'" + self.theHost.value + "' identified by '" + self.thePassword.value + "' with max_queries_per_hour 0 max_connections_per_hour 0 max_updates_per_hour 0 max_user_connections 0"
      rootCursor.execute(grantUsageSql)
    except _mysql_exceptions.DatabaseError as e:
      id,msg = e
      exceptionText = 'MySQL error granting usage to ' + self.theUser.value + ' (id: ' + str(id) + ', message: ' + msg
    try:
      createSql = "create database if not exists `" + self.theDbName.value + "`"
      rootCursor.execute(createSql)
    except _mysql_exceptions.DatabaseError as e:
      id,msg = e
      exceptionText = 'MySQL error creating ' + self.theDbName.value + ' database (id: ' + str(id) + ', message: ' + msg
    try:
      grantPrivilegesSql = "grant all privileges on `" + self.theDbName.value + "`.* to '" + self.theUser.value + "'@'" + self.theHost.value + "'"
      rootCursor.execute(grantPrivilegesSql)
    except _mysql_exceptions.DatabaseError as e:
      id,msg = e
      exceptionText = 'MySQL error granting privileges to ' + self.theUser.value + ' for ' + self.theDbName.value + ' database (id: ' + str(id) + ', message: ' + msg
    try:
      # Stored procedures in procs.sql recurse; raise the server limit.
      recursionDepthSql = "set global max_sp_recursion_depth = 255"
      rootCursor.execute(recursionDepthSql)
    except _mysql_exceptions.DatabaseError as e:
      id,msg = e
      exceptionText = 'MySQL error setting recursion depth ' + self.theUser.value + ' for ' + self.theDbName.value + ' database (id: ' + str(id) + ', message: ' + msg
    try:
      flushPrivilegesSql = "flush privileges"
      rootCursor.execute(flushPrivilegesSql)
    except _mysql_exceptions.DatabaseError as e:
      id,msg = e
      exceptionText = 'MySQL error flushing privileges (id: ' + str(id) + ', message: ' + msg
    rootCursor.close()
    rootConn.close()
  def initialiseDatabase(self):
    # Load the schema and stored procedures via the mysql CLI.
    # NOTE(review): the password is passed on the command line (visible in
    # the process list) and the command runs through the shell -- confirm
    # this is acceptable for the install environment.
    initDbCmd = "mysql --user=" + self.theUser.value + " --password=" + self.thePassword.value + " --database=" + self.theDbName.value + " < " + self.theRootDir.value + "/sql/init.sql"
    p = Popen(initDbCmd,shell=True)
    os.waitpid(p.pid,0)
    procsCmd = "mysql --user=" + self.theUser.value + " --password=" + self.thePassword.value + " --database=" + self.theDbName.value + " < " + self.theRootDir.value + "/sql/procs.sql"
    p = Popen(procsCmd,shell=True)
    os.waitpid(p.pid,0)
  def createCairisCnf(self):
    # Write the cairis.cnf configuration file from the form values.
    # NOTE(review): passwords are stored in plain text in this file.
    f = open(self.theFileName.value,'w')
    f.write("rpasswd = " +self.theRootPassword.value + "\n")
    f.write("dbhost = " + self.theHost.value + "\n")
    f.write("dbport = " + self.thePort.value + "\n")
    f.write("dbuser = " + self.theUser.value + "\n")
    f.write("dbpasswd = " + self.thePassword.value + "\n")
    f.write("dbname = " + self.theDbName.value + "\n")
    f.write("tmp_dir = " + self.theTmpDir.value + "\n")
    f.write("root = " + self.theRootDir.value + "\n")
    f.write("default_image_dir = " + self.theImageDir.value + "\n")
    f.write("web_port = " + self.theWebPort.value + "\n")
    f.write("log_level = " + self.theLogLevel.value + "\n")
    f.write("web_static_dir = " + self.theStaticDir.value + "\n")
    f.write("upload_dir = " + self.theUploadDir.value + "\n")
    f.write("\n")
    f.write("auth_dbhost = " + self.theHost.value + "\n")
    f.write("auth_dbuser = " + self.theUser.value + "\n")
    f.write("auth_dbpasswd = " + self.thePassword.value + "\n")
    f.write("auth_dbname = " + self.theDbName.value + "\n")
    f.write("\n")
    f.write("secret_key = " + self.theSecretKey + "\n")
    f.write("password_hash = sha512_crypt\n")
    f.write("password_salt = " + self.theSalt + "\n")
    f.close()
    self.parentApp.setNextForm(None)
class CAIRISConfigurationApp(np.NPSAppManaged):
  """npyscreen application wrapper that launches the configuration form."""
  def onStart(self):
    # "MAIN" is the form npyscreen displays first.
    self.addForm("MAIN",CAIRISConfigurationForm)
def main(args=None):
  """Launch the CAIRIS configuration UI.

  ``args`` defaults to ``sys.argv[1:]``; it is accepted for API
  compatibility but not otherwise consumed by the UI.
  """
  cli_args = sys.argv[1:] if args is None else args
  application = CAIRISConfigurationApp()
  try:
    application.run()
  except np.wgwidget.NotEnoughSpaceForWidget:
    print("The terminal window is too small to display the configuration form, please resize it and try again.")
| nathanbjenx/cairis | cairis/bin/configure_cairis_db.py | Python | apache-2.0 | 7,476 |
#!/usr/bin/python3
# Copyright (C) 2007-2010 PlayOnLinux Team
# Copyright (C) 2011 - Quentin PARIS
# Module-level environment setup shared by PlayOnLinux / PlayOnMac /
# PlayOnBSD: importing this module populates os.environ and a handful of
# module globals (widget_borders, os_name, version, ...).
import os, random, sys
import wx, lib.playonlinux as playonlinux
from lib.dpiFetcher import dpiFetcher
# Quick sanity check: POL_OS must be set before this module is imported.
try:
    os.environ["POL_OS"]
except:
    print("ERROR ! Please define POL_OS environment var first.")
    os._exit(1)
# Mixed (OS-independent) variables, part 1
os.environ["POL_PORT"] = "0"
os.environ["PLAYONLINUX"] = os.path.realpath(os.path.realpath(__file__)+"/../../../")
os.environ["SITE"] = "http://repository.playonlinux.com"
os.environ["VERSION"] = "4.4.1"
os.environ["POL_ID"] = str(random.randint(1, 100000000))
os.environ["GECKO_SITE"] = "http://wine.playonlinux.com/gecko"
os.environ["MONO_SITE"] = "http://wine.playonlinux.com/mono"
homedir = os.environ["HOME"]
# Debian packagers should switch this to TRUE
# It will disable update alerts, bug reports, statistics
# It will set the good locale directory, and it will use the good msttcorefonts
os.environ["DEBIAN_PACKAGE"] = "FALSE"
# PlayOnMac variables
if os.environ["POL_OS"] == "Mac":
    os.environ["PLAYONMAC"] = os.environ["PLAYONLINUX"]
    os.environ["POL_USER_ROOT"] = os.environ["HOME"]+"/Library/PlayOnMac/"
    os.environ["APPLICATION_TITLE"] = "PlayOnMac"
    os.environ["WINE_SITE"] = "https://phoenicis.playonlinux.com/index.php/wine?os=darwin"
    os.environ["POL_DNS"] = "playonmac.com"
    windows_add_size = 20
    windows_add_playonmac = 1
    widget_borders = wx.SIMPLE_BORDER
    os_name = "darwin"
    os.environ["POL_WGET"] = "wget --prefer-family=IPv4 -q --no-check-certificate"
# PlayOnLinux variables
if os.environ["POL_OS"] == "Linux":
    os.environ["POL_USER_ROOT"] = os.environ["HOME"]+"/.PlayOnLinux/"
    os.environ["APPLICATION_TITLE"] = "PlayOnLinux"
    os.environ["POL_DNS"] = "playonlinux.com"
    os.environ["WINE_SITE"] = "https://phoenicis.playonlinux.com/index.php/wine?os=linux"
    if playonlinux.VersionLower(wx.VERSION_STRING, "3.0.0"):
        windows_add_size = 0
        windows_add_playonmac = 0
    else:
        windows_add_size = dpiFetcher().fetch_extra_pixel()+60
        windows_add_playonmac = 0
    widget_borders = wx.RAISED_BORDER
    os_name = "linux"
    # Prefer IPv4 only when the host has IPv6 support compiled in.
    try:
        if not os.path.exists("/proc/net/if_inet6"):
            os.environ["POL_WGET"] = "env LD_LIBRARY_PATH=\""+os.environ["LD_LIBRARY_PATH"]+"\" wget -q"
        else:
            os.environ["POL_WGET"] = "env LD_LIBRARY_PATH=\""+os.environ["LD_LIBRARY_PATH"]+"\" wget --prefer-family=IPv4 -q"
    except KeyError:
        # LD_LIBRARY_PATH not set: run wget with an empty one.
        if not os.path.exists("/proc/net/if_inet6"):
            os.environ["POL_WGET"] = "env LD_LIBRARY_PATH=\"\" wget -q"
        else:
            os.environ["POL_WGET"] = "env LD_LIBRARY_PATH=\"\" wget --prefer-family=IPv4 -q"
# PlayOnBSD variables
if os.environ["POL_OS"] == "FreeBSD":
    os.environ["POL_USER_ROOT"] = os.environ["HOME"]+"/.PlayOnBSD/"
    os.environ["APPLICATION_TITLE"] = "PlayOnBSD"
    os.environ["WINE_SITE"] = "https://phoenicis.playonlinux.com/index.php/wine?os=freebsd"
    os.environ["POL_DNS"] = "playonlinux.com"
    windows_add_size = 0
    windows_add_playonmac = 0
    widget_borders = wx.RAISED_BORDER
    os_name = "freebsd"
    if not os.path.exists("/proc/net/if_inet6"):
        os.environ["POL_WGET"] = "wget -q"
    else:
        os.environ["POL_WGET"] = "wget --prefer-family=IPv4 -q"
os.environ["POL_CURL"] = "curl"
# Detect whether we run on a 64-bit machine (MACHTYPE looks like "x86_64-...").
archi = os.environ["MACHTYPE"].split("-")
archi = archi[0]
if archi == "x86_64":
    os.environ["AMD64_COMPATIBLE"] = "True"
else:
    os.environ["AMD64_COMPATIBLE"] = "False"
# Mixed variables (legacy aliases kept for older scripts)
os.environ["REPERTOIRE"] = os.environ["POL_USER_ROOT"]
os.environ["TITRE"] = os.environ["APPLICATION_TITLE"]
os.environ["WINEPREFIX"] = os.environ["POL_USER_ROOT"]+"/wineprefix/default"
os.environ["OS_NAME"] = os_name
# Wine: keep winemenubuilder from creating desktop menu entries.
os.environ["WINEDLLOVERRIDES"] = "winemenubuilder.exe=d"
# If DYLD_LIBRARY_PATH is not set, define it so it is always present.
try:
    os.environ["DYLD_LIBRARY_PATH"]
except:
    os.environ["DYLD_LIBRARY_PATH"] = ""
# Same for LD_LIBRARY_PATH.
try:
    os.environ["LD_LIBRARY_PATH"]
except:
    os.environ["LD_LIBRARY_PATH"] = ""
if os.environ["POL_OS"] == "Mac":
    os.environ["MAGICK_HOME"] = os.environ["PLAYONLINUX"]+"/../unix/image_magick/"
    os.environ["PATH"] = os.environ["PLAYONLINUX"]+"/../unix/wine/bin:" + os.environ["PLAYONLINUX"]+"/../unix/image_magick/bin:" + os.environ["PLAYONLINUX"]+"/../unix/tools/bin/:" + os.environ["PATH"]
    os.environ["WRITE_LD"] = os.environ["LD_LIBRARY_PATH"]
    os.environ["DYLD_FALLBACK_LIBRARY_PATH"] = os.environ["PLAYONLINUX"]+"/../unix/wine/lib"
    os.environ["WRITE_DYLD_FALLBACK_LIBRARY_PATH"] = os.environ["PLAYONLINUX"]+"/../unix/wine/lib"
    os.environ["FREETYPE_PROPERTIES"]="truetype:interpreter-version=35"
    os.environ["WRITE_DYLD"] = os.environ["DYLD_LIBRARY_PATH"]
else:
    # Debian maintainer decided for some reason not to let wineserver binary into PATH...
    for winepath in ('/usr/lib/i386-linux-gnu/wine/bin', '/usr/lib/i386-linux-gnu/wine-unstable/bin',
                     '/usr/lib32/wine', '/usr/lib32/wine-unstable',
                     '/usr/lib/wine', '/usr/lib/wine-unstable'):
        if os.path.exists('%s/wineserver' % (winepath,)):
            os.environ["PATH"] += ':%s' % (winepath,)
            break
# Snapshot the original search paths so scripts can restore them later.
os.environ["PATH_ORIGIN"] = os.environ["PATH"]
os.environ["LD_PATH_ORIGIN"] = os.environ["LD_LIBRARY_PATH"]
try:
    os.environ["LD_32_PATH_ORIGIN"] = os.environ["LD_32_LIBRARY_PATH"]
except KeyError:
    os.environ["LD_32_PATH_ORIGIN"] = ""
os.environ["DYLDPATH_ORIGIN"] = os.environ["DYLD_LIBRARY_PATH"]
playonlinux_env = os.environ["PLAYONLINUX"]
playonlinux_rep = os.environ["POL_USER_ROOT"]
version = os.environ["VERSION"]
current_user = os.environ["USER"]
os.environ["WGETRC"] = os.environ["POL_USER_ROOT"]+"/configurations/wgetrc"
## Proxy settings
if playonlinux.GetSettings("PROXY_ENABLED") == "1":
    if playonlinux.GetSettings("PROXY_URL") != "":
        if playonlinux.GetSettings("PROXY_LOGIN") == "":
            http_proxy = "http://"+playonlinux.GetSettings("PROXY_URL")+":"+playonlinux.GetSettings("PROXY_PORT")
        else:
            http_proxy = "http://"+playonlinux.GetSettings("PROXY_LOGIN")+":"+playonlinux.GetSettings("PROXY_PASSWORD")+"@"+playonlinux.GetSettings("PROXY_URL")+":"+playonlinux.GetSettings("PROXY_PORT")
        os.environ["http_proxy"] = http_proxy
userAgent = "PlayOnLinux/" + os.environ["VERSION"]
| PlayOnLinux/POL-POM-4 | python/lib/Variables.py | Python | gpl-3.0 | 6,416 |
#!/usr/bin/env python
# Copyright (C) 2001 Jeff Epler <[email protected]>
# Copyright (C) 2006 Csaba Henk <[email protected]>
#
# This program can be distributed under the terms of the GNU LGPL.
# See the file COPYING.
#
from __future__ import print_function
import os, sys
from errno import *
from stat import *
import fcntl
from threading import Lock
# pull in some spaghetti to make this stuff work without fuse-py being installed
try:
import _find_fuse_parts
except ImportError:
pass
import fuse
from fuse import Fuse
if not hasattr(fuse, '__version__'):
raise RuntimeError("your fuse-py doesn't know of fuse.__version__, probably it's too old.")
fuse.fuse_python_api = (0, 2)
fuse.feature_assert('stateful_files', 'has_init')
def flag2mode(flags):
    """Map open(2) ``flags`` to a stdio/fopen-style mode string.

    O_RDONLY -> 'rb', O_WRONLY -> 'wb', O_RDWR -> 'wb+'; when O_APPEND is
    set, the first 'w' is swapped for 'a' (giving 'ab' / 'ab+').
    """
    md = {os.O_RDONLY: 'rb', os.O_WRONLY: 'wb', os.O_RDWR: 'wb+'}
    # Mask down to the access-mode bits before the dict lookup.
    m = md[flags & (os.O_RDONLY | os.O_WRONLY | os.O_RDWR)]

    # BUG FIX: the original tested `flags | os.O_APPEND`, which is non-zero
    # (hence truthy) for every flag combination, so ALL writable files were
    # opened in append mode. A bitwise AND tests whether O_APPEND is set.
    if flags & os.O_APPEND:
        m = m.replace('w', 'a', 1)

    return m
class Xmp(Fuse):
def __init__(self, *args, **kw):
Fuse.__init__(self, *args, **kw)
# do stuff to set up your filesystem here, if you want
#import thread
#thread.start_new_thread(self.mythread, ())
self.root = '/'
# def mythread(self):
#
# """
# The beauty of the FUSE python implementation is that with the python interp
# running in foreground, you can have threads
# """
# print "mythread: started"
# while 1:
# time.sleep(120)
# print "mythread: ticking"
def getattr(self, path):
return os.lstat("." + path)
def readlink(self, path):
return os.readlink("." + path)
def readdir(self, path, offset):
for e in os.listdir("." + path):
yield fuse.Direntry(e)
def unlink(self, path):
os.unlink("." + path)
def rmdir(self, path):
os.rmdir("." + path)
def symlink(self, path, path1):
os.symlink(path, "." + path1)
def rename(self, path, path1):
os.rename("." + path, "." + path1)
def link(self, path, path1):
os.link("." + path, "." + path1)
def chmod(self, path, mode):
os.chmod("." + path, mode)
def chown(self, path, user, group):
os.chown("." + path, user, group)
def truncate(self, path, len):
f = open("." + path, "a")
f.truncate(len)
f.close()
def mknod(self, path, mode, dev):
os.mknod("." + path, mode, dev)
def mkdir(self, path, mode):
os.mkdir("." + path, mode)
def utime(self, path, times):
os.utime("." + path, times)
# The following utimens method would do the same as the above utime method.
# We can't make it better though as the Python stdlib doesn't know of
# subsecond preciseness in acces/modify times.
#
# def utimens(self, path, ts_acc, ts_mod):
# os.utime("." + path, (ts_acc.tv_sec, ts_mod.tv_sec))
def access(self, path, mode):
if not os.access("." + path, mode):
return -EACCES
# This is how we could add stub extended attribute handlers...
# (We can't have ones which aptly delegate requests to the underlying fs
# because Python lacks a standard xattr interface.)
#
# def getxattr(self, path, name, size):
# val = name.swapcase() + '@' + path
# if size == 0:
# # We are asked for size of the value.
# return len(val)
# return val
#
# def listxattr(self, path, size):
# # We use the "user" namespace to please XFS utils
# aa = ["user." + a for a in ("foo", "bar")]
# if size == 0:
# # We are asked for size of the attr list, ie. joint size of attrs
# # plus null separators.
# return len("".join(aa)) + len(aa)
# return aa
def statfs(self):
"""
Should return an object with statvfs attributes (f_bsize, f_frsize...).
Eg., the return value of os.statvfs() is such a thing (since py 2.2).
If you are not reusing an existing statvfs object, start with
fuse.StatVFS(), and define the attributes.
To provide usable information (ie., you want sensible df(1)
output, you are suggested to specify the following attributes:
- f_bsize - preferred size of file blocks, in bytes
- f_frsize - fundamental size of file blcoks, in bytes
[if you have no idea, use the same as blocksize]
- f_blocks - total number of blocks in the filesystem
- f_bfree - number of free blocks
- f_files - total number of file inodes
- f_ffree - nunber of free file inodes
"""
return os.statvfs(".")
def fsinit(self):
os.chdir(self.root)
class XmpFile(object):
    """Per-open-file object that mirrors operations onto the real file."""

    def __init__(self, path, flags, *mode):
        # Paths arrive absolute within the mount; "." + path resolves
        # them against the mirrored root (the process cwd).
        self.file = os.fdopen(os.open("." + path, flags, *mode),
                              flag2mode(flags))
        self.fd = self.file.fileno()
        # os.pread/os.pwrite operate at an explicit offset without
        # moving the file position; without them we must serialize
        # seek+read / seek+write pairs with a lock.
        self.iolock = None if hasattr(os, 'pread') else Lock()

    def read(self, length, offset):
        if self.iolock is None:
            return os.pread(self.fd, length, offset)
        self.iolock.acquire()
        try:
            self.file.seek(offset)
            return self.file.read(length)
        finally:
            self.iolock.release()

    def write(self, buf, offset):
        if self.iolock is None:
            return os.pwrite(self.fd, buf, offset)
        self.iolock.acquire()
        try:
            self.file.seek(offset)
            self.file.write(buf)
            return len(buf)
        finally:
            self.iolock.release()

    def release(self, flags):
        self.file.close()

    def _fflush(self):
        # Flushing a read-only stream is pointless; only flush when the
        # stream was opened for writing or appending.
        if 'w' in self.file.mode or 'a' in self.file.mode:
            self.file.flush()

    def fsync(self, isfsyncfile):
        self._fflush()
        if isfsyncfile and hasattr(os, 'fdatasync'):
            os.fdatasync(self.fd)
        else:
            os.fsync(self.fd)

    def flush(self):
        self._fflush()
        # cf. xmp_flush() in fusexmp_fh.c: closing a dup'ed descriptor
        # pushes data to the OS without invalidating our own fd.
        os.close(os.dup(self.fd))

    def fgetattr(self):
        return os.fstat(self.fd)

    def ftruncate(self, len):
        self.file.truncate(len)

    def lock(self, cmd, owner, **kw):
        # A demonstration of the locking API rather than something seen
        # to be useful in practice.  Advisory locking is messy in Unix
        # and fcntl(2)/F_GETLK is not portably reachable from Python,
        # so the fcntl-style request is translated into Python's
        # lockf(3)/flock(2) medley instead.
        op = {
            fcntl.F_UNLCK: fcntl.LOCK_UN,
            fcntl.F_RDLCK: fcntl.LOCK_SH,
            fcntl.F_WRLCK: fcntl.LOCK_EX,
        }[kw['l_type']]
        if cmd == fcntl.F_GETLK:
            return -EOPNOTSUPP
        elif cmd == fcntl.F_SETLK:
            if op != fcntl.LOCK_UN:
                op |= fcntl.LOCK_NB
        elif cmd == fcntl.F_SETLKW:
            pass
        else:
            return -EINVAL
        fcntl.lockf(self.fd, op, kw['l_start'], kw['l_len'])
def main(self, *a, **kw):
    # Tell python-fuse to instantiate our nested file class for each
    # opened file, then run the regular FUSE main loop.
    self.file_class = self.XmpFile
    return Fuse.main(self, *a, **kw)
def main():
    """Parse command-line/mount options and run the mirror filesystem."""
    usage = """
Userspace nullfs-alike: mirror the filesystem tree from some point on.
""" + Fuse.fusage

    server = Xmp(version="%prog " + fuse.__version__,
                 usage=usage,
                 dash_s_do='setsingle')
    # The mirrored root is configurable via "-o root=PATH".
    server.parser.add_option(mountopt="root", metavar="PATH", default='/',
                             help="mirror filesystem from under PATH [default: %default]")
    server.parse(values=server, errex=1)

    # Verify the root is enterable before handing control to FUSE.
    try:
        if server.fuse_args.mount_expected():
            os.chdir(server.root)
    except OSError:
        print("can't enter root of underlying filesystem", file=sys.stderr)
        sys.exit(1)

    server.main()


if __name__ == '__main__':
    main()
| libfuse/python-fuse | example/xmp.py | Python | lgpl-2.1 | 9,213 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2014 Michael Krause ( http://krause-software.com/ ).
# You are free to use this code under the MIT license:
# http://opensource.org/licenses/MIT
"""Show some histograms for a directory a Xcode project files."""
from __future__ import print_function
import sys
import argparse
from os.path import abspath, dirname, join
import multiprocessing
from collections import defaultdict, Counter
import codecs
# Set up the Python path so we find the xcodeprojer module in the parent directory
# relative to this file.
sys.path.insert(1, dirname(dirname(abspath(__file__))))
import utils
import xcodeprojer
from xcodeprojer import bytestr, unistr
# Python 2/3 compatibility aliases used throughout this script.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
if PY2:
    text_type = unicode
    binary_type = str
else:
    text_type = str
    binary_type = bytes
    unichr = chr
try:
    # On narrow (UTF-16) builds an astral character is stored as a
    # surrogate pair and therefore has length 2.
    NARROW_BUILD = len(unichr(0x1f300)) == 2
except ValueError:
    # Some narrow builds reject astral code points in unichr() outright.
    NARROW_BUILD = True
# Default cap for how many entries of firstnames.txt are hashed.
DEFAULT_FIRSTNAMES = 200
# Hash function mapping a user name to the byte Xcode embeds in its ids.
user_hash = xcodeprojer.UniqueXcodeIDGenerator.user_hash
# Filled lazily by build_emoji_table(); indexed by user hash.
emojis = []
def here():
    """Return the absolute directory containing this script."""
    script_path = abspath(__file__)
    return dirname(script_path)
def rel(filename):
    """Resolve *filename* relative to this script's directory."""
    base = here()
    return join(base, filename)
def write(s, end='\n'):
    """Emit *s* followed by *end* as UTF-8 on stdout (py2 and py3)."""
    # Normalize to unicode first, then encode once and pick the binary
    # stream appropriate for the runtime.
    payload = (unistr(s) + unistr(end)).encode('utf-8')
    if PY2:
        sys.stdout.write(payload)
    else:
        sys.stdout.buffer.write(payload)
def writeline():
    # Emit a bare newline through write() so encoding stays uniform.
    write('\n')
def uniord(s):
    """ord() that also accepts a UTF-16 surrogate pair as a 2-char string.

    Needed on narrow Python builds where an astral character is
    represented by two code units.
    """
    try:
        return ord(s)
    except TypeError:
        # ord() rejects strings whose length is not exactly 1;
        # fall through and try to decode a surrogate pair.
        pass
    if len(s) != 2:
        raise
    lead = ord(s[0])
    trail = ord(s[1])
    # Low 10 bits of the left operand are zero, so | acts as +.
    return 0x10000 + ((lead - 0xd800) << 10) | (trail - 0xdc00)
def iterchars(text):
    """Yield the characters of *text* one code point at a time.

    On narrow Python builds (UTF-16 storage) a lead/trail surrogate
    pair is yielded as a single two-character string so callers can
    treat it as one character.

    Fixes vs. the previous version:
    * on wide builds the function used to fall through into the
      narrow-build loop after the first pass and yield every character
      twice;
    * the surrogate range tests used ``<`` and thereby excluded the
      first lead (0xD800) and trail (0xDC00) code units.
    """
    if not NARROW_BUILD:
        for c in text:
            yield c
        return  # wide build: do not run the surrogate-pairing loop
    idx = 0
    while idx < len(text):
        c = text[idx]
        if ord(c) >= 0x100:
            # When we are running on a narrow Python build
            # we have to deal with surrogate pairs ourselves.
            if ((0xD800 <= ord(c) <= 0xDBFF)
                    and (idx < len(text) - 1)
                    and (0xDC00 <= ord(text[idx + 1]) <= 0xDFFF)):
                c = text[idx:idx + 2]
                # Skip the other half of the lead and trail surrogate
                idx += 1
        idx += 1
        yield c
def build_emoji_table():
    # Populate the module-level `emojis` list with the distinct
    # non-ASCII characters (code points >= 0x100, including surrogate
    # pairs on narrow builds) found in emojis.txt, in first-seen order.
    with codecs.open(rel('emojis.txt'), 'r', encoding='utf-8') as f:
        text = f.read()
    uniques = set()
    for c in iterchars(text):
        # Only use unicode chars >= 0x100 (emoji etc.)
        if len(c) >= 2 or ord(c) >= 0x100:
            if c not in uniques:
                emojis.append(c)
                uniques.add(c)
def print_emoji_table():
    """Dump the emoji table, 32 glyphs per row, prefixed by the offset."""
    per_line = 32
    for idx, glyph in enumerate(emojis):
        if idx % per_line == 0:
            write("%3d" % idx, end=' ')
        write(glyph, end=' ')
        if idx % per_line == per_line - 1:
            writeline()
    writeline()
def print_emoji_histo(histo):
    # histo maps year -> set of user hashes.  Render one line per year
    # with a fixed column per user hash: the user's emoji when the hash
    # appears in that year, blanks otherwise.
    # NOTE(review): years are printed in dict iteration order, not
    # sorted — confirm whether callers rely on insertion order.
    all_users = set()
    for year, users in histo.items():
        all_users.update(users)
    all_users = sorted(all_users)
    num_users = len(all_users)
    for year, users in histo.items():
        chars = [str(year), ' ']
        for i in range(num_users):
            if all_users[i] in users:
                c = emojis[all_users[i]] + ' '
            else:
                c = '  '
            chars.append(c)
        write(''.join(chars))
    write('\n')
def print_histo(histo, utcoffset=0):
    """Print *histo* (mapping key -> count) as a horizontal star chart.

    When *utcoffset* is non-zero the keys are treated as local hours:
    the printed label is the local hour k, while the count is fetched
    from the UTC bucket ((k - utcoffset) % 24), so the chart reads in
    local time even though the data is keyed by UTC hour.

    Fix: an empty histogram used to raise ValueError in max(); now it
    just prints the trailing blank line.
    """
    if not histo:
        writeline()
        return
    maximum = max(histo.values())
    max_display = 60  # width (in stars) of the longest bar
    for k in sorted(histo):
        if utcoffset != 0:
            # Renamed from `localhour`: this is the UTC bucket that
            # corresponds to local display hour k.
            source_key = (k - utcoffset) % 24
        else:
            source_key = k
        v = histo.get(source_key, 0)
        stars = '*' * int(v * max_display / float(maximum))
        write("%3d %5d %s" % (k, v, stars))
    writeline()
def gidtable(filename):
    # Pool worker: parse one Xcode project file and return its table of
    # gid comments.  Prints '.' per parsed file and 'X' per parse
    # failure as a crude progress indicator.
    with open(filename, 'rb') as f:
        xcodeproj = f.read()
    root, parseinfo = xcodeprojer.parse(xcodeproj)
    if root is not None:
        unparser = xcodeprojer.Unparser(root)
        # We don't need the parse tree, only access to the gidcomments
        # that are built during the unparse.
        _ = unparser.unparse(root, projectname=xcodeprojer.projectname_for_path(filename))
        gidcomments = unparser.gidcomments
        c = '.'
    else:
        gidcomments = {}
        c = 'X'
    sys.stdout.write(c)
    sys.stdout.flush()
    return filename, gidcomments
def histogram(args, utcoffset=0):
    """Scan all Xcode project files under args.directory, extract their
    gid timestamps/user hashes via a multiprocessing pool, and print
    hour/year/user histograms plus candidate first names.
    """
    if args.emoji or args.emojitable:
        write("Please be patient when your computer is caching emoji fonts for you. This might take a minute.\n")
        build_emoji_table()
        if args.emojitable:
            print_emoji_table()
            return
    path = args.directory
    histo_year = Counter()
    histo_hour = Counter()
    users_per_year = defaultdict(set)
    pool = multiprocessing.Pool(initializer=utils.per_process_init)
    filenames = xcodeprojer.find_projectfiles(path)
    results = []
    write("Looking for Xcode ids in project files...")
    sys.stdout.flush()
    # Fan out one gidtable() job per project file, optionally capped.
    for idx, filename in enumerate(filenames):
        results.append(pool.apply_async(gidtable, [filename]))
        if args.max_files is not None and idx + 1 >= args.max_files:
            break
    pool.close()
    try:
        for asyncresult in results:
            filename, gids = asyncresult.get()
            for gid in gids:
                fields = xcodeprojer.gidfields(gids, gid)
                refdate = fields['date']
                dt = xcodeprojer.datetime_from_utc(refdate)
                # Hour histogram counts every id; the year/user stats
                # only cover the configured [startyear, endyear] window.
                histo_hour[dt.hour] += 1
                year = dt.year
                if args.startyear <= year <= args.endyear:
                    histo_year[year] += 1
                    users_per_year[year].add(fields['user'])
    except (KeyboardInterrupt, GeneratorExit):
        pool.terminate()
    finally:
        pool.join()
    writeline()
    write("At which hours are new Xcode ids created (UTC time offset: %d)" % args.utcoffset)
    print_histo(histo_hour, utcoffset=utcoffset)
    write("In which years were the Xcode ids created (we only look at %s-%s)" % (args.startyear, args.endyear))
    print_histo(histo_year)
    write("Estimated number of users creating new Xcode ids by year")
    user_histo = {k: len(v) for (k, v) in users_per_year.items()}
    print_histo(user_histo)
    writeline()
    write("The following is a list of names that might be completely unrelated to the examined Xcode projects.")
    write("For something for tangible replace firstnames.txt with your own list.")
    writeline()
    max_firstnames_limited = print_names(args, users_per_year, emoji=args.emoji)
    if args.emoji:
        # NOTE(review): this message duplicates the scanning banner
        # above — possibly a copy-paste; confirm intended wording.
        write("Looking for Xcode ids in project files...")
        print_emoji_histo(users_per_year)
    if max_firstnames_limited and args.max_firstnames is None:
        write("The number of first names to consider was limited to %d, this can be changed with --max-firstnames" % max_firstnames_limited)
def print_names(args, users_per_year, emoji=False):
    # Build a user-hash -> [first names] reverse map from firstnames.txt
    # (capped at max_firstnames entries), then print, per year, the
    # candidate names behind each hash seen that year.
    # Returns the applied cap when the name list was truncated, else None.
    userhashes = defaultdict(list)
    max_firstnames = args.max_firstnames
    if max_firstnames is None:
        max_firstnames = DEFAULT_FIRSTNAMES
    max_firstnames_limited = None
    with codecs.open(rel('firstnames.txt'), 'r', encoding='utf-8') as f:
        firstnames = f.read().splitlines()
        for idx, name in enumerate(firstnames):
            if idx >= max_firstnames:
                max_firstnames_limited = max_firstnames
                break
            userhashes[user_hash(name)].append(name)
    for year, hashes in sorted(users_per_year.items()):
        write(str(year), end=' ')
        for h in sorted(hashes):
            candidates = userhashes[h]
            if candidates:
                if emoji:
                    symbol = emojis[h] + ' '
                else:
                    symbol = ''
                write(' (%s' % symbol + ' | '.join(candidates) + ')', end=' ')
        writeline()
    return max_firstnames_limited
def main():
    """Parse command-line options and run the histogram report."""
    parser = argparse.ArgumentParser(description='Show some histograms for a directory a Xcode project files.')
    parser.add_argument('-u', '--utcoffset', type=int, default=-8, metavar='UTCOFFSET', help='UTC time offset, e.g. "-8" for California')
    parser.add_argument('--startyear', type=int, default=2006)
    parser.add_argument('--endyear', type=int, default=2014)
    parser.add_argument('-n', '--max-files', action='store', type=int, default=None, help='maximum number of files to process')
    parser.add_argument('--max-firstnames', action='store', type=int, default=None, help='maximum number first names to consider')
    parser.add_argument('--emoji', action='store_true', help='add emoji characters to userhashes')
    parser.add_argument('--emojitable', action='store_true', help='only print the emoji table')
    parser.add_argument('--profile', action='store_true', help='run everything through the profiler')
    parser.add_argument('directory', help='directory with Xcode project files')
    args = parser.parse_args()
    if args.profile:
        write('Profiling...')
        # Fix: the profiled statement used to be
        # 'call_command(args, parser)', but call_command() takes a
        # single argument, so --profile crashed with a TypeError.
        utils.profile('call_command(args)', locals(), globals())
    else:
        call_command(args)
def call_command(args):
    # Single dispatch point so the profiler can wrap one statement.
    histogram(args, utcoffset=args.utcoffset)


if __name__ == '__main__':
    main()
| mikr/xcodeprojer | examples/gidhistograms.py | Python | mit | 9,342 |
import os
import sys
import math
# The basis of these tests taken from:
# http://alias-i.com/lingpipe/docs/api/com/aliasi/spell/JaroWinklerDistance.html
#
# That page is the discussion of the method and the Java class. The actual tests
# are found in the source code, which I was able to download from
# http://alias-i.com/lingpipe/web/downloadJarOrDistro.html
#
# Once downloaded, the test class was found in
# /lingpipe-4.1.0/src/com/aliasi/test/unit/spell/JaroWinklerDistanceTest.java
# +-- Number of matches
# | +-- Number of half transpositions
# | | +-- Jaro metric
# | | | +-- Winkler metric
# | | | | +-- Metric calculated by
# v v v v v original reference C code
jaro_tests = r"""
SHACKLEFORD SHACKELFORD 11 2 0.96970 0.98182 0.98864
DUNNINGHAM CUNNIGHAM 8 0 0.89630 0.89630 0.93086
NICHLESON NICHULSON 8 0 0.92593 0.95556 0.97667
JONES JOHNSON 4 0 0.79048 0.83238 0.87383
MASSEY MASSIE 5 0 0.88889 0.93333 0.95333
ABROMS ABRAMS 5 0 0.88889 0.92222 0.95236
HARDIN MARTINEZ 4 0 0.72222 0.72222 0.77431
ITMAN SMITH 1 0 0.46667 0.46667 0.50667
JERALDINE GERALDINE 8 0 0.92593 0.92593 0.96630
MARTHA MARHTA 6 2 0.94444 0.96111 0.97083
MICHELLE MICHAEL 6 0 0.86905 0.92143 0.94444
JULIES JULIUS 5 0 0.88889 0.93333 0.95333
TANYA TONYA 4 0 0.86667 0.88000 0.93280
DWAYNE DUANE 4 0 0.82222 0.84000 0.89609
SEAN SUSAN 3 0 0.78333 0.80500 0.84550
JON JOHN 3 0 0.91667 0.93333 0.93333
JON JAN 2 0 0.77778 0.80000 0.86000
DWAYNE DYUANE 5 2 0.82222 0.84000 0.90250
CRATE TRACE 3 0 0.73333 0.73333 0.77778
WIBBELLY WOBRELBLY 7 3 0.83664 0.85298 0.91122
DIXON DICKSONX 4 0 0.76667 0.81333 0.85394
MARHTA MARTHA 6 2 0.94444 0.96111 0.97083
AL AL 2 0 1.00000 1.00000 1.00000
aaaaaabc aaaaaabd 7 0 0.91667 0.95000 0.96000
ABCVWXYZ CABVWXYZ 8 3 0.95833 0.95833 0.97454
ABCAWXYZ BCAWXYZ 7 3 0.91071 0.91071 0.94223
ABCVWXYZ CBAWXYZ 7 2 0.91071 0.91071 0.94223
ABCDUVWXYZ DABCUVWXYZ 10 4 0.93333 0.93333 0.96061
ABCDUVWXYZ DBCAUVWXYZ 10 2 0.96667 0.96667 0.98030
ABBBUVWXYZ BBBAUVWXYZ 10 2 0.96667 0.96667 0.98030
ABCDUV11lLZ DBCAUVWXYZ 7 2 0.73117 0.73117 0.80130
ABBBUVWXYZ BBB11L3VWXZ 7 0 0.77879 0.77879 0.83650
- - 0 0 1.00000 1.00000 1.00000
A A 1 0 1.00000 1.00000 1.00000
AB AB 2 0 1.00000 1.00000 1.00000
ABC ABC 3 0 1.00000 1.00000 1.00000
ABCD ABCD 4 0 1.00000 1.00000 1.00000
ABCDE ABCDE 5 0 1.00000 1.00000 1.00000
AA AA 2 0 1.00000 1.00000 1.00000
AAA AAA 3 0 1.00000 1.00000 1.00000
AAAA AAAA 4 0 1.00000 1.00000 1.00000
AAAAA AAAAA 5 0 1.00000 1.00000 1.00000
A B 0 0 0.00000 0.00000 0.00000
- ABC 0 0 0.00000 0.00000 0.00000
ABCD - 0 0 0.00000 0.00000 0.00000
-- - 0 0 0.00000 0.00000 0.00000
-- --- 1 0 0.83333 0.83333 0.83333
"""
# We use hyphens to encode null strings and spaces.
# A sequence of n hyphens represents a string of (n-1) spaces.
# http://richardminerich.com/tag/jaro-winkler/
# http://richardminerich.com/2011/09/record-linkage-algorithms-in-f-jaro-winkler-distance-part-1/
# http://richardminerich.com/2011/09/record-linkage-in-f-token-matching-stable-marriages-and-the-gale-shapley-algorithm/
# http://en.wikipedia.org/wiki/Stable_marriage_problem [Gale Shapely]
# https://github.com/NaturalNode/natural/blob/master/lib/natural/distance/jaro-winkler_distance.js
# http://www.gettingcirrius.com/2011/01/calculating-similarity-part-2-jaccard.html
def parse_tests(rstring):
    """Parse the whitespace-separated reference table into tuples.

    Every non-blank line yields (s1, s2, matches, half_transposes,
    jaro, winkler, original): the two counts become ints, the three
    metric columns stay strings.  A run of n hyphens decodes to a
    string of (n-1) spaces.
    """
    parsed = []
    for raw in rstring.split('\n'):
        line = raw.rstrip()
        if not line.strip():
            continue
        bits = line.split()
        assert len(bits) == 7
        s1, s2, m, t, jaro, wink, orig = bits
        m = int(m)
        t = int(t)
        decoded = []
        for string in (s1, s2):
            if string[0] == '-':
                # Hyphen encoding for null/space-only strings.
                assert set(string) == set(['-'])
                string = ' ' * (len(string) - 1)
            decoded.append(string)
        s1, s2 = decoded
        parsed.append((s1, s2, m, t, jaro, wink, orig))
    return parsed
jaro_tests = parse_tests(jaro_tests)
def gen_test_args(test_tuples):
    """Expand each test pair into tolerance/case variants.

    For every tuple's first two strings, yields
    (larger_tol, to_upper, s1, s2) over both booleans crossed with the
    distinct case variants (original, lower, upper) of each string.
    """
    for tup in test_tuples:
        first, second = tup[:2]
        variants1 = {first, first.lower(), first.upper()}
        variants2 = {second, second.lower(), second.upper()}
        for larger_tol in (False, True):
            for to_upper in (False, True):
                for s1 in variants1:
                    for s2 in variants2:
                        yield larger_tol, to_upper, s1, s2
def test():
    # Run every row of the reference table against the jaro package and
    # raise AssertionError on the first mismatch; on success, print one
    # formatted row per case mirroring the table layout.
    from . import jaro
    for test in jaro_tests:
        # s1, s2, m, t, jaro, wink = test
        s1, s2 = test[:2]
        string_metrics = jaro.string_metrics(s1, s2)
        (len1, len2, num_matches, half_transposes,
         typo_score, pre_matches, adjust_long) = string_metrics
        weight_jaro = jaro.metric_jaro(s1, s2)
        weight_winkler = jaro.metric_jaro_winkler(s1, s2)
        weight_original = jaro.metric_original(s1, s2)
        # TODO: Test for the custom function?
        weights = [weight_jaro, weight_winkler, weight_original]
        # Compare (matches, half transposes, formatted metrics) against
        # the expected columns of the table row.
        check = [num_matches, half_transposes]
        check.extend(['%7.5f' % w for w in weights])
        if check != list(test[2:]):
            print()
            print(s1, s2)
            print(check)
            print(test[2:])
            raise AssertionError
        strings = []
        for s in [s1, s2]:
            # Re-encode space-only strings as hyphen runs for display.
            if s.strip() == '':
                s = '-'*(len(s)+1)
            strings.append(s.ljust(12))
        for n in [num_matches, half_transposes]:
            strings.append(str(n).rjust(2))
        for w in weights:
            strings.append(' %7.5f' % w)
        print(' '.join(strings))
# Allow running the reference checks directly as a script.
if __name__ == '__main__':
    test()
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pecan
from pecan import rest
import wsmeext.pecan as wsme_pecan
from solum.api.controllers.v1.datamodel import service
from solum.api.handlers import service_handler
from solum.common import exception
from solum import objects
class ServiceController(rest.RestController):
    """Manages operations on a single service."""

    def __init__(self, service_id):
        super(ServiceController, self).__init__()
        # Database id of the service this controller instance serves.
        self._id = service_id

    @exception.wrap_wsme_pecan_controller_exception
    @wsme_pecan.wsexpose(service.Service)
    def get(self):
        """Return this service."""
        handler = service_handler.ServiceHandler(
            pecan.request.security_context)
        db_model = handler.get(self._id)
        return service.Service.from_db_model(db_model,
                                             pecan.request.host_url)

    @exception.wrap_wsme_pecan_controller_exception
    @wsme_pecan.wsexpose(service.Service, body=service.Service)
    def put(self, data):
        """Modify this service."""
        handler = service_handler.ServiceHandler(
            pecan.request.security_context)
        updated = handler.update(self._id,
                                 data.as_dict(objects.registry.Service))
        return service.Service.from_db_model(updated,
                                             pecan.request.host_url)

    @exception.wrap_wsme_pecan_controller_exception
    @wsme_pecan.wsexpose(status_code=204)
    def delete(self):
        """Delete this service."""
        handler = service_handler.ServiceHandler(
            pecan.request.security_context)
        return handler.delete(self._id)
class ServicesController(rest.RestController):
    """Manages operations on the services collection."""

    @pecan.expose()
    def _lookup(self, service_id, *remainder):
        # Drop a trailing empty path segment (URL ending in a slash)
        # before delegating to the per-service controller.
        if remainder and not remainder[-1]:
            remainder = remainder[:-1]
        return ServiceController(service_id), remainder

    @exception.wrap_wsme_pecan_controller_exception
    @wsme_pecan.wsexpose(service.Service, body=service.Service,
                         status_code=201)
    def post(self, data):
        """Create a new service."""
        handler = service_handler.ServiceHandler(
            pecan.request.security_context)
        created = handler.create(data.as_dict(objects.registry.Service))
        return service.Service.from_db_model(created,
                                             pecan.request.host_url)

    @exception.wrap_wsme_pecan_controller_exception
    @wsme_pecan.wsexpose([service.Service])
    def get_all(self):
        """Return all services, based on the query provided."""
        handler = service_handler.ServiceHandler(
            pecan.request.security_context)
        host_url = pecan.request.host_url
        return [service.Service.from_db_model(item, host_url)
                for item in handler.get_all()]
| devdattakulkarni/test-solum | solum/api/controllers/v1/service.py | Python | apache-2.0 | 3,292 |
# Copyright (C) 2011-2017 2ndQuadrant Limited
#
# This file is part of Barman.
#
# Barman is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Barman is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Barman. If not, see <http://www.gnu.org/licenses/>.
"""
This module represents the interface towards a PostgreSQL server.
"""
import atexit
import logging
from abc import ABCMeta
import psycopg2
from psycopg2.errorcodes import (DUPLICATE_OBJECT, OBJECT_IN_USE,
UNDEFINED_OBJECT)
from psycopg2.extensions import STATUS_IN_TRANSACTION
from psycopg2.extras import DictCursor, NamedTupleCursor
from barman.exceptions import (ConninfoException, PostgresAppNameError,
PostgresConnectionError,
PostgresDuplicateReplicationSlot,
PostgresException,
PostgresInvalidReplicationSlot,
PostgresIsInRecovery,
PostgresReplicationSlotInUse,
PostgresReplicationSlotsFull,
PostgresSuperuserRequired,
PostgresUnsupportedFeature)
from barman.infofile import Tablespace
from barman.remote_status import RemoteStatusMixin
from barman.utils import simplify_version, with_metaclass
from barman.xlog import DEFAULT_XLOG_SEG_SIZE
# This is necessary because the CONFIGURATION_LIMIT_EXCEEDED constant
# has been added in psycopg2 2.5, but Barman supports version 2.4.2+ so
# in case of import error we declare a constant providing the correct value.
try:
from psycopg2.errorcodes import CONFIGURATION_LIMIT_EXCEEDED
except ImportError:
CONFIGURATION_LIMIT_EXCEEDED = '53400'
_logger = logging.getLogger(__name__)
_live_connections = []
"""
List of connections to be closed at the interpreter shutdown
"""
@atexit.register
def _atexit():
    """
    Ensure that all the connections are correctly closed
    at interpreter shutdown
    """
    # Take a copy of the list because the conn.close() method modify it
    for conn in list(_live_connections):
        # Logger.warn() is a deprecated alias; use warning() instead.
        _logger.warning(
            "Forcing %s cleanup during process shut down.",
            conn.__class__.__name__)
        conn.close()
class PostgreSQL(with_metaclass(ABCMeta, RemoteStatusMixin)):
    """
    This abstract class represents a generic interface to a PostgreSQL server.
    """

    # Cheap statement used by _check_connection() to probe health;
    # subclasses override it when plain SQL is not accepted.
    CHECK_QUERY = 'SELECT 1'

    def __init__(self, config, conninfo):
        """
        Abstract base class constructor for PostgreSQL interface.

        :param barman.config.ServerConfig config: the server configuration
        :param str conninfo: Connection information (aka DSN)
        """
        super(PostgreSQL, self).__init__()
        assert conninfo
        self.config = config
        self.conninfo = conninfo
        self._conn = None
        # When False, a broken connection raises in _check_connection()
        # instead of being silently re-established by connect()
        self.allow_reconnect = True
        # Build a dictionary with connection info parameters
        # This is mainly used to speed up search in conninfo
        try:
            self.conn_parameters = self.parse_dsn(conninfo)
        except (ValueError, TypeError) as e:
            _logger.debug(e)
            raise ConninfoException('Cannot connect to postgres: "%s" '
                                    'is not a valid connection string' %
                                    conninfo)

    @staticmethod
    def parse_dsn(dsn):
        """
        Parse connection parameters from 'conninfo'

        :param str dsn: Connection information (aka DSN)
        :rtype: dict[str,str]
        """
        # TODO: this might be made more robust in the future
        return dict(x.split('=', 1) for x in dsn.split())

    @staticmethod
    def encode_dsn(parameters):
        """
        Build a connection string from a dictionary of connection
        parameters

        :param dict[str,str] parameters: Connection parameters
        :rtype: str
        """
        # TODO: this might be made more robust in the future
        return ' '.join(
            ["%s=%s" % (k, v) for k, v in sorted(parameters.items())])

    def get_connection_string(self, application_name=None):
        """
        Return the connection string, adding the application_name parameter
        if requested, unless already defined by user in the connection string

        :param str application_name: the application_name to add
        :return str: the connection string
        """
        conn_string = self.conninfo
        # check if the application name is already defined by user
        if application_name and 'application_name' not in self.conn_parameters:
            # Then add the it to the connection string
            conn_string += ' application_name=%s' % application_name
        return conn_string

    def connect(self):
        """
        Generic function for Postgres connection (using psycopg2)
        """
        # Reuse the existing connection when it still answers CHECK_QUERY
        if not self._check_connection():
            try:
                self._conn = psycopg2.connect(self.conninfo)
            # If psycopg2 fails to connect to the host,
            # raise the appropriate exception
            except psycopg2.DatabaseError as e:
                raise PostgresConnectionError(str(e).strip())
            # Register the connection to the list of live connections
            _live_connections.append(self)
        return self._conn

    def _check_connection(self):
        """
        Return false if the connection is broken

        :rtype: bool
        """
        # If the connection is not present return False
        if not self._conn:
            return False

        # Check if the connection works by running 'SELECT 1'
        cursor = None
        try:
            cursor = self._conn.cursor()
            cursor.execute(self.CHECK_QUERY)
        except psycopg2.DatabaseError:
            # Connection is broken, so we need to reconnect
            self.close()
            # Raise an error if reconnect is not allowed
            if not self.allow_reconnect:
                raise PostgresConnectionError(
                    "Connection lost, reconnection not allowed")
            return False
        finally:
            if cursor:
                cursor.close()
        return True

    def close(self):
        """
        Close the connection to PostgreSQL
        """
        if self._conn:
            # If the connection is still alive, rollback and close it
            if not self._conn.closed:
                if self._conn.status == STATUS_IN_TRANSACTION:
                    self._conn.rollback()
                self._conn.close()
            # Remove the connection from the live connections list
            self._conn = None
            _live_connections.remove(self)

    def _cursor(self, *args, **kwargs):
        """
        Return a cursor
        """
        conn = self.connect()
        return conn.cursor(*args, **kwargs)

    @property
    def server_version(self):
        """
        Version of PostgreSQL (returned by psycopg2)
        """
        conn = self.connect()
        return conn.server_version

    @property
    def server_txt_version(self):
        """
        Human readable version of PostgreSQL (calculated from server_version)

        :rtype: str|None
        """
        try:
            conn = self.connect()
            major = int(conn.server_version / 10000)
            minor = int(conn.server_version / 100 % 100)
            patch = int(conn.server_version % 100)
            # PostgreSQL 10 dropped the minor component from the
            # versioning scheme ("10.1" rather than "9.6.1")
            if major < 10:
                return "%d.%d.%d" % (major, minor, patch)
            if minor != 0:
                _logger.warning(
                    "Unexpected non zero minor version %s in %s",
                    minor, conn.server_version)
            return "%d.%d" % (major, patch)
        except PostgresConnectionError as e:
            _logger.debug("Error retrieving PostgreSQL version: %s",
                          str(e).strip())
            return None

    @property
    def server_major_version(self):
        """
        PostgreSQL major version (calculated from server_txt_version)

        :rtype: str|None
        """
        result = self.server_txt_version
        if result is not None:
            return simplify_version(result)
        return None
class StreamingConnection(PostgreSQL):
    """
    This class represents a streaming connection to a PostgreSQL server.
    """

    # A replication connection rejects plain SQL, so the health probe
    # must be a replication-protocol command
    CHECK_QUERY = 'IDENTIFY_SYSTEM'

    def __init__(self, config):
        """
        Streaming connection constructor

        :param barman.config.ServerConfig config: the server configuration
        """
        if config.streaming_conninfo is None:
            raise ConninfoException(
                "Missing 'streaming_conninfo' parameter for server '%s'"
                % config.name)
        super(StreamingConnection, self).__init__(config,
                                                  config.streaming_conninfo)
        # Make sure we connect using the 'replication' option which
        # triggers streaming replication protocol communication
        self.conn_parameters['replication'] = 'true'
        # Override 'dbname' parameter. This operation is required to mimic
        # the behaviour of pg_receivexlog and pg_basebackup
        self.conn_parameters['dbname'] = 'replication'
        # Rebuild the conninfo string from the modified parameter lists
        self.conninfo = self.encode_dsn(self.conn_parameters)

    def connect(self):
        """
        Connect to the PostgreSQL server. It reuses an existing connection.

        :returns: the connection to the server
        """
        if self._check_connection():
            return self._conn
        # Build a connection and set autocommit
        self._conn = super(StreamingConnection, self).connect()
        self._conn.autocommit = True
        return self._conn

    def fetch_remote_status(self):
        """
        Returns the status of the connection to the PostgreSQL server.

        This method does not raise any exception in case of errors,
        but set the missing values to None in the resulting dictionary.

        :rtype: dict[str, None|str]
        """
        result = dict.fromkeys(
            ('connection_error', 'streaming_supported',
             'streaming', 'systemid',
             'timeline', 'xlogpos'),
            None)
        try:
            # If the server is too old to support `pg_receivexlog`,
            # exit immediately.
            # This needs to be protected by the try/except because
            # `self.server_version` can raise a PostgresConnectionError
            if self.server_version < 90200:
                result["streaming_supported"] = False
                return result
            result["streaming_supported"] = True
            # Execute an IDENTIFY_SYSTEM to check the connection
            cursor = self._cursor()
            cursor.execute("IDENTIFY_SYSTEM")
            row = cursor.fetchone()
            # If something has been returned, barman is connected
            # to a replication backend
            if row:
                result['streaming'] = True
                # IDENTIFY_SYSTEM always return at least two values
                result['systemid'] = row[0]
                result['timeline'] = row[1]
                # PostgreSQL 9.1+ returns also the current xlog flush location
                if len(row) > 2:
                    result['xlogpos'] = row[2]
        except psycopg2.ProgrammingError:
            # This is not a streaming connection
            result['streaming'] = False
        except PostgresConnectionError as e:
            result['connection_error'] = str(e).strip()
            _logger.warn("Error retrieving PostgreSQL status: %s",
                         str(e).strip())
        return result

    def create_physical_repslot(self, slot_name):
        """
        Create a physical replication slot using the streaming connection

        :param str slot_name: Replication slot name
        """
        cursor = self._cursor()
        try:
            # In the following query, the slot name is directly passed
            # to the CREATE_REPLICATION_SLOT command, without any
            # quoting. This is a characteristic of the streaming
            # connection, otherwise it will fail with a generic
            # "syntax error"
            cursor.execute('CREATE_REPLICATION_SLOT %s PHYSICAL' % slot_name)
        except psycopg2.DatabaseError as exc:
            if exc.pgcode == DUPLICATE_OBJECT:
                # A replication slot with the same name exists
                raise PostgresDuplicateReplicationSlot()
            elif exc.pgcode == CONFIGURATION_LIMIT_EXCEEDED:
                # Unable to create a new physical replication slot.
                # All slots are full.
                raise PostgresReplicationSlotsFull()
            else:
                raise PostgresException(str(exc).strip())

    def drop_repslot(self, slot_name):
        """
        Drop a physical replication slot using the streaming connection

        :param str slot_name: Replication slot name
        """
        cursor = self._cursor()
        try:
            # In the following query, the slot name is directly passed
            # to the DROP_REPLICATION_SLOT command, without any
            # quoting. This is a characteristic of the streaming
            # connection, otherwise it will fail with a generic
            # "syntax error"
            cursor.execute('DROP_REPLICATION_SLOT %s' % slot_name)
        except psycopg2.DatabaseError as exc:
            if exc.pgcode == UNDEFINED_OBJECT:
                # A replication slot with the that name does not exist
                raise PostgresInvalidReplicationSlot()
            if exc.pgcode == OBJECT_IN_USE:
                # The replication slot is still in use
                raise PostgresReplicationSlotInUse()
            else:
                raise PostgresException(str(exc).strip())
class PostgreSQLConnection(PostgreSQL):
"""
This class represents a standard client connection to a PostgreSQL server.
"""
# Streaming replication client types
STANDBY = 1
WALSTREAMER = 2
ANY_STREAMING_CLIENT = (STANDBY, WALSTREAMER)
def __init__(self, config):
"""
PostgreSQL connection constructor.
:param barman.config.ServerConfig config: the server configuration
"""
# Check that 'conninfo' option is properly set
if config.conninfo is None:
raise ConninfoException(
"Missing 'conninfo' parameter for server '%s'" % config.name)
super(PostgreSQLConnection, self).__init__(config, config.conninfo)
self.configuration_files = None
def connect(self):
"""
Connect to the PostgreSQL server. It reuses an existing connection.
"""
if self._check_connection():
return self._conn
self._conn = super(PostgreSQLConnection, self).connect()
if (self._conn.server_version >= 90000 and
'application_name' not in self.conn_parameters):
try:
cur = self._conn.cursor()
cur.execute('SET application_name TO barman')
cur.close()
# If psycopg2 fails to set the application name,
# raise the appropriate exception
except psycopg2.ProgrammingError as e:
raise PostgresAppNameError(str(e).strip())
return self._conn
@property
def server_txt_version(self):
"""
Human readable version of PostgreSQL (returned by the server)
"""
try:
cur = self._cursor()
cur.execute("SELECT version()")
return cur.fetchone()[0].split()[1]
except (PostgresConnectionError, psycopg2.Error) as e:
_logger.debug("Error retrieving PostgreSQL version: %s",
str(e).strip())
return None
@property
def has_pgespresso(self):
"""
Returns true if the `pgespresso` extension is available
"""
try:
# pg_extension is only available from Postgres 9.1+
if self.server_version < 90100:
return False
cur = self._cursor()
cur.execute("SELECT count(*) FROM pg_extension "
"WHERE extname = 'pgespresso'")
q_result = cur.fetchone()[0]
return q_result > 0
except (PostgresConnectionError, psycopg2.Error) as e:
_logger.debug("Error retrieving pgespresso information: %s",
str(e).strip())
return None
@property
def is_in_recovery(self):
"""
Returns true if PostgreSQL server is in recovery mode (hot standby)
"""
try:
# pg_is_in_recovery is only available from Postgres 9.0+
if self.server_version < 90000:
return False
cur = self._cursor()
cur.execute("SELECT pg_is_in_recovery()")
return cur.fetchone()[0]
except (PostgresConnectionError, psycopg2.Error) as e:
_logger.debug("Error calling pg_is_in_recovery() function: %s",
str(e).strip())
return None
@property
def is_superuser(self):
"""
Returns true if current user has superuser privileges
"""
try:
cur = self._cursor()
cur.execute('SELECT usesuper FROM pg_user '
'WHERE usename = CURRENT_USER')
return cur.fetchone()[0]
except (PostgresConnectionError, psycopg2.Error) as e:
_logger.debug("Error calling is_superuser() function: %s",
str(e).strip())
return None
    @property
    def current_xlog_info(self):
        """
        Get detailed information about the current WAL position in PostgreSQL.

        This method returns a dictionary containing the following data:

         * location
         * file_name
         * file_offset
         * timestamp

        When executed on a standby server file_name and file_offset are always
        None

        :rtype: psycopg2.extras.DictRow
        """
        try:
            cur = self._cursor(cursor_factory=DictCursor)
            if not self.is_in_recovery:
                # On a primary: resolve the current WAL location to a
                # segment file name and offset (function names picked
                # via name_map for pre/post PostgreSQL 10 compatibility)
                cur.execute(
                    "SELECT location, "
                    "({pg_walfile_name_offset}(location)).*, "
                    "CURRENT_TIMESTAMP AS timestamp "
                    "FROM {pg_current_wal_lsn}() AS location"
                    .format(**self.name_map))
                return cur.fetchone()
            else:
                # On a standby: the walfile name functions cannot be used,
                # so file_name/file_offset are reported as NULL
                cur.execute(
                    "SELECT location, "
                    "NULL AS file_name, "
                    "NULL AS file_offset, "
                    "CURRENT_TIMESTAMP AS timestamp "
                    "FROM {pg_last_wal_replay_lsn}() AS location"
                    .format(**self.name_map))
                return cur.fetchone()
        except (PostgresConnectionError, psycopg2.Error) as e:
            _logger.debug("Error retrieving current xlog "
                          "detailed information: %s",
                          str(e).strip())
            return None
@property
def current_xlog_file_name(self):
"""
Get current WAL file from PostgreSQL
:return str: current WAL file in PostgreSQL
"""
current_xlog_info = self.current_xlog_info
if current_xlog_info is not None:
return current_xlog_info['file_name']
return None
    @property
    def xlog_segment_size(self):
        """
        Retrieve the size of one WAL file.

        In PostgreSQL 11, users will be able to change the WAL size
        at runtime. Up to PostgreSQL 10, included, the WAL size can be changed
        at compile time

        :return: The wal size (In bytes)
        """
        # Prior to PostgreSQL 8.4, the wal segment size was not configurable,
        # even in compilation
        if self.server_version < 80400:
            return DEFAULT_XLOG_SEG_SIZE
        try:
            cur = self._cursor(cursor_factory=DictCursor)
            # We can't use the `get_setting` method here, because it
            # use `SHOW`, returning an human readable value such as "16MB",
            # while we prefer a raw value such as 16777216.
            cur.execute("SELECT setting "
                        "FROM pg_settings "
                        "WHERE name='wal_block_size'")
            result = cur.fetchone()
            wal_block_size = int(result[0])
            cur.execute("SELECT setting "
                        "FROM pg_settings "
                        "WHERE name='wal_segment_size'")
            result = cur.fetchone()
            # wal_segment_size is expressed in blocks, so the size in
            # bytes is wal_block_size * wal_segment_size
            wal_segment_size = int(result[0])
            return wal_block_size * wal_segment_size
        except ValueError as e:
            # NOTE(review): unlike sibling methods, this only catches
            # ValueError, so PostgresConnectionError/psycopg2.Error would
            # propagate to the caller — confirm this is intended
            _logger.error("Error retrieving current xlog "
                          "segment size: %s",
                          str(e).strip())
            return None
@property
def current_xlog_location(self):
"""
Get current WAL location from PostgreSQL
:return str: current WAL location in PostgreSQL
"""
current_xlog_info = self.current_xlog_info
if current_xlog_info is not None:
return current_xlog_info['location']
return None
    @property
    def current_size(self):
        """
        Returns the total size of the PostgreSQL server (requires superuser)

        :rtype: int|None
        """
        # pg_tablespace_size requires superuser privileges
        if not self.is_superuser:
            return None
        try:
            cur = self._cursor()
            # Sum the size of every tablespace of the cluster
            cur.execute(
                "SELECT sum(pg_tablespace_size(oid)) "
                "FROM pg_tablespace")
            return cur.fetchone()[0]
        except (PostgresConnectionError, psycopg2.Error) as e:
            _logger.debug("Error retrieving PostgreSQL total size: %s",
                          str(e).strip())
            return None
    def get_archiver_stats(self):
        """
        This method gathers statistics from pg_stat_archiver.
        Only for Postgres 9.4+ or greater. If not available, returns None.

        :return dict|None: a dictionary containing Postgres statistics from
            pg_stat_archiver or None
        """
        try:
            # pg_stat_archiver is only available from Postgres 9.4+
            if self.server_version < 90400:
                return None
            cur = self._cursor(cursor_factory=DictCursor)
            # Select from pg_stat_archiver statistics view,
            # retrieving statistics about WAL archiver process activity,
            # also evaluating if the server is archiving without issues
            # and the archived WALs per second rate.
            #
            # We are using current_settings to check for archive_mode=always.
            # current_setting does normalise its output so we can just
            # check for 'always' settings using a direct string
            # comparison
            cur.execute(
                "SELECT *, "
                "current_setting('archive_mode') IN ('on', 'always') "
                "AND (last_failed_wal IS NULL "
                "OR last_failed_wal LIKE '%.history' "
                "AND substring(last_failed_wal from 1 for 8) "
                "<= substring(last_archived_wal from 1 for 8) "
                "OR last_failed_time <= last_archived_time) "
                "AS is_archiving, "
                "CAST (archived_count AS NUMERIC) "
                "/ EXTRACT (EPOCH FROM age(now(), stats_reset)) "
                "AS current_archived_wals_per_second "
                "FROM pg_stat_archiver")
            return cur.fetchone()
        except (PostgresConnectionError, psycopg2.Error) as e:
            _logger.debug("Error retrieving pg_stat_archive data: %s",
                          str(e).strip())
            return None
def fetch_remote_status(self):
"""
Get the status of the PostgreSQL server
This method does not raise any exception in case of errors,
but set the missing values to None in the resulting dictionary.
:rtype: dict[str, None|str]
"""
# PostgreSQL settings to get from the server (requiring superuser)
pg_superuser_settings = [
'data_directory']
# PostgreSQL settings to get from the server
pg_settings = []
pg_query_keys = [
'server_txt_version',
'is_superuser',
'current_xlog',
'pgespresso_installed',
'replication_slot_support',
'replication_slot',
'synchronous_standby_names',
]
# Initialise the result dictionary setting all the values to None
result = dict.fromkeys(pg_superuser_settings +
pg_settings +
pg_query_keys,
None)
try:
# check for wal_level only if the version is >= 9.0
if self.server_version >= 90000:
pg_settings.append('wal_level')
# retrieves superuser settings
if self.is_superuser:
for name in pg_superuser_settings:
result[name] = self.get_setting(name)
# retrieves standard settings
for name in pg_settings:
result[name] = self.get_setting(name)
result['is_superuser'] = self.is_superuser
result['server_txt_version'] = self.server_txt_version
result['pgespresso_installed'] = self.has_pgespresso
result['current_xlog'] = self.current_xlog_file_name
result['current_size'] = self.current_size
result.update(self.get_configuration_files())
# Retrieve the replication_slot status
result["replication_slot_support"] = False
if self.server_version >= 90400:
result["replication_slot_support"] = True
if self.config.slot_name is not None:
result["replication_slot"] = (
self.get_replication_slot(self.config.slot_name))
# Retrieve the list of synchronous standby names
result["synchronous_standby_names"] = []
if self.server_version >= 90100:
result["synchronous_standby_names"] = (
self.get_synchronous_standby_names())
except (PostgresConnectionError, psycopg2.Error) as e:
_logger.warn("Error retrieving PostgreSQL status: %s",
str(e).strip())
return result
def get_setting(self, name):
"""
Get a Postgres setting with a given name
:param name: a parameter name
"""
try:
cur = self._cursor()
cur.execute('SHOW "%s"' % name.replace('"', '""'))
return cur.fetchone()[0]
except (PostgresConnectionError, psycopg2.Error) as e:
_logger.debug("Error retrieving PostgreSQL setting '%s': %s",
name.replace('"', '""'), str(e).strip())
return None
    def get_tablespaces(self):
        """
        Returns a list of tablespaces or None if not present

        :rtype: list[Tablespace]|None
        """
        try:
            cur = self._cursor()
            if self.server_version >= 90200:
                # From 9.2 the location is no longer a pg_tablespace
                # column and must be resolved via pg_tablespace_location()
                cur.execute(
                    "SELECT spcname, oid, "
                    "pg_tablespace_location(oid) AS spclocation "
                    "FROM pg_tablespace "
                    "WHERE pg_tablespace_location(oid) != ''")
            else:
                cur.execute(
                    "SELECT spcname, oid, spclocation "
                    "FROM pg_tablespace WHERE spclocation != ''")
            # Generate a list of tablespace objects
            return [Tablespace._make(item) for item in cur.fetchall()]
        except (PostgresConnectionError, psycopg2.Error) as e:
            _logger.debug("Error retrieving PostgreSQL tablespaces: %s",
                          str(e).strip())
            return None
    def get_configuration_files(self):
        """
        Get postgres configuration files or an empty dictionary
        in case of error

        :rtype: dict
        """
        # Return the cached result, if any
        if self.configuration_files:
            return self.configuration_files
        try:
            self.configuration_files = {}
            cur = self._cursor()
            cur.execute(
                "SELECT name, setting FROM pg_settings "
                "WHERE name IN ('config_file', 'hba_file', 'ident_file')")
            for cname, cpath in cur.fetchall():
                self.configuration_files[cname] = cpath
            # Retrieve additional configuration files
            # If PostgreSQL is older than 8.4 disable this check
            if self.server_version >= 80400:
                # Files included by the main config_file (e.g. via
                # include/include_dir directives)
                cur.execute(
                    "SELECT DISTINCT sourcefile AS included_file "
                    "FROM pg_settings "
                    "WHERE sourcefile IS NOT NULL "
                    "AND sourcefile NOT IN "
                    "(SELECT setting FROM pg_settings "
                    "WHERE name = 'config_file') "
                    "ORDER BY 1")
                # Extract the values from the containing single element tuples
                included_files = [included_file
                                  for included_file, in cur.fetchall()]
                if len(included_files) > 0:
                    self.configuration_files['included_files'] = included_files
        except (PostgresConnectionError, psycopg2.Error) as e:
            _logger.debug("Error retrieving PostgreSQL configuration files "
                          "location: %s", str(e).strip())
            self.configuration_files = {}
        return self.configuration_files
def create_restore_point(self, target_name):
"""
Create a restore point with the given target name
The method executes the pg_create_restore_point() function through
a PostgreSQL connection. Only for Postgres versions >= 9.1 when not
in replication.
If requirements are not met, the operation is skipped.
:param str target_name: name of the restore point
:returns: the restore point LSN
:rtype: str|None
"""
if self.server_version < 90100:
return None
# Not possible if on a standby
# Called inside the pg_connect context to reuse the connection
if self.is_in_recovery:
return None
try:
cur = self._cursor()
cur.execute(
"SELECT pg_create_restore_point(%s)", [target_name])
return cur.fetchone()[0]
except (PostgresConnectionError, psycopg2.Error) as e:
_logger.debug('Error issuing pg_create_restore_point()'
'command: %s', str(e).strip())
return None
    def start_exclusive_backup(self, label):
        """
        Calls pg_start_backup() on the PostgreSQL server

        This method returns a dictionary containing the following data:

         * location
         * file_name
         * file_offset
         * timestamp

        :param str label: descriptive string to identify the backup
        :rtype: psycopg2.extras.DictRow
        """
        try:
            conn = self.connect()
            # Rollback to release the transaction, as the pg_start_backup
            # invocation can last up to PostgreSQL's checkpoint_timeout
            conn.rollback()
            # Start an exclusive backup
            cur = conn.cursor(cursor_factory=DictCursor)
            if self.server_version < 80400:
                # Before 8.4 pg_start_backup has no "fast checkpoint"
                # argument, so only the label can be passed
                cur.execute(
                    "SELECT location, "
                    "({pg_walfile_name_offset}(location)).*, "
                    "now() AS timestamp "
                    "FROM pg_start_backup(%s) AS location"
                    .format(**self.name_map),
                    (label,))
            else:
                cur.execute(
                    "SELECT location, "
                    "({pg_walfile_name_offset}(location)).*, "
                    "now() AS timestamp "
                    "FROM pg_start_backup(%s,%s) AS location"
                    .format(**self.name_map),
                    (label, self.config.immediate_checkpoint))
            start_row = cur.fetchone()
            # Rollback to release the transaction, as the connection
            # is to be retained until the end of backup
            conn.rollback()
            return start_row
        except (PostgresConnectionError, psycopg2.Error) as e:
            msg = "pg_start_backup(): %s" % str(e).strip()
            _logger.debug(msg)
            raise PostgresException(msg)
    def start_concurrent_backup(self, label):
        """
        Calls pg_start_backup on the PostgreSQL server using the
        API introduced with version 9.6

        This method returns a dictionary containing the following data:

         * location
         * timeline
         * timestamp

        :param str label: descriptive string to identify the backup
        :rtype: psycopg2.extras.DictRow
        """
        try:
            conn = self.connect()
            # Rollback to release the transaction, as the pg_start_backup
            # invocation can last up to PostgreSQL's checkpoint_timeout
            conn.rollback()
            # Start the backup using the api introduced in postgres 9.6
            # (third argument FALSE selects a non-exclusive backup)
            cur = conn.cursor(cursor_factory=DictCursor)
            cur.execute(
                "SELECT location, "
                "(SELECT timeline_id "
                "FROM pg_control_checkpoint()) AS timeline, "
                "now() AS timestamp "
                "FROM pg_start_backup(%s, %s, FALSE) AS location",
                (label, self.config.immediate_checkpoint))
            start_row = cur.fetchone()
            # Rollback to release the transaction, as the connection
            # is to be retained until the end of backup
            conn.rollback()
            return start_row
        except (PostgresConnectionError, psycopg2.Error) as e:
            msg = "pg_start_backup command: %s" % (str(e).strip(),)
            _logger.debug(msg)
            raise PostgresException(msg)
    def stop_exclusive_backup(self):
        """
        Calls pg_stop_backup() on the PostgreSQL server

        This method returns a dictionary containing the following data:

         * location
         * file_name
         * file_offset
         * timestamp

        :rtype: psycopg2.extras.DictRow
        """
        try:
            conn = self.connect()
            # Rollback to release the transaction, as the pg_stop_backup
            # invocation will wait until the current WAL file is shipped
            conn.rollback()
            # Stop the backup
            cur = conn.cursor(cursor_factory=DictCursor)
            cur.execute(
                "SELECT location, "
                "({pg_walfile_name_offset}(location)).*, "
                "now() AS timestamp "
                "FROM pg_stop_backup() AS location"
                .format(**self.name_map)
            )
            return cur.fetchone()
        except (PostgresConnectionError, psycopg2.Error) as e:
            msg = "Error issuing pg_stop_backup command: %s" % str(e).strip()
            _logger.debug(msg)
            raise PostgresException(
                'Cannot terminate exclusive backup. '
                'You might have to manually execute pg_stop_backup '
                'on your PostgreSQL server')
    def stop_concurrent_backup(self):
        """
        Calls pg_stop_backup on the PostgreSQL server using the
        API introduced with version 9.6

        This method returns a dictionary containing the following data:

         * location
         * timeline
         * backup_label
         * timestamp

        :rtype: psycopg2.extras.DictRow
        """
        try:
            conn = self.connect()
            # Rollback to release the transaction, as the pg_stop_backup
            # invocation will wait until the current WAL file is shipped
            conn.rollback()
            # Stop the backup using the api introduced with version 9.6
            # (on a standby the timeline comes from min_recovery_end_timeline)
            cur = conn.cursor(cursor_factory=DictCursor)
            cur.execute(
                'SELECT end_row.lsn AS location, '
                '(SELECT CASE WHEN pg_is_in_recovery() '
                'THEN min_recovery_end_timeline ELSE timeline_id END '
                'FROM pg_control_checkpoint(), pg_control_recovery()'
                ') AS timeline, '
                'end_row.labelfile AS backup_label, '
                'now() AS timestamp FROM pg_stop_backup(FALSE) AS end_row')
            return cur.fetchone()
        except (PostgresConnectionError, psycopg2.Error) as e:
            msg = "Error issuing pg_stop_backup command: %s" % str(e).strip()
            _logger.debug(msg)
            raise PostgresException(msg)
    def pgespresso_start_backup(self, label):
        """
        Execute a pgespresso_start_backup

        This method returns a dictionary containing the following data:

         * backup_label
         * timestamp

        :param str label: descriptive string to identify the backup
        :rtype: psycopg2.extras.DictRow
        """
        try:
            conn = self.connect()
            # Rollback to release the transaction,
            # as the pgespresso_start_backup invocation can last
            # up to PostgreSQL's checkpoint_timeout
            conn.rollback()
            # Start the concurrent backup using pgespresso
            cur = conn.cursor(cursor_factory=DictCursor)
            cur.execute(
                'SELECT pgespresso_start_backup(%s,%s) AS backup_label, '
                'now() AS timestamp',
                (label, self.config.immediate_checkpoint))
            start_row = cur.fetchone()
            # Rollback to release the transaction, as the connection
            # is to be retained until the end of backup
            conn.rollback()
            return start_row
        except (PostgresConnectionError, psycopg2.Error) as e:
            msg = "pgespresso_start_backup(): %s" % str(e).strip()
            _logger.debug(msg)
            raise PostgresException(msg)
    def pgespresso_stop_backup(self, backup_label):
        """
        Execute a pgespresso_stop_backup

        This method returns a dictionary containing the following data:

         * end_wal
         * timestamp

        :param str backup_label: backup label as returned
            by pgespresso_start_backup
        :rtype: psycopg2.extras.DictRow
        """
        try:
            conn = self.connect()
            # Issue a rollback to release any unneeded lock
            conn.rollback()
            cur = conn.cursor(cursor_factory=DictCursor)
            cur.execute("SELECT pgespresso_stop_backup(%s) AS end_wal, "
                        "now() AS timestamp",
                        (backup_label,))
            return cur.fetchone()
        except (PostgresConnectionError, psycopg2.Error) as e:
            msg = "Error issuing pgespresso_stop_backup() command: %s" % (
                str(e).strip())
            _logger.debug(msg)
            raise PostgresException(
                '%s\n'
                'HINT: You might have to manually execute '
                'pgespresso_abort_backup() on your PostgreSQL '
                'server' % msg)
    def switch_xlog(self):
        """
        Execute a pg_switch_xlog()

        To be SURE of the switch of a xlog, we collect the xlogfile name
        before and after the switch.
        The method returns the just closed xlog file name if the current xlog
        file has changed, it returns an empty string otherwise.

        The method returns None if something went wrong during the execution
        of the pg_switch_xlog command.

        :rtype: str|None
        """
        try:
            conn = self.connect()
            # Requires superuser privilege
            if not self.is_superuser:
                raise PostgresSuperuserRequired()
            # If this server is in recovery there is nothing to do
            if self.is_in_recovery:
                raise PostgresIsInRecovery()
            cur = conn.cursor()
            # Collect the xlog file name before the switch
            cur.execute('SELECT {pg_walfile_name}('
                        '{pg_current_wal_insert_lsn}())'
                        .format(**self.name_map))
            pre_switch = cur.fetchone()[0]
            # Switch
            cur.execute('SELECT {pg_walfile_name}({pg_switch_wal}())'
                        .format(**self.name_map))
            # Collect the xlog file name after the switch
            cur.execute('SELECT {pg_walfile_name}('
                        '{pg_current_wal_insert_lsn}())'
                        .format(**self.name_map))
            post_switch = cur.fetchone()[0]
            # WAL segment names sort lexicographically, so a greater
            # post_switch name proves a new segment has been opened
            if pre_switch < post_switch:
                return pre_switch
            else:
                return ''
        except (PostgresConnectionError, psycopg2.Error) as e:
            _logger.debug(
                "Error issuing pg_switch_xlog() command: %s",
                str(e).strip())
            return None
def checkpoint(self):
"""
Execute a checkpoint
"""
try:
conn = self.connect()
# Requires superuser privilege
if not self.is_superuser:
raise PostgresSuperuserRequired()
cur = conn.cursor()
cur.execute("CHECKPOINT")
except (PostgresConnectionError, psycopg2.Error) as e:
_logger.debug(
"Error issuing CHECKPOINT: %s",
str(e).strip())
    def get_replication_stats(self, client_type=STANDBY):
        """
        Returns streaming replication information

        :param client_type: filter for the WHERE clause (STANDBY,
            WALSTREAMER or anything else for no filtering)
        :rtype: list|None
        """
        try:
            cur = self._cursor(cursor_factory=NamedTupleCursor)
            # Without superuser rights, this function is useless
            # TODO: provide a simplified version for non-superusers
            if not self.is_superuser:
                raise PostgresSuperuserRequired()
            # pg_stat_replication is a system view that contains one
            # row per WAL sender process with information about the
            # replication status of a standby server. It has been
            # introduced in PostgreSQL 9.1. Current fields are:
            #
            # - pid (procpid in 9.1)
            # - usesysid
            # - usename
            # - application_name
            # - client_addr
            # - client_hostname
            # - client_port
            # - backend_start
            # - backend_xmin (9.4+)
            # - state
            # - sent_location
            # - write_location
            # - flush_location
            # - replay_location
            # - sync_priority
            # - sync_state
            #
            if self.server_version < 90100:
                raise PostgresUnsupportedFeature('9.1')
            from_repslot = ""
            if self.server_version >= 90500:
                # Current implementation (9.5+)
                what = "r.*, rs.slot_name"
                # Look for replication slot name
                from_repslot = "LEFT JOIN pg_replication_slots rs " \
                               "ON (r.pid = rs.active_pid) "
            elif self.server_version >= 90400:
                # PostgreSQL 9.4
                what = "*"
            elif self.server_version >= 90200:
                # PostgreSQL 9.2/9.3
                # (9.2 lacks backend_xmin, so a NULL placeholder keeps
                # the result shape uniform across versions)
                what = "pid," \
                    "usesysid," \
                    "usename," \
                    "application_name," \
                    "client_addr," \
                    "client_hostname," \
                    "client_port," \
                    "backend_start," \
                    "CAST (NULL AS xid) AS backend_xmin," \
                    "state," \
                    "sent_location," \
                    "write_location," \
                    "flush_location," \
                    "replay_location," \
                    "sync_priority," \
                    "sync_state "
            else:
                # PostgreSQL 9.1
                what = "procpid AS pid," \
                    "usesysid," \
                    "usename," \
                    "application_name," \
                    "client_addr," \
                    "client_hostname," \
                    "client_port," \
                    "backend_start," \
                    "CAST (NULL AS xid) AS backend_xmin," \
                    "state," \
                    "sent_location," \
                    "write_location," \
                    "flush_location," \
                    "replay_location," \
                    "sync_priority," \
                    "sync_state "
            # Streaming client
            if client_type == self.STANDBY:
                # Standby server
                where = 'WHERE replay_location IS NOT NULL '
            elif client_type == self.WALSTREAMER:
                # WAL streamer
                where = 'WHERE replay_location IS NULL '
            else:
                where = ''
            # NOTE(review): the WHERE clauses use the pre-10 column name
            # "replay_location"; PostgreSQL 10 renamed it to "replay_lsn" --
            # confirm this query is only run against servers where the
            # column exists.
            # Execute the query: str.format resolves the {…} function names
            # first, then the % operator fills in what/from_repslot/where
            cur.execute(
                "SELECT %s, "
                "pg_is_in_recovery() AS is_in_recovery,"
                "CASE WHEN pg_is_in_recovery() "
                "  THEN {pg_last_wal_receive_lsn}() "
                "  ELSE {pg_current_wal_lsn}() "
                "END AS current_location "
                "FROM pg_stat_replication r "
                "%s"
                "%s"
                "ORDER BY sync_state DESC, sync_priority"
                .format(**self.name_map)
                % (what, from_repslot, where))
            # Generate a list of standby objects
            return cur.fetchall()
        except (PostgresConnectionError, psycopg2.Error) as e:
            _logger.debug("Error retrieving status of standby servers: %s",
                          str(e).strip())
            return None
def get_replication_slot(self, slot_name):
"""
Retrieve from the PostgreSQL server a physical replication slot
with a specific slot_name.
This method returns a dictionary containing the following data:
* slot_name
* active
* restart_lsn
:param str slot_name: the replication slot name
:rtype: psycopg2.extras.DictRow
"""
if self.server_version < 90400:
# Raise exception if replication slot are not supported
# by PostgreSQL version
raise PostgresUnsupportedFeature('9.4')
else:
cur = self._cursor(cursor_factory=NamedTupleCursor)
try:
cur.execute("SELECT slot_name, "
"active, "
"restart_lsn "
"FROM pg_replication_slots "
"WHERE slot_type = 'physical' "
"AND slot_name = '%s'" % slot_name)
# Retrieve the replication slot information
return cur.fetchone()
except (PostgresConnectionError, psycopg2.Error) as e:
_logger.debug("Error retrieving replication_slots: %s",
str(e).strip())
raise
def get_synchronous_standby_names(self):
"""
Retrieve the list of named synchronous standby servers from PostgreSQL
This method returns a list of names
:return list: synchronous standby names
"""
if self.server_version < 90100:
# Raise exception if synchronous replication is not supported
raise PostgresUnsupportedFeature('9.1')
else:
synchronous_standby_names = (
self.get_setting('synchronous_standby_names'))
# Normalise the list of sync standby names
# On PostgreSQL 9.6 it is possible to specify the number of
# required synchronous standby using this format:
# n (name1, name2, ... nameN).
# We only need the name list, so we discard everything else.
# The name list starts after the first parenthesis or at pos 0
names_start = synchronous_standby_names.find('(') + 1
names_end = synchronous_standby_names.rfind(')')
if names_end < 0:
names_end = len(synchronous_standby_names)
names_list = synchronous_standby_names[names_start:names_end]
return [x.strip() for x in names_list.split(',')]
@property
def name_map(self):
"""
Return a map with function and directory names according to the current
PostgreSQL version.
Each entry has the `current` name as key and the name for the specific
version as value.
:rtype: dict[str]
"""
if self.server_version < 100000:
return {
'pg_switch_wal': 'pg_switch_xlog',
'pg_walfile_name': 'pg_xlogfile_name',
'pg_wal': 'pg_xlog',
'pg_walfile_name_offset': 'pg_xlogfile_name_offset',
'pg_last_wal_replay_lsn': 'pg_last_xlog_replay_location',
'pg_current_wal_lsn': 'pg_current_xlog_location',
'pg_current_wal_insert_lsn': 'pg_current_xlog_insert_location',
'pg_last_wal_receive_lsn': 'pg_last_xlog_receive_location',
}
else:
return {
'pg_switch_wal': 'pg_switch_wal',
'pg_walfile_name': 'pg_walfile_name',
'pg_wal': 'pg_wal',
'pg_walfile_name_offset': 'pg_walfile_name_offset',
'pg_last_wal_replay_lsn': 'pg_last_wal_replay_lsn',
'pg_current_wal_lsn': 'pg_current_wal_lsn',
'pg_current_wal_insert_lsn': 'pg_current_wal_insert_lsn',
'pg_last_wal_receive_lsn': 'pg_last_wal_receive_lsn',
}
| infoxchange/barman | barman/postgres.py | Python | gpl-3.0 | 50,903 |
# -*- coding: utf-8 -*-
from functools import wraps
from flask import abort
from flask_login import current_user
from .models import Permission
'''
如果用户不具有指定权限,则返
回 403 错误码 :HTTP“禁止”错误
'''
# Check a generic permission
def permission_required(permission):
    """Return a view decorator that aborts with HTTP 403 unless the
    current user holds ``permission``."""
    def decorator(view):
        @wraps(view)
        def wrapper(*args, **kwargs):
            # Reject the request before running the view
            if not current_user.can(permission):
                abort(403)
            return view(*args, **kwargs)
        return wrapper
    return decorator
# Check the author (article-writing) permission
def author_required(f):
    """Decorator requiring the WRITE_ARTICLES permission."""
    check = permission_required(Permission.WRITE_ARTICLES)
    return check(f)
# Check the administrator permission
def admin_required(f):
    """Decorator requiring the ADMINISTER permission."""
    check = permission_required(Permission.ADMINISTER)
    return check(f)
| staneyffer/my_blog | app/decorators.py | Python | mit | 764 |
#!/usr/bin/python3
"""
================================================
ABElectronics ADC Pi V2 8-Channel ADC
Version 1.0 Created 29/02/2015
Requires python 3 smbus to be installed
================================================
"""
class ADCPi:
    """
    Driver for the AB Electronics ADC Pi (two MCP3424 converters on one
    I2C bus) providing 8 analogue input channels.

    Channels 1-4 live on the converter at the first address, channels
    5-8 on the second.  All I2C traffic goes through the ``bus`` object
    supplied to the constructor (an smbus-compatible instance).
    """

    # internal variables
    __address = 0x68 # default address for adc 1 on adc pi and delta-sigma pi
    __address2 = 0x69 # default address for adc 2 on adc pi and delta-sigma pi
    __config1 = 0x9C # PGAx1, 18 bit, continuous conversion, channel 1
    __currentchannel1 = 1 # channel variable for adc 1
    __config2 = 0x9C # PGAx1, 18 bit, continuous conversion, channel 1
    __currentchannel2 = 1 # channel variable for adc2
    __bitrate = 18 # current bitrate
    __conversionmode = 1 # Conversion Mode
    __pga = float(0.5) # current pga setting
    __lsb = float(0.0000078125) # default lsb value for 18 bit
    # create byte array and fill with initial values to define size
    # (legacy class-level buffer; read_raw uses a local buffer instead)
    __adcreading = bytearray()
    __adcreading.append(0x00)
    __adcreading.append(0x00)
    __adcreading.append(0x00)
    __adcreading.append(0x00)

    # local methods
    def __updatebyte(self, byte, bit, value):
        # internal method for setting the value of a single bit within a
        # byte
        if value == 0:
            return byte & ~(1 << bit)
        elif value == 1:
            return byte | (1 << bit)

    def __checkbit(self, byte, bit):
        # internal method for reading the value of a single bit within a
        # byte
        bitval = ((byte & (1 << bit)) != 0)
        if (bitval == 1):
            return True
        else:
            return False

    def __twos_comp(self, val, bits):
        # interpret `val` as a two's complement number of `bits` width
        if((val & (1 << (bits - 1))) != 0):
            val = val - (1 << bits)
        return val

    def __setchannel(self, channel):
        # internal method for updating the config to the selected channel
        # (bits 5-6 of the config byte select the MCP3424 input channel)
        if channel < 5:
            if channel != self.__currentchannel1:
                if channel == 1:
                    self.__config1 = self.__updatebyte(self.__config1, 5, 0)
                    self.__config1 = self.__updatebyte(self.__config1, 6, 0)
                    self.__currentchannel1 = 1
                if channel == 2:
                    self.__config1 = self.__updatebyte(self.__config1, 5, 1)
                    self.__config1 = self.__updatebyte(self.__config1, 6, 0)
                    self.__currentchannel1 = 2
                if channel == 3:
                    self.__config1 = self.__updatebyte(self.__config1, 5, 0)
                    self.__config1 = self.__updatebyte(self.__config1, 6, 1)
                    self.__currentchannel1 = 3
                if channel == 4:
                    self.__config1 = self.__updatebyte(self.__config1, 5, 1)
                    self.__config1 = self.__updatebyte(self.__config1, 6, 1)
                    self.__currentchannel1 = 4
        else:
            if channel != self.__currentchannel2:
                if channel == 5:
                    self.__config2 = self.__updatebyte(self.__config2, 5, 0)
                    self.__config2 = self.__updatebyte(self.__config2, 6, 0)
                    self.__currentchannel2 = 5
                if channel == 6:
                    self.__config2 = self.__updatebyte(self.__config2, 5, 1)
                    self.__config2 = self.__updatebyte(self.__config2, 6, 0)
                    self.__currentchannel2 = 6
                if channel == 7:
                    self.__config2 = self.__updatebyte(self.__config2, 5, 0)
                    self.__config2 = self.__updatebyte(self.__config2, 6, 1)
                    self.__currentchannel2 = 7
                if channel == 8:
                    self.__config2 = self.__updatebyte(self.__config2, 5, 1)
                    self.__config2 = self.__updatebyte(self.__config2, 6, 1)
                    self.__currentchannel2 = 8
        return

    # init object with i2caddress, default is 0x68, 0x69 for ADCoPi board
    def __init__(self, bus, address=0x68, address2=0x69, rate=18):
        # NOTE: the original code declared `global _bus` at class scope;
        # that statement was dead (nothing assigned a module-level _bus)
        # and has been removed -- the bus is held on the instance.
        self._bus = bus
        self.__address = address
        self.__address2 = address2
        self.set_bit_rate(rate)

    def read_voltage(self, channel):
        # returns the voltage from the selected adc channel - channels 1 to
        # 8
        raw = self.read_raw(channel)
        if (self.__signbit):
            return float(0.0)  # returned a negative voltage so return 0
        else:
            # 2.471 compensates for the on-board voltage divider
            voltage = float(
                (raw * (self.__lsb / self.__pga)) * 2.471)
            return float(voltage)

    def read_raw(self, channel):
        # reads the raw value from the selected adc channel - channels 1 to 8
        h = 0
        l = 0
        m = 0
        s = 0
        # get the config and i2c address for the selected channel
        self.__setchannel(channel)
        if (channel < 5):
            config = self.__config1
            address = self.__address
        else:
            config = self.__config2
            address = self.__address2
        # if the conversion mode is set to one-shot update the ready bit to 1
        if (self.__conversionmode == 0):
            config = self.__updatebyte(config, 7, 1)
            self._bus.write_byte(address, config)
            config = self.__updatebyte(config, 7, 0)
        # keep reading the adc data until the conversion result is ready
        # (bit 7 of the status byte is cleared when the sample is valid)
        while True:
            __adcreading = self._bus.read_i2c_block_data(address, config, 4)
            if self.__bitrate == 18:
                h = __adcreading[0]
                m = __adcreading[1]
                l = __adcreading[2]
                s = __adcreading[3]
            else:
                h = __adcreading[0]
                m = __adcreading[1]
                s = __adcreading[2]
            if self.__checkbit(s, 7) == 0:
                break
        self.__signbit = False
        t = 0.0
        # extract the returned bytes and combine in the correct order,
        # masking off the sign bit and remembering it in __signbit
        if self.__bitrate == 18:
            t = ((h & 0b00000011) << 16) | (m << 8) | l
            self.__signbit = bool(self.__checkbit(t, 17))
            if self.__signbit:
                t = self.__updatebyte(t, 17, 0)
        if self.__bitrate == 16:
            t = (h << 8) | m
            self.__signbit = bool(self.__checkbit(t, 15))
            if self.__signbit:
                t = self.__updatebyte(t, 15, 0)
        if self.__bitrate == 14:
            t = ((h & 0b00111111) << 8) | m
            self.__signbit = self.__checkbit(t, 13)
            if self.__signbit:
                t = self.__updatebyte(t, 13, 0)
        if self.__bitrate == 12:
            t = ((h & 0b00001111) << 8) | m
            self.__signbit = self.__checkbit(t, 11)
            if self.__signbit:
                t = self.__updatebyte(t, 11, 0)
        return t

    def set_pga(self, gain):
        """
        PGA gain selection
        1 = 1x
        2 = 2x
        4 = 4x
        8 = 8x
        """
        if gain == 1:
            self.__config1 = self.__updatebyte(self.__config1, 0, 0)
            self.__config1 = self.__updatebyte(self.__config1, 1, 0)
            self.__config2 = self.__updatebyte(self.__config2, 0, 0)
            self.__config2 = self.__updatebyte(self.__config2, 1, 0)
            self.__pga = 0.5
        if gain == 2:
            self.__config1 = self.__updatebyte(self.__config1, 0, 1)
            self.__config1 = self.__updatebyte(self.__config1, 1, 0)
            self.__config2 = self.__updatebyte(self.__config2, 0, 1)
            self.__config2 = self.__updatebyte(self.__config2, 1, 0)
            self.__pga = 1
        if gain == 4:
            self.__config1 = self.__updatebyte(self.__config1, 0, 0)
            self.__config1 = self.__updatebyte(self.__config1, 1, 1)
            self.__config2 = self.__updatebyte(self.__config2, 0, 0)
            self.__config2 = self.__updatebyte(self.__config2, 1, 1)
            self.__pga = 2
        if gain == 8:
            self.__config1 = self.__updatebyte(self.__config1, 0, 1)
            self.__config1 = self.__updatebyte(self.__config1, 1, 1)
            self.__config2 = self.__updatebyte(self.__config2, 0, 1)
            self.__config2 = self.__updatebyte(self.__config2, 1, 1)
            self.__pga = 4
        self._bus.write_byte(self.__address, self.__config1)
        self._bus.write_byte(self.__address2, self.__config2)
        return

    def set_bit_rate(self, rate):
        """
        sample rate and resolution
        12 = 12 bit (240SPS max)
        14 = 14 bit (60SPS max)
        16 = 16 bit (15SPS max)
        18 = 18 bit (3.75SPS max)
        """
        if rate == 12:
            self.__config1 = self.__updatebyte(self.__config1, 2, 0)
            self.__config1 = self.__updatebyte(self.__config1, 3, 0)
            self.__config2 = self.__updatebyte(self.__config2, 2, 0)
            self.__config2 = self.__updatebyte(self.__config2, 3, 0)
            self.__bitrate = 12
            self.__lsb = 0.0005
        if rate == 14:
            self.__config1 = self.__updatebyte(self.__config1, 2, 1)
            self.__config1 = self.__updatebyte(self.__config1, 3, 0)
            self.__config2 = self.__updatebyte(self.__config2, 2, 1)
            self.__config2 = self.__updatebyte(self.__config2, 3, 0)
            self.__bitrate = 14
            self.__lsb = 0.000125
        if rate == 16:
            self.__config1 = self.__updatebyte(self.__config1, 2, 0)
            self.__config1 = self.__updatebyte(self.__config1, 3, 1)
            self.__config2 = self.__updatebyte(self.__config2, 2, 0)
            self.__config2 = self.__updatebyte(self.__config2, 3, 1)
            self.__bitrate = 16
            self.__lsb = 0.00003125
        if rate == 18:
            self.__config1 = self.__updatebyte(self.__config1, 2, 1)
            self.__config1 = self.__updatebyte(self.__config1, 3, 1)
            self.__config2 = self.__updatebyte(self.__config2, 2, 1)
            self.__config2 = self.__updatebyte(self.__config2, 3, 1)
            self.__bitrate = 18
            self.__lsb = 0.0000078125
        self._bus.write_byte(self.__address, self.__config1)
        self._bus.write_byte(self.__address2, self.__config2)
        return

    def set_conversion_mode(self, mode):
        """
        conversion mode for adc
        0 = One shot conversion mode
        1 = Continuous conversion mode
        """
        if (mode == 0):
            self.__config1 = self.__updatebyte(self.__config1, 4, 0)
            self.__config2 = self.__updatebyte(self.__config2, 4, 0)
            self.__conversionmode = 0
        if (mode == 1):
            self.__config1 = self.__updatebyte(self.__config1, 4, 1)
            self.__config2 = self.__updatebyte(self.__config2, 4, 1)
            self.__conversionmode = 1
        # NOTE: the new mode is applied on the next read_raw(); the
        # config byte is deliberately not written to the bus here.
        #self._bus.write_byte(self.__address, self.__config1)
        #self._bus.write_byte(self.__address2, self.__config2)
        return
#!/galaxy/home/mgehrin/hiclib/bin/python
"""
Read a maf file from stdin and write out a new maf with only blocks having all
of the required in species, after dropping any other species and removing
columns containing only gaps.
usage: %prog species,species2,... < maf
"""
import psyco_full
import bx.align.maf
import copy
import sys
from itertools import *
def main():
    """Filter a MAF stream from stdin: keep only components whose species
    (the prefix of `src` before the first '.') is in the requested list,
    drop all-gap columns, and write out blocks that still have at least
    two components."""
    wanted = sys.argv[1].split(',')
    maf_reader = bx.align.maf.Reader(sys.stdin)
    maf_writer = bx.align.maf.Writer(sys.stdout)
    for block in maf_reader:
        # keep only components belonging to one of the requested species
        block.components = [comp for comp in block.components
                            if comp.src.split('.')[0] in wanted]
        block.remove_all_gap_columns()
        # a block with fewer than two components is no longer an alignment
        if len(block.components) > 1:
            maf_writer.write(block)
    maf_reader.close()
    maf_writer.close()
if __name__ == "__main__":
    main()
| bxlab/HiFive_Paper | Scripts/HiCLib/bx-python-0.7.1/build/scripts-2.7/maf_limit_to_species.py | Python | bsd-3-clause | 953 |
# coding=utf-8
# Copyright 2022 The Uncertainty Baselines Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Hyperparameter sweep to retrain on train+val. the hyper-deep ensemble."""
DEFAULT_L2 = 1e-4
# 95% of the 50k CIFAR training images (the original train/validation split).
TRAIN_SET_SIZE = 0.95 * 50000
# NOTE: below, we normalize by TRAIN_SET_SIZE because the models from the random
# search used a custom convention for l2, normalized by the train dataset size.
# One dict per ensemble member. The first entry is a plain default-l2 baseline;
# the other three were picked from a random search (hence the 'seed' values).
SELECTED_HPS = [
    {
        'bn_l2': DEFAULT_L2,
        'input_conv_l2': DEFAULT_L2,
        'group_1_conv_l2': DEFAULT_L2,
        'group_2_conv_l2': DEFAULT_L2,
        'group_3_conv_l2': DEFAULT_L2,
        'dense_kernel_l2': DEFAULT_L2,
        'dense_bias_l2': DEFAULT_L2,
        'label_smoothing': 0.,
        'seed': 0,
    },
    {
        'bn_l2': DEFAULT_L2,
        'input_conv_l2': 0.012100236214494308 / TRAIN_SET_SIZE,
        'group_1_conv_l2': 10.78423537517882 / TRAIN_SET_SIZE,
        'group_2_conv_l2': 18.538256915276644 / TRAIN_SET_SIZE,
        'group_3_conv_l2': 22.437062519429173 / TRAIN_SET_SIZE,
        'dense_kernel_l2': 0.04348043221587187 / TRAIN_SET_SIZE,
        'dense_bias_l2': 0.4853927238831955 / TRAIN_SET_SIZE,
        'label_smoothing': 0.00001974636306895064,
        'seed': 9979,
    },
    {
        'bn_l2': DEFAULT_L2,
        'input_conv_l2': 1.8733092610235154 / TRAIN_SET_SIZE,
        'group_1_conv_l2': 2.149858832455459 / TRAIN_SET_SIZE,
        'group_2_conv_l2': 0.02283349328860761 / TRAIN_SET_SIZE,
        'group_3_conv_l2': 0.06493892708718176 / TRAIN_SET_SIZE,
        'dense_kernel_l2': 6.729067408174627 / TRAIN_SET_SIZE,
        'dense_bias_l2': 96.88491593762551 / TRAIN_SET_SIZE,
        'label_smoothing': 0.000014114745706823372,
        'seed': 3709,
    },
    {
        'bn_l2': DEFAULT_L2,
        'input_conv_l2': 0.015678701837246456 / TRAIN_SET_SIZE,
        'group_1_conv_l2': 0.4258111048922535 / TRAIN_SET_SIZE,
        'group_2_conv_l2': 7.349544939683454 / TRAIN_SET_SIZE,
        'group_3_conv_l2': 47.917791858938074 / TRAIN_SET_SIZE,
        'dense_kernel_l2': 0.09229319107759451 / TRAIN_SET_SIZE,
        'dense_bias_l2': 0.012796648147884173 / TRAIN_SET_SIZE,
        'label_smoothing': 0.000017578836421073824,
        'seed': 1497,
    },
]
def _get_domain(hyper):
    """Get hyperparemeter search domain.

    Builds one fixed-hyperparameter product per entry of SELECTED_HPS
    (plus the shared retraining settings) and chains them together.
    """
    domains = []
    for selected in SELECTED_HPS:
        fixed_settings = [
        hyper.fixed('l2', None, length=1),  # disable global l2
        hyper.fixed('train_proportion', 1.0, length=1),
        hyper.fixed('dataset', 'cifar100', length=1),
        hyper.fixed('train_epochs', 250, length=1),
        ]
        fixed_settings.extend(
            hyper.fixed(name, value, length=1)
            for name, value in selected.items())
        domains.append(hyper.product(fixed_settings))
    return hyper.chainit(domains)
def get_sweep(hyper):
    """Returns hyperparameter sweep.

    Args:
      hyper: hyperparameter sweep builder providing `fixed`, `product` and
        `chainit` (consumed by `_get_domain`).

    Returns:
      The chained hyperparameter domain built by `_get_domain`.
    """
    return _get_domain(hyper)
| google/uncertainty-baselines | baselines/cifar/hyperdeepensemble_configs/cifar100_retraining_sweep.py | Python | apache-2.0 | 3,405 |
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2012,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Wrapper to make getting a sandbox simpler."""
from aquilon.aqdb.model import Sandbox
from aquilon.worker.dbwrappers.user_principal import get_user_principal
def get_sandbox(session, logger, sandbox, query_options=None):
    """Allow an optional author field.

    `sandbox` is either a bare sandbox name, or "author/sandbox"
    (the author part may itself contain slashes; only the last path
    component is treated as the sandbox name).
    """
    author_part, slash, sandbox_name = sandbox.rpartition('/')
    if not slash:
        # No author given -- look up the sandbox alone.
        dbsandbox = Sandbox.get_unique(session, sandbox, compel=True,
                                       query_options=query_options)
        return (dbsandbox, None)
    dbsandbox = Sandbox.get_unique(session, sandbox_name, compel=True,
                                   query_options=query_options)
    dbauthor = get_user_principal(session, author_part)
    return (dbsandbox, dbauthor)
| stdweird/aquilon | lib/python2.6/aquilon/worker/dbwrappers/sandbox.py | Python | apache-2.0 | 1,542 |
# -*- coding: utf-8 -*-
#
# This file is part of PyBuilder
#
# Copyright 2011-2015 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from mockito import mock, when, verify, unstub, any as any_value
from mock import patch
from pybuilder.core import (Project,
Logger,
Dependency,
RequirementsFile)
from pybuilder.plugins.python.install_dependencies_plugin import (
install_runtime_dependencies,
install_build_dependencies,
install_dependencies,
install_dependency)
import pybuilder.plugins.python.install_dependencies_plugin
__author__ = "Alexander Metzner"
class InstallDependencyTest(unittest.TestCase):
    """Checks the exact pip command line produced by install_dependency().

    execute_command() is stubbed to return 0 (success), so every test only
    inspects the command string that would have been executed.
    """
    def setUp(self):
        self.project = Project("unittest", ".")
        self.project.set_property("dir_install_logs", "any_directory")
        self.logger = mock(Logger)
        # Stub execute_command so no real pip process is started.
        when(pybuilder.plugins.python.install_dependencies_plugin).execute_command(
            any_value(), any_value(), shell=True).thenReturn(0)
    def tearDown(self):
        unstub()
    def assert_pip_command(self, expected_command):
        # Shared assertion: execute_command must have been invoked
        # with exactly this pip command line.
        verify(pybuilder.plugins.python.install_dependencies_plugin).execute_command(
            expected_command, any_value(), shell=True)
    def stub_insecure_pip_default(self, disallows):
        # Control whether the plugin believes the installed pip rejects
        # unverified packages by default.
        when(pybuilder.plugins.python.install_dependencies_plugin)._pip_disallows_insecure_packages_by_default().thenReturn(disallows)
    def test_should_install_dependency_without_version(self):
        install_dependency(self.logger, self.project, Dependency("spam"))
        self.assert_pip_command("pip install 'spam'")
    def test_should_install_requirements_file_dependency(self):
        install_dependency(self.logger, self.project,
                           RequirementsFile("requirements.txt"))
        self.assert_pip_command("pip install '-rrequirements.txt'")
    @patch("pybuilder.plugins.python.install_dependencies_plugin.sys.platform")
    def test_should_install_dependency_without_version_on_windows_derivate(self, platform):
        # NOTE(review): sys.platform is replaced by a MagicMock here;
        # setting return_value only affects *calling* it -- confirm the
        # plugin actually takes the Windows branch under this patch.
        platform.return_value = "win32"
        install_dependency(self.logger, self.project, Dependency("spam"))
        self.assert_pip_command("pip install spam")
    def test_should_install_dependency_insecurely_when_property_is_set(self):
        self.project.set_property("install_dependencies_insecure_installation", ["spam"])
        self.stub_insecure_pip_default(True)
        install_dependency(self.logger, self.project, Dependency("spam"))
        self.assert_pip_command("pip install --allow-unverified spam --allow-external spam 'spam'")
    def test_should_install_dependency_securely_when_property_is_not_set_to_dependency(self):
        self.project.set_property("install_dependencies_insecure_installation", ["some-other-dependency"])
        self.stub_insecure_pip_default(True)
        install_dependency(self.logger, self.project, Dependency("spam"))
        # some-other-dependency might be a dependency of 'spam'
        # so we always have to put the insecure dependencies in the command line :-(
        self.assert_pip_command(
            "pip install --allow-unverified some-other-dependency --allow-external some-other-dependency 'spam'")
    def test_should_not_use_insecure_flags_when_pip_version_is_too_low(self):
        self.project.set_property("install_dependencies_insecure_installation", ["spam"])
        self.stub_insecure_pip_default(False)
        install_dependency(self.logger, self.project, Dependency("spam"))
        self.assert_pip_command("pip install 'spam'")
    def test_should_install_dependency_using_custom_index_url(self):
        self.project.set_property("install_dependencies_index_url", "some_index_url")
        install_dependency(self.logger, self.project, Dependency("spam"))
        self.assert_pip_command("pip install --index-url some_index_url 'spam'")
    def test_should_not_use_extra_index_url_when_index_url_is_not_set(self):
        self.project.set_property("install_dependencies_extra_index_url", "some_index_url")
        install_dependency(self.logger, self.project, Dependency("spam"))
        self.assert_pip_command("pip install 'spam'")
    def test_should_not_use_index_and_extra_index_url_when_index_and_extra_index_url_are_set(self):
        self.project.set_property("install_dependencies_index_url", "some_index_url")
        self.project.set_property("install_dependencies_extra_index_url", "some_extra_index_url")
        install_dependency(self.logger, self.project, Dependency("spam"))
        self.assert_pip_command(
            "pip install --index-url some_index_url --extra-index-url some_extra_index_url 'spam'")
    def test_should_upgrade_dependencies(self):
        self.project.set_property("install_dependencies_upgrade", True)
        install_dependency(self.logger, self.project, Dependency("spam"))
        self.assert_pip_command("pip install --upgrade 'spam'")
    def test_should_install_dependency_with_version(self):
        install_dependency(self.logger, self.project, Dependency("spam", "0.1.2"))
        self.assert_pip_command("pip install 'spam>=0.1.2'")
    def test_should_install_dependency_with_version_and_operator(self):
        install_dependency(self.logger, self.project, Dependency("spam", "==0.1.2"))
        self.assert_pip_command("pip install 'spam==0.1.2'")
    def test_should_install_dependency_with_url(self):
        install_dependency(self.logger, self.project, Dependency("spam", url="some_url"))
        # a URL takes precedence over the package name
        self.assert_pip_command("pip install 'some_url'")
    def test_should_install_dependency_with_url_even_if_version_is_given(self):
        install_dependency(self.logger, self.project,
                           Dependency("spam", version="0.1.2", url="some_url"))
        self.assert_pip_command("pip install 'some_url'")
class InstallRuntimeDependenciesTest(unittest.TestCase):
    """Checks that install_runtime_dependencies() installs every runtime
    dependency declared on the project (packages and requirements files)."""
    def setUp(self):
        self.project = Project("unittest", ".")
        self.project.set_property("dir_install_logs", "any_directory")
        self.logger = mock(Logger)
        # Stub execute_command so no real pip process is started.
        when(pybuilder.plugins.python.install_dependencies_plugin).execute_command(
            any_value(), any_value(), shell=True).thenReturn(0)
    def tearDown(self):
        unstub()
    def assert_pip_command(self, expected_command):
        verify(pybuilder.plugins.python.install_dependencies_plugin).execute_command(
            expected_command, any_value(), shell=True)
    def test_should_install_multiple_dependencies(self):
        self.project.depends_on("spam")
        self.project.depends_on("eggs")
        self.project.depends_on_requirements("requirements.txt")
        install_runtime_dependencies(self.logger, self.project)
        self.assert_pip_command("pip install 'spam'")
        self.assert_pip_command("pip install 'eggs'")
        self.assert_pip_command("pip install '-rrequirements.txt'")
class InstallBuildDependenciesTest(unittest.TestCase):
    """Checks that install_build_dependencies() installs every build-time
    dependency declared on the project (packages and requirements files)."""
    def setUp(self):
        self.project = Project("unittest", ".")
        self.project.set_property("dir_install_logs", "any_directory")
        self.logger = mock(Logger)
        # Stub execute_command so no real pip process is started.
        when(pybuilder.plugins.python.install_dependencies_plugin).execute_command(
            any_value(), any_value(), shell=True).thenReturn(0)
    def tearDown(self):
        unstub()
    def assert_pip_command(self, expected_command):
        verify(pybuilder.plugins.python.install_dependencies_plugin).execute_command(
            expected_command, any_value(), shell=True)
    def test_should_install_multiple_dependencies(self):
        self.project.build_depends_on("spam")
        self.project.build_depends_on("eggs")
        self.project.build_depends_on_requirements("requirements-dev.txt")
        install_build_dependencies(self.logger, self.project)
        self.assert_pip_command("pip install 'spam'")
        self.assert_pip_command("pip install 'eggs'")
        self.assert_pip_command("pip install '-rrequirements-dev.txt'")
class InstallDependenciesTest(unittest.TestCase):
    """Checks that install_dependencies() installs both runtime and
    build-time dependencies in one pass."""
    def setUp(self):
        self.project = Project("unittest", ".")
        self.project.set_property("dir_install_logs", "any_directory")
        self.logger = mock(Logger)
        # Stub execute_command so no real pip process is started.
        when(pybuilder.plugins.python.install_dependencies_plugin).execute_command(
            any_value(), any_value(), shell=True).thenReturn(0)
    def tearDown(self):
        unstub()
    def assert_pip_command(self, expected_command):
        verify(pybuilder.plugins.python.install_dependencies_plugin).execute_command(
            expected_command, any_value(), shell=True)
    def test_should_install_single_dependency_without_version(self):
        # NOTE(review): despite the name, this installs one runtime and
        # one build dependency and verifies both.
        self.project.depends_on("spam")
        self.project.build_depends_on("eggs")
        install_dependencies(self.logger, self.project)
        self.assert_pip_command("pip install 'spam'")
        self.assert_pip_command("pip install 'eggs'")
| Danielweber7624/pybuilder | src/unittest/python/plugins/python/install_dependencies_plugin_tests.py | Python | apache-2.0 | 12,566 |
import requests
import json
from config import DEBUG
def get(url, query_data=None, headers=None, return_json=False):
res = requests.get(url, params=query_data, headers=headers)
if DEBUG:
print res.status_code
if return_json is True:
return res.json()
else:
return res.text
def post(url, data=None, headers=None, return_json=False):
    """HTTP POST with a JSON-encoded body; returns parsed JSON or body text."""
    body = json.dumps(data)
    response = requests.post(url, data=body, headers=headers)
    return response.json() if return_json is True else response.text
def put(url, data=None, headers=None, return_json=False):
    """HTTP PUT with a JSON-encoded body; returns parsed JSON or body text."""
    body = json.dumps(data)
    response = requests.put(url, data=body, headers=headers)
    return response.json() if return_json is True else response.text
def delete(url, data=None, headers=None, return_json=False):
    """HTTP DELETE with a JSON-encoded body; returns parsed JSON or body text."""
    body = json.dumps(data)
    response = requests.delete(url, data=body, headers=headers)
    return response.json() if return_json is True else response.text
#!/usr/bin/env python
############################################################################
#
# Copyright (c) 2012-2017 PX4 Development Team. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# 3. Neither the name PX4 nor the names of its contributors may be
# used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
############################################################################
#
# Serial firmware uploader for the PX4FMU bootloader
#
# The PX4 firmware file is a JSON-encoded Python object, containing
# metadata fields and a zlib-compressed base64-encoded firmware image.
#
# The uploader uses the following fields from the firmware file:
#
# image
# The firmware that will be uploaded.
# image_size
# The size of the firmware in bytes.
# board_id
# The board for which the firmware is intended.
# board_revision
# Currently only used for informational purposes.
#
# AP_FLAKE8_CLEAN
# for python2.7 compatibility
from __future__ import print_function
import sys
import argparse
import binascii
import serial
import struct
import json
import zlib
import base64
import time
import array
import os
import platform
import re
from sys import platform as _platform
# Windows Subsystem for Linux: the kernel release string contains "Microsoft"
is_WSL = bool("Microsoft" in platform.uname()[2])
# default list of port names to look for autopilots
default_ports = ['/dev/serial/by-id/usb-Ardu*',
                 '/dev/serial/by-id/usb-3D*',
                 '/dev/serial/by-id/usb-APM*',
                 '/dev/serial/by-id/usb-Radio*',
                 '/dev/serial/by-id/usb-*_3DR_*',
                 '/dev/serial/by-id/usb-Hex_Technology_Limited*',
                 '/dev/serial/by-id/usb-Hex_ProfiCNC*',
                 '/dev/serial/by-id/usb-Holybro*',
                 '/dev/serial/by-id/usb-mRo*',
                 '/dev/serial/by-id/usb-modalFC*',
                 '/dev/serial/by-id/usb-*-BL_*',
                 '/dev/serial/by-id/usb-*_BL_*',
                 '/dev/tty.usbmodem*']
# cygwin and WSL expose serial devices as /dev/ttyS*
if "cygwin" in _platform or is_WSL:
    default_ports += ['/dev/ttyS*']
# Detect python version
if sys.version_info[0] < 3:
    runningPython3 = False
else:
    runningPython3 = True
# dictionary of bootloader {boardID: (firmware boardID, boardname), ...}
# designating firmware builds compatible with multiple boardIDs
compatible_IDs = {33: (9, 'AUAVX2.1')}
# CRC equivalent to crc_crc32() in AP_Math/crc.cpp
# (the standard reflected CRC-32 table, polynomial 0xEDB88320 -- generated
#  at import time instead of being spelled out as a 256-entry literal)
def _build_crc32_table():
    """Return the 256-entry CRC-32 lookup table as an array of uint32."""
    table = array.array('I')
    for index in range(256):
        value = index
        for _ in range(8):
            if value & 1:
                value = (value >> 1) ^ 0xedb88320
            else:
                value >>= 1
        table.append(value)
    return table

crctab = _build_crc32_table()
def crc32(bytes, state=0):
    '''crc32 exposed for use by chibios.py

    Standard table-driven CRC-32 update over an iterable of byte values,
    starting from (and returning) the running state.
    '''
    for byte in bytes:
        state = crctab[(state ^ byte) & 0xff] ^ (state >> 8)
    return state
class firmware(object):
    '''Loads a firmware file

    The file is JSON containing metadata fields plus a zlib-compressed,
    base64-encoded firmware image (and optionally an external-flash image).
    Images are padded with 0xff bytes to a multiple of 4.
    '''

    # parsed JSON metadata of the loaded file
    desc = {}
    # firmware image bytes (padded to a 4-byte multiple)
    image = bytes()
    # padding word used when extending the CRC up to the full flash size
    crcpad = bytearray(b'\xff\xff\xff\xff')

    def __init__(self, path):
        # read and parse the JSON wrapper (with-block guarantees close)
        with open(path, "r") as f:
            self.desc = json.load(f)

        self.image = bytearray(zlib.decompress(base64.b64decode(self.desc['image'])))
        if 'extf_image' in self.desc:
            self.extf_image = bytearray(zlib.decompress(base64.b64decode(self.desc['extf_image'])))
        else:
            self.extf_image = None

        # pad image to 4-byte length
        # (bytearray.append() requires an int; the previous '\xff' string
        #  argument raised TypeError on Python 3)
        while ((len(self.image) % 4) != 0):
            self.image.append(0xff)

        # pad external flash image to 4-byte length
        if self.extf_image is not None:
            while ((len(self.extf_image) % 4) != 0):
                self.extf_image.append(0xff)

    def property(self, propname, default=None):
        """Return metadata field *propname*, or *default* if absent."""
        if propname in self.desc:
            return self.desc[propname]
        return default

    def extf_crc(self, size):
        """CRC-32 of the first *size* bytes of the external-flash image."""
        state = crc32(self.extf_image[:size], int(0))
        return state

    def crc(self, padlen):
        """CRC-32 of the image, extended with 0xff padding up to *padlen* bytes."""
        state = crc32(self.image, int(0))
        for i in range(len(self.image), (padlen - 1), 4):
            state = crc32(self.crcpad, state)
        return state
class uploader(object):
'''Uploads a firmware file to the PX FMU bootloader'''
# protocol bytes
INSYNC = b'\x12'
EOC = b'\x20'
# reply bytes
OK = b'\x10'
FAILED = b'\x11'
INVALID = b'\x13' # rev3+
BAD_SILICON_REV = b'\x14' # rev5+
# command bytes
NOP = b'\x00' # guaranteed to be discarded by the bootloader
GET_SYNC = b'\x21'
GET_DEVICE = b'\x22'
CHIP_ERASE = b'\x23'
CHIP_VERIFY = b'\x24' # rev2 only
PROG_MULTI = b'\x27'
READ_MULTI = b'\x28' # rev2 only
GET_CRC = b'\x29' # rev3+
GET_OTP = b'\x2a' # rev4+ , get a word from OTP area
GET_SN = b'\x2b' # rev4+ , get a word from SN area
GET_CHIP = b'\x2c' # rev5+ , get chip version
SET_BOOT_DELAY = b'\x2d' # rev5+ , set boot delay
GET_CHIP_DES = b'\x2e' # rev5+ , get chip description in ASCII
MAX_DES_LENGTH = 20
REBOOT = b'\x30'
SET_BAUD = b'\x33' # set baud
EXTF_ERASE = b'\x34' # erase sectors from external flash
EXTF_PROG_MULTI = b'\x35' # write bytes at external flash program address and increment
EXTF_READ_MULTI = b'\x36' # read bytes at address and increment
EXTF_GET_CRC = b'\x37' # compute & return a CRC of data in external flash
INFO_BL_REV = b'\x01' # bootloader protocol revision
BL_REV_MIN = 2 # minimum supported bootloader protocol
BL_REV_MAX = 5 # maximum supported bootloader protocol
INFO_BOARD_ID = b'\x02' # board type
INFO_BOARD_REV = b'\x03' # board revision
INFO_FLASH_SIZE = b'\x04' # max firmware size in bytes
INFO_EXTF_SIZE = b'\x06' # available external flash size
PROG_MULTI_MAX = 252 # protocol max is 255, must be multiple of 4
READ_MULTI_MAX = 252 # protocol max is 255
NSH_INIT = bytearray(b'\x0d\x0d\x0d')
NSH_REBOOT_BL = b"reboot -b\n"
NSH_REBOOT = b"reboot\n"
def __init__(self,
portname,
baudrate_bootloader,
baudrate_flightstack,
baudrate_bootloader_flash=None,
target_system=None,
target_component=None,
source_system=None,
source_component=None,
no_extf=False):
self.MAVLINK_REBOOT_ID1 = bytearray(b'\xfe\x21\x72\xff\x00\x4c\x00\x00\x40\x40\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf6\x00\x01\x00\x00\x53\x6b') # NOQA
self.MAVLINK_REBOOT_ID0 = bytearray(b'\xfe\x21\x45\xff\x00\x4c\x00\x00\x40\x40\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf6\x00\x00\x00\x00\xcc\x37') # NOQA
if target_component is None:
target_component = 1
if source_system is None:
source_system = 255
if source_component is None:
source_component = 1
self.no_extf = no_extf
# open the port, keep the default timeout short so we can poll quickly
self.port = serial.Serial(portname, baudrate_bootloader, timeout=2.0)
self.baudrate_bootloader = baudrate_bootloader
if baudrate_bootloader_flash is not None:
self.baudrate_bootloader_flash = baudrate_bootloader_flash
else:
self.baudrate_bootloader_flash = self.baudrate_bootloader
self.baudrate_flightstack = baudrate_flightstack
self.baudrate_flightstack_idx = -1
# generate mavlink reboot message:
if target_system is not None:
from pymavlink import mavutil
m = mavutil.mavlink.MAVLink_command_long_message(
target_system,
target_component,
mavutil.mavlink.MAV_CMD_PREFLIGHT_REBOOT_SHUTDOWN,
1, # confirmation
3, # remain in bootloader
0,
0,
0,
0,
0,
0)
mav = mavutil.mavlink.MAVLink(self,
srcSystem=source_system,
srcComponent=source_component)
self.MAVLINK_REBOOT_ID1 = m.pack(mav)
self.MAVLINK_REBOOT_ID0 = None
def close(self):
if self.port is not None:
self.port.close()
def open(self):
timeout = time.time() + 0.2
# Attempt to open the port while it exists and until timeout occurs
while self.port is not None:
portopen = True
try:
portopen = self.port.is_open
except AttributeError:
portopen = self.port.isOpen()
if not portopen and time.time() < timeout:
try:
self.port.open()
except OSError:
# wait for the port to be ready
time.sleep(0.04)
except serial.SerialException:
# if open fails, try again later
time.sleep(0.04)
else:
break
def __send(self, c):
self.port.write(c)
def __recv(self, count=1):
c = self.port.read(count)
if len(c) < 1:
raise RuntimeError("timeout waiting for data (%u bytes)" % count)
# print("recv " + binascii.hexlify(c))
return c
def __recv_int(self):
raw = self.__recv(4)
val = struct.unpack("<I", raw)
return val[0]
def __recv_uint8(self):
raw = self.__recv(1)
val = struct.unpack("<B", raw)
return val[0]
def __getSync(self):
self.port.flush()
c = bytes(self.__recv())
if c != self.INSYNC:
raise RuntimeError("unexpected %s instead of INSYNC" % c)
c = self.__recv()
if c == self.INVALID:
raise RuntimeError("bootloader reports INVALID OPERATION")
if c == self.FAILED:
raise RuntimeError("bootloader reports OPERATION FAILED")
if c != self.OK:
raise RuntimeError("unexpected response 0x%x instead of OK" % ord(c))
# attempt to get back into sync with the bootloader
def __sync(self):
# send a stream of ignored bytes longer than the longest possible conversation
# that we might still have in progress
# self.__send(uploader.NOP * (uploader.PROG_MULTI_MAX + 2))
self.port.flushInput()
self.__send(uploader.GET_SYNC +
uploader.EOC)
self.__getSync()
def __trySync(self):
try:
self.port.flush()
if (self.__recv() != self.INSYNC):
# print("unexpected 0x%x instead of INSYNC" % ord(c))
return False
c = self.__recv()
if (c == self.BAD_SILICON_REV):
raise NotImplementedError()
if (c != self.OK):
# print("unexpected 0x%x instead of OK" % ord(c))
return False
return True
except NotImplementedError:
raise RuntimeError("Programing not supported for this version of silicon!\n"
"See https://pixhawk.org/help/errata")
except RuntimeError:
# timeout, no response yet
return False
# send the GET_DEVICE command and wait for an info parameter
def __getInfo(self, param):
self.__send(uploader.GET_DEVICE + param + uploader.EOC)
value = self.__recv_int()
self.__getSync()
return value
# send the GET_OTP command and wait for an info parameter
def __getOTP(self, param):
t = struct.pack("I", param) # int param as 32bit ( 4 byte ) char array.
self.__send(uploader.GET_OTP + t + uploader.EOC)
value = self.__recv(4)
self.__getSync()
return value
# send the GET_SN command and wait for an info parameter
def __getSN(self, param):
t = struct.pack("I", param) # int param as 32bit ( 4 byte ) char array.
self.__send(uploader.GET_SN + t + uploader.EOC)
value = self.__recv(4)
self.__getSync()
return value
# send the GET_CHIP command
def __getCHIP(self):
self.__send(uploader.GET_CHIP + uploader.EOC)
value = self.__recv_int()
self.__getSync()
return value
# send the GET_CHIP command
def __getCHIPDes(self):
self.__send(uploader.GET_CHIP_DES + uploader.EOC)
length = self.__recv_int()
value = self.__recv(length)
self.__getSync()
if runningPython3:
value = value.decode('ascii')
peices = value.split(",")
return peices
def __drawProgressBar(self, label, progress, maxVal):
if maxVal < progress:
progress = maxVal
percent = (float(progress) / float(maxVal)) * 100.0
sys.stdout.write("\r%s: [%-20s] %.1f%%" % (label, '='*int(percent/5.0), percent))
sys.stdout.flush()
# send the CHIP_ERASE command and wait for the bootloader to become ready
def __erase(self, label):
print("\n", end='')
self.__send(uploader.CHIP_ERASE +
uploader.EOC)
# erase is very slow, give it 20s
deadline = time.time() + 20.0
while time.time() < deadline:
# Draw progress bar (erase usually takes about 9 seconds to complete)
estimatedTimeRemaining = deadline-time.time()
if estimatedTimeRemaining >= 9.0:
self.__drawProgressBar(label, 20.0-estimatedTimeRemaining, 9.0)
else:
self.__drawProgressBar(label, 10.0, 10.0)
sys.stdout.write(" (timeout: %d seconds) " % int(deadline-time.time()))
sys.stdout.flush()
if self.__trySync():
self.__drawProgressBar(label, 10.0, 10.0)
return
raise RuntimeError("timed out waiting for erase")
# send a PROG_MULTI command to write a collection of bytes
def __program_multi(self, data):
    """Write one chunk of *data* to internal flash via PROG_MULTI."""
    if runningPython3:
        count = len(data).to_bytes(1, byteorder='big')
    else:
        count = chr(len(data))
    # command byte, one-byte length, payload, end-of-command marker
    for part in (uploader.PROG_MULTI, count, data, uploader.EOC):
        self.__send(part)
    self.__getSync()
# send a PROG_EXTF_MULTI command to write a collection of bytes to external flash
def __program_multi_extf(self, data):
    """Write one chunk of *data* to external flash via EXTF_PROG_MULTI."""
    if runningPython3:
        count = len(data).to_bytes(1, byteorder='big')
    else:
        count = chr(len(data))
    # command byte, one-byte length, payload, end-of-command marker
    for part in (uploader.EXTF_PROG_MULTI, count, data, uploader.EOC):
        self.__send(part)
    self.__getSync()
# verify multiple bytes in flash
def __verify_multi(self, data):
    """Read back len(data) bytes from flash and compare against *data*.

    Returns True on match, False (after printing both values) on mismatch.
    """
    if runningPython3:
        length = len(data).to_bytes(1, byteorder='big')
    else:
        length = chr(len(data))
    self.__send(uploader.READ_MULTI)
    self.__send(length)
    self.__send(uploader.EOC)
    self.port.flush()
    programmed = self.__recv(len(data))
    if programmed != data:
        # binascii.hexlify returns bytes on Python 3; the previous
        # str + bytes concatenation raised TypeError instead of printing
        # the diagnostic.  decode() is a no-op-safe fix on Python 2 too.
        print("got " + binascii.hexlify(programmed).decode('ascii'))
        print("expect " + binascii.hexlify(data).decode('ascii'))
        return False
    self.__getSync()
    return True
# read multiple bytes from flash
def __read_multi(self, length):
    """Read *length* bytes from flash at the current read pointer."""
    if runningPython3:
        encoded_len = length.to_bytes(1, byteorder='big')
    else:
        encoded_len = chr(length)
    # same wire format as before: READ_MULTI, 1-byte count, EOC
    self.__send(uploader.READ_MULTI + encoded_len + uploader.EOC)
    self.port.flush()
    data = self.__recv(length)
    self.__getSync()
    return data
# send the reboot command
def __reboot(self):
    """Ask the bootloader to reboot into the freshly flashed firmware."""
    self.__send(uploader.REBOOT +
                uploader.EOC)
    self.port.flush()
    # v3+ can report failure if the first word flash fails
    if self.bl_rev >= 3:
        self.__getSync()
# split a sequence into a list of size-constrained pieces
def __split_len(self, seq, length):
    """Return *seq* cut into consecutive slices of at most *length* items."""
    return [seq[start:start + length]
            for start in range(0, len(seq), length)]
# upload code
def __program(self, label, fw):
    """Program the firmware image held in *fw* into internal flash.

    Draws a throttled progress bar while streaming PROG_MULTI chunks.
    """
    print("\n", end='')
    code = fw.image
    groups = self.__split_len(code, uploader.PROG_MULTI_MAX)
    uploadProgress = 0
    # loop variable renamed from 'bytes', which shadowed the builtin type
    for chunk in groups:
        self.__program_multi(chunk)
        # Print upload progress (throttled, so it does not delay upload progress)
        uploadProgress += 1
        if uploadProgress % 256 == 0:
            self.__drawProgressBar(label, uploadProgress, len(groups))
    self.__drawProgressBar(label, 100, 100)
# download code
def __download(self, label, fw):
    """Download the board's flash contents into the file at path *fw*."""
    print("\n", end='')
    downloadProgress = 0
    readsize = uploader.READ_MULTI_MAX
    total = 0
    # context manager ensures the output file is closed even if a
    # serial read raises mid-transfer (the old code leaked the handle)
    with open(fw, 'wb') as f:
        while True:
            n = min(self.fw_maxsize - total, readsize)
            bb = self.__read_multi(n)
            f.write(bb)
            total += len(bb)
            # Print download progress (throttled, so it does not delay
            # download progress)
            downloadProgress += 1
            if downloadProgress % 256 == 0:
                self.__drawProgressBar(label, total, self.fw_maxsize)
            # a short read means we've reached the end of flash
            if len(bb) < readsize:
                break
    self.__drawProgressBar(label, total, self.fw_maxsize)
    print("\nReceived %u bytes to %s" % (total, fw))
# verify code
def __verify_v2(self, label, fw):
    """Verify flash by reading back every chunk (bootloader protocol v2).

    Raises RuntimeError on the first mismatching chunk.
    """
    print("\n", end='')
    self.__send(uploader.CHIP_VERIFY +
                uploader.EOC)
    self.__getSync()
    code = fw.image
    groups = self.__split_len(code, uploader.READ_MULTI_MAX)
    verifyProgress = 0
    # loop variable renamed from 'bytes', which shadowed the builtin type
    for chunk in groups:
        verifyProgress += 1
        if verifyProgress % 256 == 0:
            self.__drawProgressBar(label, verifyProgress, len(groups))
        if (not self.__verify_multi(chunk)):
            raise RuntimeError("Verification failed")
    self.__drawProgressBar(label, 100, 100)
def __verify_v3(self, label, fw):
    """Verify flash contents by comparing CRCs (bootloader protocol v3+)."""
    print("\n", end='')
    self.__drawProgressBar(label, 1, 100)
    crc_expected = fw.crc(self.fw_maxsize)
    self.__send(uploader.GET_CRC + uploader.EOC)
    crc_reported = self.__recv_int()
    self.__getSync()
    if crc_reported != crc_expected:
        print("Expected 0x%x" % crc_expected)
        print("Got 0x%x" % crc_reported)
        raise RuntimeError("Program CRC failed")
    self.__drawProgressBar(label, 100, 100)
def __set_boot_delay(self, boot_delay):
    """Store *boot_delay* (signed byte, seconds) in flash via SET_BOOT_DELAY."""
    request = uploader.SET_BOOT_DELAY + struct.pack("b", boot_delay) + uploader.EOC
    self.__send(request)
    self.__getSync()
def __setbaud(self, baud):
    """Ask the bootloader to switch its serial link to *baud*."""
    request = uploader.SET_BAUD + struct.pack("I", baud) + uploader.EOC
    self.__send(request)
    self.__getSync()
def erase_extflash(self, label, size):
    """Erase *size* bytes of external flash, drawing progress reports.

    The bootloader streams percentage bytes up to ~90%, after which we
    poll for sync to detect completion.
    """
    # Encode the size as 4 bytes little-endian on both Python versions.
    # The old Python 2 branch used chr(size), which raised for any
    # size >= 256 and sent a single byte instead of the four the
    # bootloader expects; struct.pack("<I", ...) matches the Python 3
    # int.to_bytes(4, 'little') encoding exactly.
    size_bytes = struct.pack("<I", size)
    self.__send(uploader.EXTF_ERASE + size_bytes + uploader.EOC)
    self.__getSync()
    last_pct = 0
    while True:
        if last_pct < 90:
            pct = self.__recv_uint8()
            if last_pct != pct:
                self.__drawProgressBar(label, pct, 100)
                last_pct = pct
        elif self.__trySync():
            self.__drawProgressBar(label, 10.0, 10.0)
            return
def __program_extf(self, label, fw):
    """Program the external-flash image from *fw*, chunk by chunk."""
    print("\n", end='')
    code = fw.extf_image
    groups = self.__split_len(code, uploader.PROG_MULTI_MAX)
    uploadProgress = 0
    # loop variable renamed from 'bytes', which shadowed the builtin type
    for chunk in groups:
        self.__program_multi_extf(chunk)
        # Print upload progress (throttled, so it does not delay upload progress)
        uploadProgress += 1
        if uploadProgress % 32 == 0:
            self.__drawProgressBar(label, uploadProgress, len(groups))
    self.__drawProgressBar(label, 100, 100)
def __verify_extf(self, label, fw, size):
    """Verify external flash by comparing CRCs over the first *size* bytes."""
    # 4-byte little-endian size on both Python versions; the former
    # Python 2 branch sent a single chr(size) byte, which is wrong for
    # any size >= 256 (and raises for those values anyway).
    size_bytes = struct.pack("<I", size)
    print("\n", end='')
    self.__drawProgressBar(label, 1, 100)
    expect_crc = fw.extf_crc(size)
    self.__send(uploader.EXTF_GET_CRC +
                size_bytes + uploader.EOC)
    report_crc = self.__recv_int()
    self.__getSync()
    if report_crc != expect_crc:
        print("\nExpected 0x%x" % expect_crc)
        print("Got 0x%x" % report_crc)
        raise RuntimeError("Program CRC failed")
    self.__drawProgressBar(label, 100, 100)
# get basic data about the board
def identify(self):
    """Query the bootloader and populate board identification fields.

    Sets bl_rev, board_type, board_rev, fw_maxsize and extf_maxsize.
    Raises RuntimeError if the bootloader protocol revision is outside
    the supported [BL_REV_MIN, BL_REV_MAX] range.
    """
    # make sure we are in sync before starting
    self.__sync()
    # get the bootloader protocol ID first
    self.bl_rev = self.__getInfo(uploader.INFO_BL_REV)
    if (self.bl_rev < uploader.BL_REV_MIN) or (self.bl_rev > uploader.BL_REV_MAX):
        print("Unsupported bootloader protocol %d" % self.bl_rev)
        raise RuntimeError("Bootloader protocol mismatch")
    self.board_type = self.__getInfo(uploader.INFO_BOARD_ID)
    self.board_rev = self.__getInfo(uploader.INFO_BOARD_REV)
    self.fw_maxsize = self.__getInfo(uploader.INFO_FLASH_SIZE)
    if self.no_extf:
        self.extf_maxsize = 0
    else:
        try:
            self.extf_maxsize = self.__getInfo(uploader.INFO_EXTF_SIZE)
        except Exception:
            # older bootloaders do not implement INFO_EXTF_SIZE
            print("Could not get external flash size, assuming 0")
            self.extf_maxsize = 0
def dump_board_info(self):
    """Print human-readable bootloader/board information to stdout.

    Assumes identify() has already populated bl_rev, board_type,
    board_rev, fw_maxsize and extf_maxsize.
    """
    # OTP added in v4:
    print("Bootloader Protocol: %u" % self.bl_rev)
    if self.bl_rev > 3:
        otp = b''
        # read the 192-byte (32*6) OTP area one 4-byte word at a time
        for byte in range(0, 32*6, 4):
            x = self.__getOTP(byte)
            otp = otp + x
            # print(binascii.hexlify(x).decode('Latin-1') + ' ', end='')
        # see src/modules/systemlib/otp.h in px4 code:
        otp_id = otp[0:4]
        otp_idtype = otp[4:5]
        otp_vid = otp[8:4:-1]     # vendor id, byte-reversed
        otp_pid = otp[12:8:-1]    # product id, byte-reversed
        otp_coa = otp[32:160]     # certificate-of-authenticity blob
        # show user:
        try:
            print("OTP:")
            print(" type: " + otp_id.decode('Latin-1'))
            print(" idtype: " + binascii.b2a_qp(otp_idtype).decode('Latin-1'))
            print(" vid: " + binascii.hexlify(otp_vid).decode('Latin-1'))
            print(" pid: " + binascii.hexlify(otp_pid).decode('Latin-1'))
            print(" coa: " + binascii.b2a_base64(otp_coa).decode('Latin-1'), end='')
            print(" sn: ", end='')
            # the serial number is three 4-byte words
            for byte in range(0, 12, 4):
                x = self.__getSN(byte)
                x = x[::-1]  # reverse the bytes
                print(binascii.hexlify(x).decode('Latin-1'), end='')  # show user
            print('')
        except Exception:
            # ignore bad character encodings
            pass
    if self.bl_rev >= 5:
        des = self.__getCHIPDes()
        if (len(des) == 2):
            print("ChipDes:")
            print(" family: %s" % des[0])
            print(" revision: %s" % des[1])
    print("Chip:")
    if self.bl_rev > 4:
        chip = self.__getCHIP()
        mcu_id = chip & 0xfff
        revs = {}
        # STM32 device-id -> family name tables
        F4_IDS = {
            0x413: "STM32F40x_41x",
            0x419: "STM32F42x_43x",
            0x421: "STM32F42x_446xx",
        }
        F7_IDS = {
            0x449: "STM32F74x_75x",
            0x451: "STM32F76x_77x",
        }
        H7_IDS = {
            0x450: "STM32H74x_75x",
        }
        # NOTE(review): identical to mcu_id above (chip & 0xfff); the
        # second mask is redundant but harmless
        family = mcu_id & 0xfff
        if family in F4_IDS:
            mcu = F4_IDS[family]
            MCU_REV_STM32F4_REV_A = 0x1000
            MCU_REV_STM32F4_REV_Z = 0x1001
            MCU_REV_STM32F4_REV_Y = 0x1003
            MCU_REV_STM32F4_REV_1 = 0x1007
            MCU_REV_STM32F4_REV_3 = 0x2001
            # (revision letter, has-1M-flash-flaw) keyed by silicon rev id
            revs = {
                MCU_REV_STM32F4_REV_A: ("A", True),
                MCU_REV_STM32F4_REV_Z: ("Z", True),
                MCU_REV_STM32F4_REV_Y: ("Y", True),
                MCU_REV_STM32F4_REV_1: ("1", True),
                MCU_REV_STM32F4_REV_3: ("3", False),
            }
            # silicon revision lives in the top 16 bits of the chip word
            rev = (chip & 0xFFFF0000) >> 16
            if rev in revs:
                (label, flawed) = revs[rev]
                if flawed and family == 0x419:
                    print(" %x %s rev%s (flawed; 1M limit, see STM32F42XX Errata sheet sec. 2.1.10)" %
                          (chip, mcu, label,))
                elif family == 0x419:
                    print(" %x %s rev%s (no 1M flaw)" % (chip, mcu, label,))
                else:
                    print(" %x %s rev%s" % (chip, mcu, label,))
        elif family in F7_IDS:
            print(" %s %08x" % (F7_IDS[family], chip))
        elif family in H7_IDS:
            print(" %s %08x" % (H7_IDS[family], chip))
    else:
        print(" [unavailable; bootloader too old]")
    print("Info:")
    print(" flash size: %u" % self.fw_maxsize)
    print(" ext flash size: %u" % self.extf_maxsize)
    name = self.board_name_for_board_id(self.board_type)
    if name is not None:
        print(" board_type: %u (%s)" % (self.board_type, name))
    else:
        print(" board_type: %u" % self.board_type)
    print(" board_rev: %u" % self.board_rev)
    print("Identification complete")
def board_name_for_board_id(self, board_id):
    '''return name for board_id, None if it can't be found'''
    # ids shared by several boards get a fixed name
    shared_ids = {
        9: "fmuv3",
        50: "fmuv5",
    }
    if board_id in shared_ids:
        return shared_ids[board_id]
    try:
        ret = []
        hwdef_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                 "..", "..", "libraries", "AP_HAL_ChibiOS", "hwdef")
        # uploader.py is swiped into other places, so if the dir
        # doesn't exist then fail silently
        if os.path.exists(hwdef_dir):
            dirs = [x if (x not in ["scripts", "common", "STM32CubeConf"] and os.path.isdir(os.path.join(hwdef_dir, x))) else None for x in os.listdir(hwdef_dir)]  # NOQA
            for adir in dirs:
                if adir is None:
                    continue
                filepath = os.path.join(hwdef_dir, adir, "hwdef.dat")
                if not os.path.exists(filepath):
                    continue
                # use a context manager so each hwdef file is closed;
                # the old code leaked one handle per board directory and
                # contained a dead "if fh is None" check (open() raises,
                # it never returns None)
                with open(filepath) as fh:
                    for line in fh:
                        m = re.match(r"^\s*APJ_BOARD_ID\s+(\d+)\s*$", line)
                        if m is None:
                            continue
                        if int(m.group(1)) == board_id:
                            ret.append(adir)
        if len(ret) == 0:
            return None
        return " or ".join(ret)
    except Exception as e:
        print("Failed to get name: %s" % str(e))
        return None
# upload the firmware
def upload(self, fw, force=False, boot_delay=None):
    """Flash firmware *fw* onto the board, verify it and reboot.

    force      -- flash even if the board/firmware ids do not match.
    boot_delay -- optional minimum boot delay to store in flash.
    Raises IOError on an id mismatch (unless forced) and RuntimeError
    if the image does not fit or verification fails.
    """
    # Make sure we are doing the right thing
    if self.board_type != fw.property('board_id'):
        # ID mismatch: check compatibility
        incomp = True
        if self.board_type in compatible_IDs:
            comp_fw_id = compatible_IDs[self.board_type][0]
            board_name = compatible_IDs[self.board_type][1]
            if comp_fw_id == fw.property('board_id'):
                msg = "Target %s (board_id: %d) is compatible with firmware for board_id=%u)" % (
                    board_name, self.board_type, fw.property('board_id'))
                print("INFO: %s" % msg)
                incomp = False
        if incomp:
            msg = "Firmware not suitable for this board (board_type=%u (%s) board_id=%u (%s))" % (
                self.board_type,
                self.board_name_for_board_id(self.board_type),
                fw.property('board_id'),
                self.board_name_for_board_id(fw.property('board_id')))
            print("WARNING: %s" % msg)
            if force:
                print("FORCED WRITE, FLASHING ANYWAY!")
            else:
                raise IOError(msg)
    self.dump_board_info()
    if self.fw_maxsize < fw.property('image_size') or self.extf_maxsize < fw.property('extf_image_size', 0):
        raise RuntimeError("Firmware image is too large for this board")
    if self.baudrate_bootloader_flash != self.baudrate_bootloader:
        print("Setting baudrate to %u" % self.baudrate_bootloader_flash)
        self.__setbaud(self.baudrate_bootloader_flash)
        self.port.baudrate = self.baudrate_bootloader_flash
        # re-sync after the baudrate change
        self.__sync()
    # external flash first (if the image carries an extf section)
    if (fw.property('extf_image_size', 0) > 0):
        self.erase_extflash("Erase ExtF ", fw.property('extf_image_size', 0))
        self.__program_extf("Program ExtF", fw)
        self.__verify_extf("Verify ExtF ", fw, fw.property('extf_image_size', 0))
    if (fw.property('image_size') > 0):
        self.__erase("Erase ")
        self.__program("Program", fw)
        # bootloader rev 2 verifies by readback, rev 3+ by CRC
        if self.bl_rev == 2:
            self.__verify_v2("Verify ", fw)
        else:
            self.__verify_v3("Verify ", fw)
    if boot_delay is not None:
        self.__set_boot_delay(boot_delay)
    print("\nRebooting.\n")
    self.__reboot()
    self.port.close()
def __next_baud_flightstack(self):
    """Advance to the next candidate flight-stack baudrate.

    Returns False when the list is exhausted or the port rejects the
    baudrate, True otherwise.
    """
    self.baudrate_flightstack_idx += 1
    if self.baudrate_flightstack_idx >= len(self.baudrate_flightstack):
        return False
    try:
        self.port.baudrate = self.baudrate_flightstack[self.baudrate_flightstack_idx]
    except Exception:
        return False
    return True
def send_reboot(self):
    """Try to reboot a running flight stack into the bootloader.

    Returns False once every candidate flight-stack baudrate has been
    tried, True otherwise (even if the reboot attempt itself failed).
    """
    if (not self.__next_baud_flightstack()):
        return False
    print("Attempting reboot on %s with baudrate=%d..." % (self.port.port, self.port.baudrate), file=sys.stderr)
    print("If the board does not respond, unplug and re-plug the USB connector.", file=sys.stderr)
    try:
        # try MAVLINK command first
        self.port.flush()
        if self.MAVLINK_REBOOT_ID1 is not None:
            self.__send(self.MAVLINK_REBOOT_ID1)
        if self.MAVLINK_REBOOT_ID0 is not None:
            self.__send(self.MAVLINK_REBOOT_ID0)
        # then try reboot via NSH
        self.__send(uploader.NSH_INIT)
        self.__send(uploader.NSH_REBOOT_BL)
        self.__send(uploader.NSH_INIT)
        self.__send(uploader.NSH_REBOOT)
        self.port.flush()
        self.port.baudrate = self.baudrate_bootloader
    except Exception:
        # the port may vanish mid-reboot; best-effort restore of the
        # bootloader baudrate, ignoring further failures
        try:
            self.port.flush()
            self.port.baudrate = self.baudrate_bootloader
        except Exception:
            pass
    return True
# download the firmware currently in flash
def download(self, fw):
    """Read the board's flash into the file at path *fw* and close the port."""
    if self.baudrate_bootloader_flash != self.baudrate_bootloader:
        print("Setting baudrate to %u" % self.baudrate_bootloader_flash)
        self.__setbaud(self.baudrate_bootloader_flash)
        self.port.baudrate = self.baudrate_bootloader_flash
        # re-sync after switching baudrate
        self.__sync()
    self.__download("Download", fw)
    self.port.close()
def ports_to_try(args):
    """Return the list of serial-port candidates for this run.

    Uses args.port (comma-separated, wildcards allowed on POSIX) or the
    module-level default_ports, filtered by the host platform.
    """
    patterns = default_ports if args.port is None else args.port.split(",")
    # use glob to support wildcard ports. This allows the use of
    # /dev/serial/by-id/usb-ArduPilot on Linux, which prevents the
    # upload from causing modem hangups etc
    if "linux" in _platform or "darwin" in _platform or "cygwin" in _platform:
        import glob
        portlist = []
        for pattern in patterns:
            portlist.extend(glob.glob(pattern))
    else:
        portlist = patterns
    # filter ports based on platform:
    if "cygwin" in _platform:
        # Cygwin behaves like Linux; it must be checked before the
        # generic "win" test below, and needs no extra filtering
        pass
    elif "darwin" in _platform:
        # OS X, don't open Windows and Linux ports
        portlist = [p for p in portlist if "COM" not in p and "ACM" not in p]
    elif "win" in _platform:
        # Windows, don't open POSIX ports
        portlist = [p for p in portlist if "/" not in p]
    return portlist
def modemmanager_check():
    """Warn loudly if ModemManager is installed; it grabs serial devices."""
    if not os.path.exists("/usr/sbin/ModemManager"):
        return
    print("""
==========================================================================================================
WARNING: You should uninstall ModemManager as it conflicts with any non-modem serial device (like Pixhawk)
==========================================================================================================
""")
def find_bootloader(up, port):
    """Probe *port* repeatedly until a bootloader identifies itself.

    Returns True once identify() succeeds; returns False when a reboot
    could not even be requested (all flight-stack baudrates exhausted).
    """
    while (True):
        up.open()
        # port is open, try talking to it
        try:
            # identify the bootloader
            up.identify()
            print("Found board %x,%x bootloader rev %x on %s" % (up.board_type, up.board_rev, up.bl_rev, port))
            return True
        except Exception:
            pass
        # no bootloader answered: ask a running flight stack to reboot
        reboot_sent = up.send_reboot()
        # wait for the reboot, without we might run into Serial I/O Error 5
        time.sleep(0.25)
        # always close the port
        up.close()
        # wait for the close, without we might run into Serial I/O Error 6
        time.sleep(0.3)
        if not reboot_sent:
            return False
def main():
    """Command-line entry point: parse arguments, find a board, then
    upload/download/identify/erase as requested."""
    # Parse commandline arguments
    parser = argparse.ArgumentParser(description="Firmware uploader for the PX autopilot system.")
    parser.add_argument(
        '--port',
        action="store",
        help="Comma-separated list of serial port(s) to which the FMU may be attached",
        default=None
    )
    parser.add_argument(
        '--baud-bootloader',
        action="store",
        type=int,
        default=115200,
        help="Baud rate of the serial port (default is 115200) when communicating with bootloader, only required for true serial ports."  # NOQA
    )
    parser.add_argument(
        '--baud-bootloader-flash',
        action="store",
        type=int,
        default=None,
        help="Attempt to negotiate this baudrate with bootloader for flashing."
    )
    parser.add_argument(
        '--baud-flightstack',
        action="store",
        default="57600",
        help="Comma-separated list of baud rate of the serial port (default is 57600) when communicating with flight stack (Mavlink or NSH), only required for true serial ports."  # NOQA
    )
    parser.add_argument('--force', action='store_true', default=False, help='Override board type check and continue loading')
    parser.add_argument('--boot-delay', type=int, default=None, help='minimum boot delay to store in flash')
    parser.add_argument('--target-system', type=int, action="store", help="System ID to update")
    parser.add_argument('--target-component', type=int, action="store", help="Component ID to update")
    parser.add_argument(
        '--source-system',
        type=int,
        action="store",
        help="Source system to send reboot mavlink packets from",
        default=255
    )
    parser.add_argument(
        '--source-component',
        type=int,
        action="store",
        help="Source component to send reboot mavlink packets from",
        default=0
    )
    parser.add_argument('--download', action='store_true', default=False, help='download firmware from board')
    parser.add_argument('--identify', action="store_true", help="Do not flash firmware; simply dump information about board")
    parser.add_argument('--no-extf', action="store_true", help="Do not attempt external flash operations")
    parser.add_argument('--erase-extflash', type=lambda x: int(x, 0), default=None,
                        help="Erase sectors containing specified amount of bytes from ext flash")
    parser.add_argument('firmware', nargs="?", action="store", default=None, help="Firmware file to be uploaded")
    args = parser.parse_args()
    # warn people about ModemManager which interferes badly with Pixhawk
    modemmanager_check()
    if args.firmware is None and not args.identify and not args.erase_extflash:
        parser.error("Firmware filename required for upload or download")
        # NOTE(review): parser.error() already exits with status 2; this
        # sys.exit(1) is unreachable
        sys.exit(1)
    # Load the firmware file
    if not args.download and not args.identify and not args.erase_extflash:
        fw = firmware(args.firmware)
        print("Loaded firmware for %x,%x, size: %d bytes, waiting for the bootloader..." %
              (fw.property('board_id'), fw.property('board_revision'), fw.property('image_size')))
    print("If the board does not respond within 1-2 seconds, unplug and re-plug the USB connector.")
    baud_flightstack = [int(x) for x in args.baud_flightstack.split(',')]
    # Spin waiting for a device to show up
    try:
        while True:
            for port in ports_to_try(args):
                # print("Trying %s" % port)
                # create an uploader attached to the port
                try:
                    up = uploader(port,
                                  args.baud_bootloader,
                                  baud_flightstack,
                                  args.baud_bootloader_flash,
                                  args.target_system,
                                  args.target_component,
                                  args.source_system,
                                  args.source_component,
                                  args.no_extf)
                except Exception as e:
                    if not is_WSL:
                        # open failed, WSL must cycle through all ttyS* ports quickly but rate limit everything else
                        print("Exception creating uploader: %s" % str(e))
                        time.sleep(0.05)
                    # and loop to the next port
                    continue
                if not find_bootloader(up, port):
                    # Go to the next port
                    continue
                try:
                    # ok, we have a bootloader, try flashing it
                    if args.identify:
                        up.dump_board_info()
                    elif args.download:
                        up.download(args.firmware)
                    elif args.erase_extflash:
                        up.erase_extflash('Erase ExtF', args.erase_extflash)
                        print("\nExtF Erase Finished")
                    else:
                        up.upload(fw, force=args.force, boot_delay=args.boot_delay)
                except RuntimeError as ex:
                    # print the error
                    print("\nERROR: %s" % ex.args)
                except IOError:
                    up.close()
                    continue
                finally:
                    # always close the port
                    up.close()
                # we could loop here if we wanted to wait for more boards...
                sys.exit(0)
            # Delay retries to < 20 Hz to prevent spin-lock from hogging the CPU
            time.sleep(0.05)
    # CTRL+C aborts the upload/spin-lock by interrupt mechanics
    except KeyboardInterrupt:
        print("\n Upload aborted by user.")
        sys.exit(0)
if __name__ == '__main__':
    # run as a script: parse arguments and flash/inspect the board
    main()
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
| diydrones/ardupilot | Tools/scripts/uploader.py | Python | gpl-3.0 | 45,137 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-09-02 14:39
from __future__ import unicode_literals
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django 1.9 (2016-09-02).  Adds several reference
    # lookup models and extends/alters existing Domain, Language,
    # EducationInstitution, GradeType and ExternalOffer fields.
    # Do not hand-edit generated operations except to resolve conflicts.

    dependencies = [
        ('reference', '0004_educationinstitution'),
    ]

    operations = [
        # --- new lookup tables -------------------------------------------
        migrations.CreateModel(
            name='AssimilationCriteria',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('external_id', models.CharField(blank=True, max_length=100, null=True)),
                ('criteria', models.CharField(max_length=255, unique=True)),
                ('order', models.IntegerField(blank=True, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='EducationType',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('external_id', models.CharField(blank=True, max_length=100, null=True)),
                ('type', models.CharField(choices=[('TRANSITION', 'Transition'), ('QUALIFICATION', 'Qualification'), ('ANOTHER', 'Autre')], max_length=20)),
                ('name', models.CharField(max_length=100)),
                ('adhoc', models.BooleanField(default=True)),
            ],
        ),
        migrations.CreateModel(
            name='ExternalOffer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('external_id', models.CharField(blank=True, max_length=100, null=True)),
                ('changed', models.DateTimeField(null=True)),
                ('name', models.CharField(max_length=150, unique=True)),
                ('adhoc', models.BooleanField(default=True)),
                ('national', models.BooleanField(default=False)),
            ],
        ),
        migrations.CreateModel(
            name='GradeType',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('external_id', models.CharField(blank=True, max_length=100, null=True)),
                ('name', models.CharField(max_length=255)),
                ('coverage', models.CharField(choices=[('HIGH_EDUC_NOT_UNIVERSITY', 'HIGH_EDUC_NOT_UNIVERSITY'), ('UNIVERSITY', 'UNIVERSITY'), ('UNKNOWN', 'UNKNOWN')], default='UNKNOWN', max_length=30)),
                ('adhoc', models.BooleanField(default=True)),
                ('institutional', models.BooleanField(default=False)),
            ],
        ),
        migrations.CreateModel(
            name='InstitutionalGradeType',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('external_id', models.CharField(blank=True, max_length=100, null=True)),
                ('name', models.CharField(max_length=255)),
            ],
        ),
        # --- new columns on existing models ------------------------------
        migrations.AddField(
            model_name='domain',
            name='adhoc',
            field=models.BooleanField(default=True),
        ),
        migrations.AddField(
            model_name='domain',
            name='national',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='domain',
            name='reference',
            field=models.CharField(blank=True, max_length=10, null=True),
        ),
        migrations.AddField(
            model_name='domain',
            name='type',
            field=models.CharField(choices=[('HIGH_EDUC_NOT_UNIVERSITY', 'HIGH_EDUC_NOT_UNIVERSITY'), ('UNIVERSITY', 'UNIVERSITY'), ('UNKNOWN', 'UNKNOWN')], default='UNKNOWN', max_length=50),
        ),
        migrations.AddField(
            model_name='language',
            name='external_id',
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
        # --- altered columns ---------------------------------------------
        migrations.AlterField(
            model_name='domain',
            name='decree',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='reference.Decree'),
        ),
        migrations.AlterField(
            model_name='educationinstitution',
            name='adhoc',
            field=models.BooleanField(default=True),
        ),
        migrations.AlterField(
            model_name='educationinstitution',
            name='institution_type',
            field=models.CharField(choices=[('SECONDARY', 'SECONDARY'), ('UNIVERSITY', 'UNIVERSITY'), ('HIGHER_NON_UNIVERSITY', 'HIGHER_NON_UNIVERSITY')], max_length=25),
        ),
        migrations.AlterField(
            model_name='educationinstitution',
            name='national_community',
            field=models.CharField(blank=True, choices=[('FRENCH', 'FRENCH'), ('GERMAN', 'GERMAN'), ('DUTCH', 'DUTCH')], max_length=20, null=True),
        ),
        # --- new foreign keys --------------------------------------------
        migrations.AddField(
            model_name='gradetype',
            name='institutional_grade_type',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='reference.InstitutionalGradeType'),
        ),
        migrations.AddField(
            model_name='externaloffer',
            name='domain',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='reference.Domain'),
        ),
        migrations.AddField(
            model_name='externaloffer',
            name='grade_type',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='reference.GradeType'),
        ),
        migrations.AddField(
            model_name='externaloffer',
            name='offer_year',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='base.OfferYear'),
        ),
    ]
| uclouvain/osis | reference/migrations/0005_auto_20160902_1639.py | Python | agpl-3.0 | 5,965 |
from django.forms.util import flatatt
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django import forms
from django.forms import widgets
class GridWidget(widgets.Widget):
    """Placeholder grid widget; inherits all behaviour from widgets.Widget."""
    pass
class DateIntervalWidget(widgets.MultiWidget):
    """MultiWidget pairing two date inputs for a start/end interval."""

    def __init__(self, attrs=None):
        # one date widget for each end of the interval
        sub_widgets = [forms.DateField.widget(), forms.DateField.widget()]
        super(DateIntervalWidget, self).__init__(sub_widgets, attrs)

    def decompress(self, value):
        # no single stored representation: both inputs start empty
        return [None, None]
| mrmuxl/keops | keops/forms/widgets.py | Python | agpl-3.0 | 484 |
import logging
from datetime import date
from app import app
from flask import render_template
log = logging.getLogger(__name__)
# NOTE(review): evaluated once at import time -- a server left running
# across New Year keeps reporting the old year; confirm whether that matters.
year = date.today().year
@app.errorhandler(404)
def page_not_found(e):
    """Log and render the custom "page not found" view."""
    log.warning("Request returned error 404")
    body = render_template('404.html', posts=[], year=year)
    return body, 404
@app.errorhandler(500)
def internal_server_error(e):
    """Log and render the custom "internal server error" view."""
    log.error("Request returned error 500")
    body = render_template('500.html', posts=[], year=year)
    return body, 500
| siketh/TRBlog | app/views/errors.py | Python | mit | 479 |
from django.core.exceptions import ValidationError
import mkt.site.tests
from mkt.ratings.validators import validate_rating
class TestValidateRating(mkt.site.tests.TestCase):
    """validate_rating accepts whole-star values 1..5 and rejects the rest."""

    def test_valid(self):
        for rating in (1, 2, 3, 4, 5):
            validate_rating(rating)

    def test_invalid(self):
        for rating in (-4, 0, 3.5, 6):
            with self.assertRaises(ValidationError):
                validate_rating(rating)
| mudithkr/zamboni | mkt/ratings/tests/test_validators.py | Python | bsd-3-clause | 437 |
__author__ = 'civa'
import json
from shared import security
def pass_test():
    """Smoke-test shared.security.check_hash against a known hash.

    NOTE(review): this module is Python 2 (print statements below); keep
    the py2 syntax unless the whole project is ported.
    """
    # sample credential payload; NOTE(review): 'str' shadows the builtin
    # and 'data' is never used afterwards
    str = '{"username":"test","password":"testpass"}'
    data = json.loads(str)
    input_pass = 'disaster'
    hashed_pass = '+IOsYZLzXA9n5gbqURCGh7+2wObuZ9GuQgIyv35HtPPGLx7a'
    result = security.check_hash(input_pass, hashed_pass, False)
    if result:
        print 'Valid'
    else:
        print 'Invalid'
if __name__ == "__main__":
    #get_vizier()
    # run the password-hash smoke test when executed directly
    pass_test()
| Civa/Zenith | src/Backend/Distributed/hubs/auth/tests/utils_tests.py | Python | gpl-3.0 | 528 |
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
import hashlib
from default import Test
from pybossa.cache import get_key_to_hash, get_hash_key
class TestCache(Test):
    """Unit tests for the cache key helpers get_key_to_hash/get_hash_key."""

    def test_00_get_key_to_hash_with_args(self):
        """Test CACHE get_key_to_hash with args works."""
        expected = ':1:a'
        result = get_key_to_hash(1, 'a')
        assert result == expected, \
            "Different key_to_hash %s != %s" % (result, expected)

    def test_01_get_key_to_hash_with_kwargs(self):
        """Test CACHE get_key_to_hash with kwargs works."""
        expected = ':1:a'
        result = get_key_to_hash(page=1, vowel='a')
        assert result == expected, \
            "Different key_to_hash %s != %s" % (result, expected)

    def test_02_get_key_to_hash_with_args_and_kwargs(self):
        """Test CACHE get_key_to_hash with args and kwargs works."""
        expected = ':1:a'
        result = get_key_to_hash(1, vowel='a')
        assert result == expected, \
            "Different key_to_hash %s != %s" % (result, expected)

    def test_03_get_hash_key(self):
        """Test CACHE get_hash_key works."""
        prefix = 'prefix'
        key_to_hash = get_key_to_hash(1, vowel=u'ñ')
        # the key is the md5 of the UTF-8 encoded key_to_hash, prefixed
        digest = hashlib.md5(key_to_hash.encode('utf-8')).hexdigest()
        expected = prefix + ":" + digest
        key = get_hash_key(prefix, key_to_hash)
        assert expected == key, \
            "The expected key is different %s != %s" % (expected, key)
| proyectos-analizo-info/pybossa-analizo-info | test/test_cache/__init__.py | Python | agpl-3.0 | 2,254 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This module provides widgets to use aptdaemon in a GTK application.
"""
# Copyright (C) 2008-2009 Sebastian Heinlein <[email protected]>
#
# Licensed under the GNU General Public License Version 2
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
__author__ = "Sebastian Heinlein <[email protected]>"
__all__ = ("AptConfigFileConflictDialog", "AptCancelButton",
"AptConfirmDialog",
"AptProgressDialog", "AptTerminalExpander", "AptStatusIcon",
"AptRoleIcon", "AptStatusAnimation", "AptRoleLabel",
"AptStatusLabel", "AptMediumRequiredDialog", "AptMessageDialog",
"AptErrorDialog", "AptProgressBar", "DiffView",
"AptTerminal"
)
import difflib
import gettext
import os
import pty
import re
import apt_pkg
from gi.repository import GObject
from gi.repository import GLib
from gi.repository import Gdk
from gi.repository import Gtk
from gi.repository import Pango
from gi.repository import Vte
from .enums import *
from defer import inline_callbacks
from defer.utils import deferable
# translation helper bound to the "aptdaemon" gettext domain
_ = lambda msg: gettext.dgettext("aptdaemon", msg)
# column indices (id, package) for two-column tree models used below
(COLUMN_ID,
 COLUMN_PACKAGE) = list(range(2))
class AptStatusIcon(Gtk.Image):
    """
    Provides a Gtk.Image which shows an icon representing the status of a
    aptdaemon transaction
    """
    def __init__(self, transaction=None, size=Gtk.IconSize.DIALOG):
        Gtk.Image.__init__(self)
        # note: icon_size is a property which you can't set with GTK 2, so use
        # a different name
        self._icon_size = size
        # name of the icon currently shown; used to skip redundant reloads
        self.icon_name = None
        # handler ids returned by transaction.connect()
        self._signals = []
        self.set_alignment(0, 0)
        if transaction is not None:
            self.set_transaction(transaction)

    def set_transaction(self, transaction):
        """Connect to the given transaction"""
        for sig in self._signals:
            # NOTE(review): sig is a GObject signal-handler id from
            # transaction.connect(); GLib.source_remove() expects an event
            # source id.  transaction.disconnect(sig) looks like the intended
            # call -- confirm against aptdaemon upstream before changing.
            GLib.source_remove(sig)
        self._signals = []
        self._signals.append(transaction.connect("status-changed",
                                                 self._on_status_changed))

    def set_icon_size(self, size):
        """Set the icon size to gtk stock icon size value"""
        self._icon_size = size

    def _on_status_changed(self, transaction, status):
        """Set the status icon according to the changed status"""
        icon_name = get_status_icon_name_from_enum(status)
        if icon_name is None:
            icon_name = Gtk.STOCK_MISSING_IMAGE
        # only reload the image when the icon actually changes
        if icon_name != self.icon_name:
            self.set_from_icon_name(icon_name, self._icon_size)
            self.icon_name = icon_name
class AptRoleIcon(AptStatusIcon):
    """
    Gtk.Image subclass showing an icon for the role of an aptdaemon
    transaction.
    """
    def set_transaction(self, transaction):
        """Track role changes of *transaction* and show a matching icon."""
        while self._signals:
            GLib.source_remove(self._signals.pop())
        self._signals.append(transaction.connect("role-changed",
                                                 self._on_role_changed))
        # show the current role immediately instead of waiting for a change
        self._on_role_changed(transaction, transaction.role)

    def _on_role_changed(self, transaction, role_enum):
        """Show an icon representing the role"""
        name = get_role_icon_name_from_enum(role_enum)
        if name is None:
            name = Gtk.STOCK_MISSING_IMAGE
        # skip the reload when the icon is unchanged
        if name != self.icon_name:
            self.set_from_icon_name(name, self._icon_size)
            self.icon_name = name
class AptStatusAnimation(AptStatusIcon):
    """
    Provides a Gtk.Image which shows an animation representing the
    transaction status
    """
    def __init__(self, transaction=None, size=Gtk.IconSize.DIALOG):
        AptStatusIcon.__init__(self, transaction, size)
        self.animation = []      # frames (pixbufs) of the current animation
        self.ticker = 0          # GLib timeout source id, 0 when stopped
        self.frame_counter = 0
        self.iter = 0            # index of the currently shown frame
        name = get_status_animation_name_from_enum(STATUS_WAITING)
        fallback = get_status_icon_name_from_enum(STATUS_WAITING)
        self.set_animation(name, fallback)
    def set_animation(self, name, fallback=None, size=None):
        """Show and start the animation of the given name and size.

        If the animation cannot be found, show the fallback icon instead.
        """
        if name == self.icon_name:
            return
        if size is not None:
            self._icon_size = size
        self.stop_animation()
        animation = []
        (width, height) = Gtk.icon_size_lookup(self._icon_size)
        theme = Gtk.IconTheme.get_default()
        if name is not None and theme.has_icon(name):
            pixbuf = theme.load_icon(name, width, 0)
            # The animation ships as a grid of frames in one image.
            # FIX: use floor division - true division yields floats under
            # Python 3 and makes range() raise a TypeError.
            rows = pixbuf.get_height() // height
            cols = pixbuf.get_width() // width
            for r in range(rows):
                for c in range(cols):
                    animation.append(pixbuf.subpixbuf(c * width, r * height,
                                                      width, height))
            if len(animation) > 0:
                self.animation = animation
                self.iter = 0
                self.set_from_pixbuf(self.animation[0])
                self.start_animation()
            else:
                # Image smaller than one frame: show it as a static icon.
                self.set_from_pixbuf(pixbuf)
            self.icon_name = name
        elif fallback is not None and theme.has_icon(fallback):
            self.set_from_icon_name(fallback, self._icon_size)
            self.icon_name = fallback
        else:
            # FIX: set_from_icon_name() requires the size argument, as used
            # everywhere else in this module.
            self.set_from_icon_name(Gtk.STOCK_MISSING_IMAGE, self._icon_size)
    def start_animation(self):
        """Start the animation"""
        if self.ticker == 0:
            self.ticker = GLib.timeout_add(200, self._advance)
    def stop_animation(self):
        """Stop the animation"""
        if self.ticker != 0:
            GLib.source_remove(self.ticker)
            self.ticker = 0
    def _advance(self):
        """
        Show the next frame of the animation and stop the animation if the
        widget is no longer visible
        """
        if self.get_property("visible") is False:
            self.ticker = 0
            return False
        self.iter = self.iter + 1
        if self.iter >= len(self.animation):
            self.iter = 0
        self.set_from_pixbuf(self.animation[self.iter])
        return True
    def _on_status_changed(self, transaction, status):
        """Set the animation according to the changed status."""
        name = get_status_animation_name_from_enum(status)
        fallback = get_status_icon_name_from_enum(status)
        self.set_animation(name, fallback)
class AptRoleLabel(Gtk.Label):
    """
    Role label for the running aptdaemon transaction
    """
    def __init__(self, transaction=None):
        # FIX: the original called GtkLabel.__init__, an undefined name
        # (NameError on instantiation); the superclass is Gtk.Label.
        Gtk.Label.__init__(self)
        self.set_alignment(0, 0)
        self.set_ellipsize(Pango.EllipsizeMode.END)
        self.set_max_width_chars(15)
        self._signals = []
        if transaction is not None:
            self.set_transaction(transaction)
    def set_transaction(self, transaction):
        """Connect the role label to the given aptdaemon transaction"""
        for sig in self._signals:
            GLib.source_remove(sig)
        self._signals = []
        self._on_role_changed(transaction, transaction.role)
        self._signals.append(transaction.connect("role-changed",
                                                 self._on_role_changed))
    def _on_role_changed(self, transaction, role):
        """Set the role text."""
        self.set_markup(get_role_localised_present_from_enum(role))
class AptStatusLabel(Gtk.Label):
    """A label that mirrors the status (and detailed status text) of a
    running aptdaemon transaction.
    """
    def __init__(self, transaction=None):
        Gtk.Label.__init__(self)
        self.set_alignment(0, 0)
        self.set_ellipsize(Pango.EllipsizeMode.END)
        self.set_max_width_chars(15)
        self._signals = []
        if transaction is not None:
            self.set_transaction(transaction)
    def set_transaction(self, transaction):
        """Track the given transaction, replacing any previous one."""
        for handler in self._signals:
            GLib.source_remove(handler)
        self._signals = [
            transaction.connect("status-changed", self._on_status_changed),
            transaction.connect("status-details-changed",
                                self._on_status_details_changed),
        ]
    def _on_status_changed(self, transaction, status):
        """Display the localised text for the new status enum."""
        self.set_markup(get_status_string_from_enum(status))
    def _on_status_details_changed(self, transaction, text):
        """Display the free-form status text reported by apt."""
        self.set_markup(text)
class AptProgressBar(Gtk.ProgressBar):
    """A progress bar that follows the progress of an aptdaemon
    transaction.
    """
    def __init__(self, transaction=None):
        Gtk.ProgressBar.__init__(self)
        self.set_ellipsize(Pango.EllipsizeMode.END)
        self.set_text(" ")
        self.set_pulse_step(0.05)
        self._signals = []
        if transaction is not None:
            self.set_transaction(transaction)
    def set_transaction(self, transaction):
        """Follow the given transaction, replacing any previous one."""
        for handler in self._signals:
            GLib.source_remove(handler)
        self._signals = [
            transaction.connect("finished", self._on_finished),
            transaction.connect("progress-changed",
                                self._on_progress_changed),
            transaction.connect("progress-details-changed",
                                self._on_progress_details),
        ]
    def _on_progress_changed(self, transaction, progress):
        """Show the new progress; values above 100 mean "unknown" and
        make the bar pulse instead.
        """
        if progress <= 100:
            self.set_fraction(progress / 100.0)
        else:
            self.pulse()
    def _on_progress_details(self, transaction, items_done, items_total,
                             bytes_done, bytes_total, speed, eta):
        """Update the bar text from the latest download details."""
        if items_total == 0 and bytes_total == 0:
            self.set_text(" ")
        elif speed != 0:
            self.set_text(_("Downloaded %sB of %sB at %sB/s") %
                          (apt_pkg.size_to_str(bytes_done),
                           apt_pkg.size_to_str(bytes_total),
                           apt_pkg.size_to_str(speed)))
        else:
            self.set_text(_("Downloaded %sB of %sB") %
                          (apt_pkg.size_to_str(bytes_done),
                           apt_pkg.size_to_str(bytes_total)))
    def _on_finished(self, transaction, exit):
        """Fill the bar completely once the transaction has finished."""
        self.set_fraction(1)
class AptDetailsExpander(Gtk.Expander):
    """Expander that reveals either the download list or the terminal of
    an aptdaemon transaction, depending on the transaction status.
    """
    def __init__(self, transaction=None, terminal=True):
        Gtk.Expander.__init__(self, label=_("Details"))
        self.show_terminal = terminal
        self._signals = []
        self.set_sensitive(False)
        self.set_expanded(False)
        if self.show_terminal:
            self.terminal = AptTerminal()
        else:
            self.terminal = None
        self.download_view = AptDownloadsView()
        self.download_scrolled = Gtk.ScrolledWindow()
        self.download_scrolled.set_shadow_type(Gtk.ShadowType.ETCHED_IN)
        self.download_scrolled.set_policy(Gtk.PolicyType.NEVER,
                                          Gtk.PolicyType.AUTOMATIC)
        self.download_scrolled.add(self.download_view)
        hbox = Gtk.HBox()
        hbox.pack_start(self.download_scrolled, True, True, 0)
        if self.terminal:
            hbox.pack_start(self.terminal, True, True, 0)
        self.add(hbox)
        if transaction is not None:
            self.set_transaction(transaction)
    def set_transaction(self, transaction):
        """Connect the expander to the given aptdaemon transaction"""
        for sig in self._signals:
            GLib.source_remove(sig)
        # FIX: reset the list, otherwise stale handler ids accumulate and
        # would be "removed" again on the next call (every other widget in
        # this module clears the list here).
        self._signals = []
        self._signals.append(
            transaction.connect("status-changed", self._on_status_changed))
        self._signals.append(
            transaction.connect("terminal-attached-changed",
                                self._on_terminal_attached_changed))
        if self.terminal:
            self.terminal.set_transaction(transaction)
        self.download_view.set_transaction(transaction)
    def _on_status_changed(self, trans, status):
        """Show the download list while downloading, the terminal while
        committing, and collapse/disable the expander otherwise."""
        if status in (STATUS_DOWNLOADING, STATUS_DOWNLOADING_REPO):
            self.set_sensitive(True)
            self.download_scrolled.show()
            if self.terminal:
                self.terminal.hide()
        elif status == STATUS_COMMITTING:
            self.download_scrolled.hide()
            if self.terminal:
                self.terminal.show()
                self.set_sensitive(True)
            else:
                self.set_expanded(False)
                self.set_sensitive(False)
        else:
            self.download_scrolled.hide()
            if self.terminal:
                self.terminal.hide()
            self.set_sensitive(False)
            self.set_expanded(False)
    def _on_terminal_attached_changed(self, transaction, attached):
        """Connect the terminal to the pty device"""
        if attached and self.terminal:
            self.set_sensitive(True)
class AptTerminal(Vte.Terminal):
    """A Vte terminal attached to the pty of an aptdaemon transaction."""
    def __init__(self, transaction=None):
        Vte.Terminal.__init__(self)
        self._signals = []
        # Own both ends of a pseudo terminal; the slave's tty name is
        # handed to the transaction, the master is shown in the widget.
        self._master, self._slave = pty.openpty()
        self._ttyname = os.ttyname(self._slave)
        self.set_size(80, 24)
        self.set_pty_object(Vte.Pty.new_foreign(self._master))
        if transaction is not None:
            self.set_transaction(transaction)
    def set_transaction(self, transaction):
        """Connect the terminal to the given aptdaemon transaction"""
        for sig in self._signals:
            GLib.source_remove(sig)
        # FIX: reset the list, otherwise stale handler ids accumulate and
        # would be "removed" again on the next call (matches the other
        # widgets in this module).
        self._signals = []
        self._signals.append(
            transaction.connect("terminal-attached-changed",
                                self._on_terminal_attached_changed))
        self._transaction = transaction
        self._transaction.set_terminal(self._ttyname)
    def _on_terminal_attached_changed(self, transaction, attached):
        """Show the terminal"""
        self.set_sensitive(attached)
class AptCancelButton(Gtk.Button):
    """
    Provides a Gtk.Button which allows to cancel a running aptdaemon
    transaction
    """
    def __init__(self, transaction=None):
        Gtk.Button.__init__(self)
        self.set_use_stock(True)
        self.set_label(Gtk.STOCK_CANCEL)
        self.set_sensitive(True)
        self._signals = []
        # Handler id of our own "clicked" connection, replaced whenever a
        # new transaction is set.
        self._clicked_sig = None
        if transaction is not None:
            self.set_transaction(transaction)
    def set_transaction(self, transaction):
        """Connect the button to the given aptdaemon transaction"""
        for sig in self._signals:
            GLib.source_remove(sig)
        self._signals = []
        self._signals.append(
            transaction.connect("finished", self._on_finished))
        self._signals.append(
            transaction.connect("cancellable-changed",
                                self._on_cancellable_changed))
        # FIX: disconnect the previous "clicked" handler. Without this, a
        # button reused for several transactions would cancel every
        # transaction it was ever attached to on a single click.
        if self._clicked_sig is not None:
            self.disconnect(self._clicked_sig)
        self._clicked_sig = self.connect("clicked", self._on_clicked,
                                         transaction)
    def _on_cancellable_changed(self, transaction, cancellable):
        """
        Enable the button if cancel is allowed and disable it in the other case
        """
        self.set_sensitive(cancellable)
    def _on_finished(self, transaction, status):
        """Disable the button once the transaction is over."""
        self.set_sensitive(False)
    def _on_clicked(self, button, transaction):
        """Cancel the transaction and prevent further clicks."""
        transaction.cancel()
        self.set_sensitive(False)
class AptDownloadsView(Gtk.TreeView):
    """A Gtk.TreeView which displays the progress and status of each
    download of a transaction.
    """
    # Model columns: markup text, progress percentage, download URI.
    COL_TEXT, COL_PROGRESS, COL_URI = list(range(3))
    def __init__(self, transaction=None):
        """Create the view; optionally attach it to a transaction."""
        Gtk.TreeView.__init__(self)
        model = Gtk.ListStore(GObject.TYPE_STRING, GObject.TYPE_INT,
                              GObject.TYPE_STRING)
        self.set_model(model)
        self.props.headers_visible = False
        self.set_rules_hint(True)
        # Maps a download URI to its row iter in the model.
        self._download_map = {}
        self._signals = []
        if transaction is not None:
            self.set_transaction(transaction)
        cell_uri = Gtk.CellRendererText()
        cell_uri.props.ellipsize = Pango.EllipsizeMode.END
        column_download = Gtk.TreeViewColumn(_("File"))
        column_download.pack_start(cell_uri, True)
        column_download.add_attribute(cell_uri, "markup", self.COL_TEXT)
        cell_progress = Gtk.CellRendererProgress()
        #TRANSLATORS: header of the progress download column
        column_progress = Gtk.TreeViewColumn(_("%"))
        column_progress.pack_start(cell_progress, True)
        column_progress.set_cell_data_func(cell_progress, self._data_progress,
                                           None)
        self.append_column(column_progress)
        self.append_column(column_download)
        self.set_tooltip_column(self.COL_URI)
    def set_transaction(self, transaction):
        """Connect the download view to the given aptdaemon transaction"""
        # NOTE(review): these ids come from GObject connect(); removing them
        # with GLib.source_remove() (meant for event sources) looks wrong,
        # though it matches the rest of this module -- confirm upstream.
        for sig in self._signals:
            GLib.source_remove(sig)
        self._signals = []
        self._signals.append(transaction.connect("progress-download-changed",
                                                 self._on_download_changed))
    def _on_download_changed(self, transaction, uri, status, desc, full_size,
                             downloaded, message):
        """Callback for a changed download progress."""
        try:
            progress = int(downloaded * 100 / full_size)
        except ZeroDivisionError:
            # Unknown total size: -1 makes the progress renderer pulse.
            progress = -1
        if status == DOWNLOAD_DONE:
            progress = 100
        if progress > 100:
            progress = 100
        text = desc[:]
        text += "\n<small>"
        #TRANSLATORS: %s is the full size in Bytes, e.g. 198M
        if status == DOWNLOAD_FETCHING:
            text += (_("Downloaded %sB of %sB") %
                     (apt_pkg.size_to_str(downloaded),
                      apt_pkg.size_to_str(full_size)))
        elif status == DOWNLOAD_DONE:
            if full_size != 0:
                text += _("Downloaded %sB") % apt_pkg.size_to_str(full_size)
            else:
                text += _("Downloaded")
        else:
            text += get_download_status_from_enum(status)
        text += "</small>"
        model = self.get_model()
        if not model:
            return
        try:
            iter = self._download_map[uri]
        except KeyError:
            # we haven't seen the uri yet, add it now
            iter = model.append((text, progress, uri))
            self._download_map[uri] = iter
            # and update the adj if needed
            adj = self.get_vadjustment()
            # this may be None (LP: #1024590)
            if adj:
                is_scrolled_down = (
                    adj.get_value() + adj.get_page_size() == adj.get_upper())
                if is_scrolled_down:
                    # If the treeview was scrolled to the end, do this again
                    # after appending a new item
                    self.scroll_to_cell(
                        model.get_path(iter), None, False, False, False)
        else:
            model.set_value(iter, self.COL_TEXT, text)
            model.set_value(iter, self.COL_PROGRESS, progress)
    def _data_progress(self, column, cell, model, iter, data):
        """Cell data func: pulse the renderer for unknown size (-1),
        otherwise show the percentage value."""
        progress = model.get_value(iter, self.COL_PROGRESS)
        if progress == -1:
            cell.props.pulse = progress
        else:
            cell.props.value = progress
class AptProgressDialog(Gtk.Dialog):
    """
    Complete progress dialog for long taking aptdaemon transactions, which
    features a progress bar, cancel button, status icon and label
    """
    # Emitted after the transaction finished and errors were shown.
    __gsignals__ = {"finished": (GObject.SIGNAL_RUN_FIRST,
                                 GObject.TYPE_NONE, ())}
    def __init__(self, transaction=None, parent=None, terminal=True,
                 debconf=True):
        """Create the dialog widgets.

        Keyword arguments:
        transaction -- the aptdaemon transaction to monitor
        parent -- set the dialog transient for the given Gtk.Window
        terminal -- whether to embed a terminal in the details expander
        debconf -- whether to use the GNOME debconf frontend
        """
        Gtk.Dialog.__init__(self, parent=parent)
        # Remembered (terminal_visible, (width, height)) of the expanded
        # dialog, restored when the expander is opened again.
        self._expanded_size = None
        self.debconf = debconf
        # Setup the dialog
        self.set_border_width(6)
        self.set_resizable(False)
        self.get_content_area().set_spacing(6)
        # Setup the cancel button
        self.button_cancel = AptCancelButton(transaction)
        self.get_action_area().pack_start(self.button_cancel, False, False, 0)
        # Setup the status icon, label and progressbar
        hbox = Gtk.HBox()
        hbox.set_spacing(12)
        hbox.set_border_width(6)
        self.icon = AptRoleIcon()
        hbox.pack_start(self.icon, False, True, 0)
        vbox = Gtk.VBox()
        vbox.set_spacing(12)
        self.label_role = Gtk.Label()
        self.label_role.set_alignment(0, 0)
        vbox.pack_start(self.label_role, False, True, 0)
        vbox_progress = Gtk.VBox()
        vbox_progress.set_spacing(6)
        self.progress = AptProgressBar()
        vbox_progress.pack_start(self.progress, False, True, 0)
        self.label = AptStatusLabel()
        # Prime the label before any transaction status arrives.
        self.label._on_status_changed(None, STATUS_WAITING)
        vbox_progress.pack_start(self.label, False, True, 0)
        vbox.pack_start(vbox_progress, False, True, 0)
        hbox.pack_start(vbox, True, True, 0)
        self.expander = AptDetailsExpander(terminal=terminal)
        self.expander.connect("notify::expanded", self._on_expanded)
        vbox.pack_start(self.expander, True, True, 0)
        self.get_content_area().pack_start(hbox, True, True, 0)
        self._transaction = None
        self._signals = []
        self.set_title("")
        self.realize()
        self.progress.set_size_request(350, -1)
        # Only allow moving/resizing; hides minimize/maximize decorations.
        functions = Gdk.WMFunction.MOVE | Gdk.WMFunction.RESIZE
        try:
            self.get_window().set_functions(functions)
        except TypeError:
            # workaround for older and broken GTK typelibs
            self.get_window().set_functions(Gdk.WMFunction(functions))
        if transaction is not None:
            self.set_transaction(transaction)
        # catch ESC and behave as if cancel was clicked
        self.connect("delete-event", self._on_dialog_delete_event)
    def _on_dialog_delete_event(self, dialog, event):
        # Treat closing the window as pressing the cancel button.
        self.button_cancel.clicked()
        return True
    def _on_expanded(self, expander, param):
        # Make the dialog resizable if the expander is expanded
        # try to restore a previous size
        if not expander.get_expanded():
            self._expanded_size = (self.expander.terminal.get_visible(),
                                   self.get_size())
            self.set_resizable(False)
        elif self._expanded_size:
            self.set_resizable(True)
            term_visible, (stored_width, stored_height) = self._expanded_size
            # Check if the stored size was for the download details or
            # the terminal widget
            if term_visible != self.expander.terminal.get_visible():
                # The stored size was for the download details, so we need
                # get a new size for the terminal widget
                self._resize_to_show_details()
            else:
                self.resize(stored_width, stored_height)
        else:
            self.set_resizable(True)
            self._resize_to_show_details()
    def _resize_to_show_details(self):
        """Resize the window to show the expanded details.

        Unfortunately the expander only expands to the preferred size of the
        child widget (e.g showing all 80x24 chars of the Vte terminal) if
        the window is rendered the first time and the terminal is also
        visible. If the expander is expanded afterwards the window won't
        change its size anymore. So we have to do this manually. See LP#840942
        """
        win_width, win_height = self.get_size()
        exp_width = self.expander.get_allocation().width
        exp_height = self.expander.get_allocation().height
        if self.expander.terminal.get_visible():
            terminal_width = self.expander.terminal.get_char_width() * 80
            terminal_height = self.expander.terminal.get_char_height() * 24
            self.resize(terminal_width - exp_width + win_width,
                        terminal_height - exp_height + win_height)
        else:
            # No terminal: grow by a fixed amount for the download list.
            self.resize(win_width + 100, win_height + 200)
    def _on_status_changed(self, trans, status):
        # Also resize the window if we switch from download details to
        # the terminal window
        if (status == STATUS_COMMITTING and
                self.expander.terminal.get_visible()):
            self._resize_to_show_details()
    @deferable
    def run(self, attach=False, close_on_finished=True, show_error=True,
            reply_handler=None, error_handler=None):
        """Run the transaction and show the progress in the dialog.

        Keyword arguments:
        attach -- do not start the transaction but instead only monitor
                  an already running one
        close_on_finished -- if the dialog should be closed when the
                             transaction is complete
        show_error -- show a dialog with the error message
        """
        return self._run(attach, close_on_finished, show_error,
                         reply_handler, error_handler)
    @inline_callbacks
    def _run(self, attach, close_on_finished, show_error,
             reply_handler, error_handler):
        """Coroutine backing run(); see run() for the argument semantics."""
        try:
            sig = self._transaction.connect("finished", self._on_finished,
                                            close_on_finished, show_error)
            self._signals.append(sig)
            if attach:
                yield self._transaction.sync()
            else:
                if self.debconf:
                    yield self._transaction.set_debconf_frontend("gnome")
                yield self._transaction.run()
            self.show_all()
        except Exception as error:
            if error_handler:
                error_handler(error)
            else:
                raise
        else:
            if reply_handler:
                reply_handler()
    def _on_role_changed(self, transaction, role_enum):
        """Show the role of the transaction in the dialog interface"""
        role = get_role_localised_present_from_enum(role_enum)
        self.set_title(role)
        self.label_role.set_markup("<big><b>%s</b></big>" % role)
    def set_transaction(self, transaction):
        """Connect the dialog to the given aptdaemon transaction"""
        for sig in self._signals:
            GLib.source_remove(sig)
        self._signals = []
        self._signals.append(
            transaction.connect_after("status-changed",
                                      self._on_status_changed))
        self._signals.append(transaction.connect("role-changed",
                                                 self._on_role_changed))
        self._signals.append(transaction.connect("medium-required",
                                                 self._on_medium_required))
        self._signals.append(transaction.connect("config-file-conflict",
                                                 self._on_config_file_conflict))
        self._on_role_changed(transaction, transaction.role)
        # Propagate the transaction to all child widgets.
        self.progress.set_transaction(transaction)
        self.icon.set_transaction(transaction)
        self.label.set_transaction(transaction)
        self.expander.set_transaction(transaction)
        self._transaction = transaction
    def _on_medium_required(self, transaction, medium, drive):
        """Ask the user to insert the required medium; cancel otherwise."""
        dialog = AptMediumRequiredDialog(medium, drive, self)
        res = dialog.run()
        dialog.hide()
        if res == Gtk.ResponseType.OK:
            self._transaction.provide_medium(medium)
        else:
            self._transaction.cancel()
    def _on_config_file_conflict(self, transaction, old, new):
        """Let the user decide whether to keep or replace a config file."""
        dialog = AptConfigFileConflictDialog(old, new, self)
        res = dialog.run()
        dialog.hide()
        if res == Gtk.ResponseType.YES:
            self._transaction.resolve_config_file_conflict(old, "replace")
        else:
            self._transaction.resolve_config_file_conflict(old, "keep")
    def _on_finished(self, transaction, status, close, show_error):
        """Optionally hide the dialog, show errors, and emit "finished"."""
        if close:
            self.hide()
        if status == EXIT_FAILED and show_error:
            err_dia = AptErrorDialog(self._transaction.error, self)
            err_dia.run()
            err_dia.hide()
        self.emit("finished")
class _ExpandableDialog(Gtk.Dialog):
    """Dialog with an expander."""
    def __init__(self, parent=None, stock_type=None, expanded_child=None,
                 expander_label=None, title=None, message=None, buttons=None):
        """Return an _AptDaemonDialog instance.

        Keyword arguments:
        parent -- set the dialog transient for the given Gtk.Window
        stock_type -- type of the Dialog, defaults to Gtk.STOCK_DIALOG_QUESTION
        expanded_child -- Widget which should be expanded
        expander_label -- label for the expander
        title -- a news header like title of the dialog
        message -- the message which should be shown in the dialog
        buttons -- tuple containing button text/reponse id pairs, defaults
                   to a close button
        """
        if not buttons:
            buttons = (Gtk.STOCK_CLOSE, Gtk.ResponseType.CLOSE)
        Gtk.Dialog.__init__(self, parent=parent)
        self.set_title("")
        self.add_buttons(*buttons)
        self.set_resizable(False)
        self.set_border_width(6)
        self.get_content_area().set_spacing(12)
        if not stock_type:
            stock_type = Gtk.STOCK_DIALOG_QUESTION
        icon = Gtk.Image.new_from_stock(stock_type, Gtk.IconSize.DIALOG)
        icon.set_alignment(0, 0)
        hbox_base = Gtk.HBox()
        hbox_base.set_spacing(12)
        hbox_base.set_border_width(6)
        vbox_left = Gtk.VBox()
        vbox_left.set_spacing(12)
        hbox_base.pack_start(icon, False, True, 0)
        hbox_base.pack_start(vbox_left, True, True, 0)
        self.label = Gtk.Label()
        self.label.set_selectable(True)
        self.label.set_alignment(0, 0)
        self.label.set_line_wrap(True)
        vbox_left.pack_start(self.label, False, True, 0)
        self.get_content_area().pack_start(hbox_base, True, True, 0)
        # The expander widget
        self.expander = Gtk.Expander(label=expander_label)
        self.expander.set_spacing(6)
        self.expander.set_use_underline(True)
        self.expander.connect("notify::expanded", self._on_expanded)
        # Remembered window size while expanded, restored on re-expansion.
        self._expanded_size = None
        vbox_left.pack_start(self.expander, True, True, 0)
        # Set some initial data
        text = ""
        if title:
            text = "<b><big>%s</big></b>" % title
        if message:
            if text:
                text += "\n\n"
            text += message
        self.label.set_markup(text)
        if expanded_child:
            self.expander.add(expanded_child)
        else:
            # Nothing to expand: grey out the expander.
            self.expander.set_sensitive(False)
    def _on_expanded(self, expander, param):
        # Toggle resizability with the expander state and remember/restore
        # the expanded window size.
        if expander.get_expanded():
            self.set_resizable(True)
            if self._expanded_size:
                # Workaround a random crash during progress dialog expanding
                # It seems that either the gtk.Window.get_size() method
                # doesn't always return a tuple or that the
                # gtk.Window.set_size() method doesn't correctly handle *
                # arguments correctly, see LP#898851
                try:
                    self.resize(self._expanded_size[0], self._expanded_size[1])
                except (IndexError, TypeError):
                    pass
        else:
            self._expanded_size = self.get_size()
            self.set_resizable(False)
class AptMediumRequiredDialog(Gtk.MessageDialog):
    """Dialog asking the user to insert a required CD/DVD medium."""
    def __init__(self, medium, drive, parent=None):
        Gtk.MessageDialog.__init__(self, parent=parent,
                                   type=Gtk.MessageType.INFO)
        #TRANSLATORS: %s represents the name of a CD or DVD
        header = _("CD/DVD '%s' is required") % medium
        #TRANSLATORS: %s is the name of the CD/DVD drive
        body = _("Please insert the above CD/DVD into the drive '%s' to "
                 "install software packages from it.") % drive
        self.set_markup("<big><b>%s</b></big>\n\n%s" % (header, body))
        self.add_buttons(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
                         _("C_ontinue"), Gtk.ResponseType.OK)
        self.set_default_response(Gtk.ResponseType.OK)
class AptConfirmDialog(Gtk.Dialog):
    """Dialog to confirm the changes that would be required by a
    transaction.
    """
    def __init__(self, trans, cache=None, parent=None):
        """Return an AptConfirmDialog instance.

        Keyword arguments:
        trans -- the transaction of which the dependencies should be shown
        cache -- an optional apt.cache.Cache() instance to provide more
                 details about packages
        parent -- set the dialog transient for the given Gtk.Window
        """
        Gtk.Dialog.__init__(self, parent=parent)
        self.add_button(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL)
        self.add_button(_("C_ontinue"), Gtk.ResponseType.OK)
        self.cache = cache
        self.trans = trans
        if isinstance(parent, Gdk.Window):
            # A raw Gdk.Window cannot be passed to set_transient_for();
            # realize first and set it on the underlying window.
            self.realize()
            self.window.set_transient_for(parent)
        else:
            self.set_transient_for(parent)
        self.set_resizable(True)
        self.set_border_width(6)
        self.get_content_area().set_spacing(12)
        icon = Gtk.Image.new_from_stock(Gtk.STOCK_DIALOG_QUESTION,
                                        Gtk.IconSize.DIALOG)
        icon.set_alignment(0, 0)
        hbox_base = Gtk.HBox()
        hbox_base.set_spacing(12)
        hbox_base.set_border_width(6)
        vbox_left = Gtk.VBox()
        vbox_left.set_spacing(12)
        hbox_base.pack_start(icon, False, True, 0)
        hbox_base.pack_start(vbox_left, True, True, 0)
        self.label = Gtk.Label()
        self.label.set_selectable(True)
        self.label.set_alignment(0, 0)
        vbox_left.pack_start(self.label, False, True, 0)
        self.get_content_area().pack_start(hbox_base, True, True, 0)
        # Tree of requested changes, grouped by change type.
        self.treestore = Gtk.TreeStore(GObject.TYPE_STRING)
        self.treeview = Gtk.TreeView.new_with_model(self.treestore)
        self.treeview.set_headers_visible(False)
        self.treeview.set_rules_hint(True)
        self.column = Gtk.TreeViewColumn()
        self.treeview.append_column(self.column)
        cell_icon = Gtk.CellRendererPixbuf()
        self.column.pack_start(cell_icon, False)
        self.column.set_cell_data_func(cell_icon, self.render_package_icon,
                                       None)
        cell_desc = Gtk.CellRendererText()
        self.column.pack_start(cell_desc, True)
        self.column.set_cell_data_func(cell_desc, self.render_package_desc,
                                       None)
        self.scrolled = Gtk.ScrolledWindow()
        self.scrolled.set_policy(Gtk.PolicyType.AUTOMATIC,
                                 Gtk.PolicyType.AUTOMATIC)
        self.scrolled.set_shadow_type(Gtk.ShadowType.ETCHED_IN)
        self.scrolled.add(self.treeview)
        vbox_left.pack_start(self.scrolled, True, True, 0)
        self.set_default_response(Gtk.ResponseType.CANCEL)
    def _show_changes(self):
        """Show a message and the dependencies in the dialog."""
        self.treestore.clear()
        # The order of the labels must match the PKGS_* indices of
        # trans.dependencies.
        for index, msg in enumerate([_("Install"),
                                     _("Reinstall"),
                                     _("Remove"),
                                     _("Purge"),
                                     _("Upgrade"),
                                     _("Downgrade"),
                                     _("Skip upgrade")]):
            if self.trans.dependencies[index]:
                piter = self.treestore.append(None, ["<b>%s</b>" % msg])
                for pkg in self.trans.dependencies[index]:
                    for object in self.map_package(pkg):
                        self.treestore.append(piter, [str(object)])
        # If there is only one type of changes (e.g. only installs) expand the
        # tree
        #FIXME: adapt the title and message accordingly
        #FIXME: Should we have different modes? Only show dependencies, only
        # initial packages or both?
        msg = _("Please take a look at the list of changes below.")
        if len(self.treestore) == 1:
            # Exactly one category is non-empty, so exactly one of the
            # elif branches below is guaranteed to bind "title".
            filtered_store = self.treestore.filter_new(
                Gtk.TreePath.new_first())
            self.treeview.expand_all()
            self.treeview.set_model(filtered_store)
            self.treeview.set_show_expanders(False)
            if self.trans.dependencies[PKGS_INSTALL]:
                title = _("Additional software has to be installed")
            elif self.trans.dependencies[PKGS_REINSTALL]:
                title = _("Additional software has to be re-installed")
            elif self.trans.dependencies[PKGS_REMOVE]:
                title = _("Additional software has to be removed")
            elif self.trans.dependencies[PKGS_PURGE]:
                title = _("Additional software has to be purged")
            elif self.trans.dependencies[PKGS_UPGRADE]:
                title = _("Additional software has to be upgraded")
            elif self.trans.dependencies[PKGS_DOWNGRADE]:
                title = _("Additional software has to be downgraded")
            elif self.trans.dependencies[PKGS_KEEP]:
                title = _("Updates will be skipped")
            if len(filtered_store) < 6:
                self.set_resizable(False)
                self.scrolled.set_policy(Gtk.PolicyType.AUTOMATIC,
                                         Gtk.PolicyType.NEVER)
            else:
                self.treeview.set_size_request(350, 200)
        else:
            title = _("Additional changes are required")
            self.treeview.set_size_request(350, 200)
            self.treeview.collapse_all()
        if self.trans.download:
            msg += "\n"
            msg += (_("%sB will be downloaded in total.") %
                    apt_pkg.size_to_str(self.trans.download))
        if self.trans.space < 0:
            msg += "\n"
            msg += (_("%sB of disk space will be freed.") %
                    apt_pkg.size_to_str(self.trans.space))
        elif self.trans.space > 0:
            msg += "\n"
            msg += (_("%sB more disk space will be used.") %
                    apt_pkg.size_to_str(self.trans.space))
        self.label.set_markup("<b><big>%s</big></b>\n\n%s" % (title, msg))
    def map_package(self, pkg):
        """Map a package to a different object type, e.g. applications
        and return a list of those.

        By default return the package itself inside a list.

        Override this method if you don't want to store package names
        in the treeview.
        """
        return [pkg]
    def render_package_icon(self, column, cell, model, iter, data):
        """Data func for the Gtk.CellRendererPixbuf which shows the package.

        Override this method if you want to show custom icons for
        a package or map it to applications.
        """
        path = model.get_path(iter)
        if path.get_depth() == 0:
            # Top level rows are the category headers - no icon.
            cell.props.visible = False
        else:
            cell.props.visible = True
            cell.props.icon_name = "applications-other"
    def render_package_desc(self, column, cell, model, iter, data):
        """Data func for the Gtk.CellRendererText which shows the package.

        Override this method if you want to show more information about
        a package or map it to applications.
        """
        value = model.get_value(iter, 0)
        if not value:
            return
        try:
            # Values may be "name=version" strings as used by aptdaemon.
            pkg_name, pkg_version = value.split("=")[0:2]
        except ValueError:
            pkg_name = value
            pkg_version = None
        try:
            if pkg_version:
                text = "%s (%s)\n<small>%s</small>" % (
                    pkg_name, pkg_version, self.cache[pkg_name].summary)
            else:
                text = "%s\n<small>%s</small>" % (
                    pkg_name, self.cache[pkg_name].summary)
        except (KeyError, TypeError):
            # No cache available or unknown package: show name/version only.
            if pkg_version:
                text = "%s (%s)" % (pkg_name, pkg_version)
            else:
                text = "%s" % pkg_name
        cell.set_property("markup", text)
    def run(self):
        """Populate the change list, show the dialog and run it."""
        self._show_changes()
        self.show_all()
        return Gtk.Dialog.run(self)
class AptConfigFileConflictDialog(_ExpandableDialog):
    """Dialog to resolve conflicts between local and shipped
    configuration files.
    """
    def __init__(self, from_path, to_path, parent=None):
        self.from_path = from_path
        self.to_path = to_path
        #TRANSLATORS: %s is a file path
        title = _("Replace your changes in '%s' with a later version of "
                  "the configuration file?") % from_path
        msg = _("If you don't know why the file is there already, it is "
                "usually safe to replace it.")
        self.diffview = DiffView()
        self.diffview.set_size_request(-1, 200)
        scrolled = Gtk.ScrolledWindow()
        scrolled.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
        scrolled.set_shadow_type(Gtk.ShadowType.ETCHED_IN)
        scrolled.add(self.diffview)
        _ExpandableDialog.__init__(self, parent=parent,
                                   expander_label=_("_Changes"),
                                   expanded_child=scrolled,
                                   title=title, message=msg,
                                   buttons=(_("_Keep"), Gtk.ResponseType.NO,
                                            _("_Replace"),
                                            Gtk.ResponseType.YES))
        self.set_default_response(Gtk.ResponseType.YES)
    def run(self):
        """Show the dialog, render the diff and block for a response."""
        self.show_all()
        self.diffview.show_diff(self.from_path, self.to_path)
        return _ExpandableDialog.run(self)
# Matches the range line of a unified diff hunk header, e.g.
# "@@ -12,7 +24,9 @@", capturing the start line and optional context
# length of each side.  FIX: use raw strings so "\-" and "\+" are not
# treated as (invalid) string escape sequences, which raise a
# DeprecationWarning/SyntaxWarning on modern Python.  The resulting
# pattern value is unchanged.
REGEX_RANGE = r"^@@ \-(?P<from_start>[0-9]+)(?:,(?P<from_context>[0-9]+))? " \
              r"\+(?P<to_start>[0-9]+)(?:,(?P<to_context>[0-9]+))? @@"
class DiffView(Gtk.TextView):
    """Shows the difference between two files."""
    # Marker inserted when a hunk skips over unchanged lines.
    ELLIPSIS = "[…]\n"
    def __init__(self):
        self.textbuffer = Gtk.TextBuffer()
        Gtk.TextView.__init__(self, buffer=self.textbuffer)
        self.set_property("editable", False)
        self.set_cursor_visible(False)
        # Text tags used to colorize added/removed lines and line numbers.
        tags = self.textbuffer.get_tag_table()
        #FIXME: How to get better colors?
        tag_default = Gtk.TextTag.new("default")
        tag_default.set_properties(font="Mono")
        tags.add(tag_default)
        tag_add = Gtk.TextTag.new("add")
        tag_add.set_properties(font="Mono",
                               background='#8ae234')
        tags.add(tag_add)
        tag_remove = Gtk.TextTag.new("remove")
        tag_remove.set_properties(font="Mono",
                                  background='#ef2929')
        tags.add(tag_remove)
        tag_num = Gtk.TextTag.new("number")
        tag_num.set_properties(font="Mono",
                               background='#eee')
        tags.add(tag_num)
    def show_diff(self, from_path, to_path):
        """Show the difference between two files."""
        #FIXME: Use gio
        try:
            with open(from_path) as fp:
                from_lines = fp.readlines()
            with open(to_path) as fp:
                to_lines = fp.readlines()
        except IOError:
            # Show nothing if either file cannot be read.
            return
        # helper function to work around current un-introspectability of
        # varargs methods like insert_with_tags_by_name()
        def insert_tagged_text(iter, text, tag):
            #self.textbuffer.insert_with_tags_by_name(iter, text, tag)
            offset = iter.get_offset()
            self.textbuffer.insert(iter, text)
            self.textbuffer.apply_tag_by_name(
                tag, self.textbuffer.get_iter_at_offset(offset), iter)
        line_number = 0
        iter = self.textbuffer.get_start_iter()
        for line in difflib.unified_diff(from_lines, to_lines, lineterm=""):
            if line.startswith("@@"):
                # Hunk header: pick up the "from" side start line and show
                # an ellipsis for skipped unchanged lines.
                match = re.match(REGEX_RANGE, line)
                if not match:
                    continue
                line_number = int(match.group("from_start"))
                if line_number > 1:
                    insert_tagged_text(iter, self.ELLIPSIS, "default")
            elif line.startswith("---") or line.startswith("+++"):
                # Skip the file name header lines of the diff.
                continue
            elif line.startswith(" "):
                # Unchanged context line.
                line_number += 1
                insert_tagged_text(iter, str(line_number), "number")
                insert_tagged_text(iter, line, "default")
            elif line.startswith("-"):
                # Line removed from the original file.
                line_number += 1
                insert_tagged_text(iter, str(line_number), "number")
                insert_tagged_text(iter, line, "remove")
            elif line.startswith("+"):
                # Added line: no number on the "from" side, pad instead.
                spaces = " " * len(str(line_number))
                insert_tagged_text(iter, spaces, "number")
                insert_tagged_text(iter, line, "add")
class _DetailsExpanderMessageDialog(_ExpandableDialog):
    """Message dialog with optional expandable details.

    Common base class for Apt*Dialog: shows a title and message, plus an
    expander revealing *details* in a scrollable, word-wrapped text view.
    The expander is hidden entirely when no details are supplied.
    """

    def __init__(self, text, desc, type, details=None, parent=None):
        # Build the word-wrapping text view first, then wrap it in a
        # scrolled window that only shows scrollbars when needed.
        details_view = Gtk.TextView()
        details_view.set_wrap_mode(Gtk.WrapMode.WORD)
        details_buffer = details_view.get_buffer()
        scroll_win = Gtk.ScrolledWindow()
        scroll_win.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
        scroll_win.set_shadow_type(Gtk.ShadowType.ETCHED_IN)
        scroll_win.add(details_view)
        #TRANSLATORS: expander label in the error dialog
        _ExpandableDialog.__init__(self, parent=parent,
                                   expander_label=_("_Details"),
                                   expanded_child=scroll_win,
                                   title=text, message=desc,
                                   stock_type=type)
        self.show_all()
        if details:
            details_buffer.insert_at_cursor(details)
        else:
            # Nothing to expand; keep the dialog compact.
            self.expander.set_visible(False)
class AptErrorDialog(_DetailsExpanderMessageDialog):
    """
    Dialog for aptdaemon errors with details in an expandable text view
    """

    def __init__(self, error=None, parent=None):
        # Translate the aptdaemon error enum into a localized
        # title/description pair, then delegate to the shared base.
        title = get_error_string_from_enum(error.code)
        description = get_error_description_from_enum(error.code)
        _DetailsExpanderMessageDialog.__init__(
            self, title, description, Gtk.STOCK_DIALOG_ERROR,
            error.details, parent)
| yasoob/PythonRSSReader | venv/lib/python2.7/dist-packages/aptdaemon/gtk3widgets.py | Python | mit | 47,931 |
# //////////////////////////////////////////////////////////////////////
# //
# // Copyright (c) 2012 Audiokinetic Inc. / All Rights Reserved
# //
# //////////////////////////////////////////////////////////////////////
import platform
import BuildUtil
from BuildUtil import *
class DeployManager(object):
    """Deploys built Wwise Unity Integration plugin binaries.

    Copies the plugin binaries produced by a build from the build output
    folder into the deployment source folders described by *pathMan*.
    """

    # Platforms this post-build deployment step knows how to handle.
    SupportedPlatforms = ['Android']

    def __init__(self, pathMan, Config=None, BuildDir=None, Arch=None):
        self.pathMan = pathMan
        # BUG fix: create the logger *before* __CreatePluginBinaryPaths —
        # the original assigned self.logger afterwards, so the unsupported-
        # platform error path crashed with AttributeError instead of logging.
        self.logger = CreateLogger(pathMan.Paths['Log'], __file__, self.__class__.__name__)
        self.Product = self.__CreatePluginBinaryPaths(Config, BuildDir, Arch)
        # No build folder given means deploy-only mode (no fresh build output).
        self.isDeployOnly = BuildDir is None

    def __CreatePluginBinaryPaths(self, Config, BuildDir, Arch):
        """Return the platform-specific product descriptor.

        Raises RuntimeError for platforms this manager does not support.
        """
        if self.pathMan.PlatformName == 'Android':
            return NdkProduct(self.pathMan, Config, Arch)
        # BUG fix: the original referenced the bare name SupportedPlatforms,
        # which is a NameError at runtime — it is a class attribute.
        msg = 'Unsupported platform for post-build deployment: {}, available options: {}'.format(
            self.pathMan.PlatformName, ', '.join(self.SupportedPlatforms))
        self.logger.error(msg)
        raise RuntimeError(msg)

    def Deploy(self):
        '''Copy plugin binaries from build folder to deploy source folder.'''
        self.Product.PreProcess()
        # Platform-specific binaries.
        for b in self.Product.Binaries:
            src = join(self.Product.BuildDir, b)
            dest = join(self.Product.DeploySrcDir, b)
            self.logger.info('Copy plugin from build folder to deployment folder:\n Source: {}\n Destination: {}'.format(src, dest))
            BuildUtil.RecursiveReplace(src, dest)
        # Binaries shared between architectures/configurations.
        for b in self.Product.CommonBinaries:
            src = join(self.Product.BuildDir, b)
            dest = join(self.Product.CommonDeploySrcDir, b)
            self.logger.info('Copy plugin from build folder to deployment folder:\n Source: {}\n Destination: {}'.format(src, dest))
            BuildUtil.RecursiveReplace(src, dest)
        self.logger.info(BuildUtil.Messages['Prog_Success'])
def ParseAndValidateArgs(argStr=None):
    '''Read command line arguments and create variables.

    argStr: optional whitespace-separated argument string used by unit
        tests; when None, sys.argv is parsed.
    Returns [pathMan, config, buildDir, arch].
    Raises RuntimeError when the platform needs an architecture but none
    was given, when the build folder does not exist, or when the build
    configuration is missing/unsupported.
    '''
    parser = argparse.ArgumentParser(description='Post-build event to deploy Wwise Unity Integartion')
    parser.add_argument('targetPlatform', metavar='platform', action='store', help='Target platform name, accepted options: {}.'.format(', '.join(DeployManager.SupportedPlatforms)))
    parser.add_argument('-a', '--arch', action='store', nargs=1, dest='arch', default=None, help='Target architecture name if available, default to None.')
    parser.add_argument('-b', '--builddir', action='store', nargs=1, dest='buildDir', default=None, help='Full path to build output folder. The build product is copied from this source location to destination deployemtn location.')
    parser.add_argument('-c', '--config', action='store', nargs=1, dest='config', default=None, help='Build configuration, available options: {}'.format(', '.join(BuildUtil.SupportedConfigs)))
    parser.add_argument('-u', '--unityprojroot', action='store', nargs=1, dest='unityProjectRoot', default=None, help='Path to Unity project root directory. If this option is set, then deploy the Integration to the Unity project assets folder.')
    if argStr is None:
        args = parser.parse_args()
    else:  # for unittest
        args = parser.parse_args(argStr.split())

    def unwrap(value):
        # argparse produces a one-element list when nargs=1; unwrap it
        # (None means the option was not given).
        return None if value is None else value[0]

    targetPlatform = args.targetPlatform
    arch = unwrap(args.arch)
    buildDir = unwrap(args.buildDir)
    config = unwrap(args.config)
    unityProjRoot = unwrap(args.unityProjectRoot)
    pathMan = PathManager(targetPlatform, arch, unityProjRoot)
    # Verify and fail if necessary
    logger = CreateLogger(pathMan.Paths['Log'], __file__, ParseAndValidateArgs.__name__)
    if targetPlatform in BuildUtil.SupportedArches and arch is None:
        msg = 'Target platform {} is a multi-architecture platform but no architecture is given (-a). Aborted.'.format(targetPlatform)
        logger.error(msg)
        raise RuntimeError(msg)
    if buildDir is not None and not os.path.exists(buildDir):
        msg = 'Failed to find build folder: {}. Aborted.'.format(buildDir)
        logger.error(msg)
        raise RuntimeError(msg)
    if config is None or config not in BuildUtil.SupportedConfigs:
        msg = 'Unsupported build configuration: {}. Aborted.'.format(config)
        logger.error(msg)
        raise RuntimeError(msg)
    return [pathMan, config, buildDir, arch]
def main():
    """Script entry point: validate arguments, then run the deployment."""
    CheckPythonVersion()
    pathMan, config, buildDir, arch = ParseAndValidateArgs()
    DeployManager(pathMan, config, buildDir, arch).Deploy()
# Allow importing this module (e.g. from unit tests) without side effects.
if __name__ == '__main__':
    main()
| Khoyo/mini_ld_48 | Assets/Code/AkSoundEngine/Common/DeployIntegration.py | Python | gpl-3.0 | 4,595 |
from qtpy.QtCore import Qt
from qtpy.QtWidgets import QVBoxLayout
from addie.rietveld.braggview import BraggView
from addie.rietveld.braggtree import BraggTree
def run(main_window=None):
    """Initialize the widgets of the Rietveld tab.

    Embeds the Bragg plot view and workspace tree into their placeholder
    frames, configures the splitter, x-unit combo box, bank check boxes
    and related bookkeeping attributes on *main_window*.

    NOTE(review): assumes *main_window* is the application's main window
    exposing a ``rietveld_ui`` attribute — the None default is never
    usable; confirm against callers.
    """
    # frame_graphicsView_bragg
    graphicsView_layout = QVBoxLayout()
    main_window.rietveld_ui.frame_graphicsView_bragg.setLayout(graphicsView_layout)
    main_window.rietveld_ui.graphicsView_bragg = BraggView(main_window)
    graphicsView_layout.addWidget(main_window.rietveld_ui.graphicsView_bragg)
    # frame_treeWidget_braggWSList
    temp_layout = QVBoxLayout()
    main_window.rietveld_ui.frame_treeWidget_braggWSList.setLayout(temp_layout)
    main_window.rietveld_ui.treeWidget_braggWSList = BraggTree(main_window)
    temp_layout.addWidget(main_window.rietveld_ui.treeWidget_braggWSList)
    # Give the splitter handle a visible grab icon.
    main_window.rietveld_ui.splitter_2.setStyleSheet("""
        QSplitter::handle {
            image: url(':/MPL Toolbar/vertical_splitter_icon.png');
        }
        """)
    main_window.rietveld_ui.splitter_2.setSizes([1000, 1])
    # Populate the x-axis unit selector and default it to dSpacing.
    main_window.rietveld_ui.comboBox_xUnit.clear()
    main_window.rietveld_ui.comboBox_xUnit.addItems(['TOF', 'dSpacing', 'Q'])
    index = main_window.rietveld_ui.comboBox_xUnit.findText('dSpacing', Qt.MatchFixedString)
    main_window.rietveld_ui.comboBox_xUnit.setCurrentIndex(index)
    main_window.rietveld_ui.treeWidget_braggWSList.set_main_window(main_window)
    main_window.rietveld_ui.treeWidget_braggWSList.add_main_item('workspaces',
                                                                append=True,
                                                                as_current_index=False)
    main_window.rietveld_ui.radioButton_multiBank.setChecked(True)
    # organize widgets group
    main_window._braggBankWidgets = {1: main_window.rietveld_ui.checkBox_bank1,
                                     2: main_window.rietveld_ui.checkBox_bank2,
                                     3: main_window.rietveld_ui.checkBox_bank3,
                                     4: main_window.rietveld_ui.checkBox_bank4,
                                     5: main_window.rietveld_ui.checkBox_bank5,
                                     6: main_window.rietveld_ui.checkBox_bank6}
    # Remember each bank's initial checked state.
    main_window._braggBankWidgetRecords = dict()
    for bank_id in main_window._braggBankWidgets:
        checked = main_window._braggBankWidgets[bank_id].isChecked()
        main_window._braggBankWidgetRecords[bank_id] = checked
    # some controlling variables
    main_window._currBraggXUnit = str(main_window.rietveld_ui.comboBox_xUnit.currentText())
    if main_window._currBraggXUnit == 'Q':
        # Mantid's name for the Q unit.
        main_window._currBraggXUnit = 'MomentumTransfer'
    main_window._onCanvasGSSBankList = list()
| neutrons/FastGR | addie/initialization/widgets/rietveld_tab.py | Python | mit | 2,725 |
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
    """Pybindgen error handler that demotes wrapper failures to warnings."""

    def handle_error(self, wrapper, exception, traceback_):
        warnings.warn("exception %r in wrapper %s" % (exception, wrapper))
        # Returning True tells pybindgen to skip the failing wrapper and
        # continue generating the rest of the bindings.
        return True
# Install the lenient handler so binding generation continues past failures.
pybindgen.settings.error_handler = ErrorHandler()

import sys
def module_init():
    """Create and return the root pybindgen module for ns.topology_read."""
    topology_read = Module('ns.topology_read', cpp_namespace='::ns3')
    return topology_read
def register_types(module):
    """Register all C++ types exported by the ns-3 topology-read module.

    Machine-generated pybindgen registration: declares every class, enum
    and container used by the bindings, then recurses into the nested
    ns3::FatalImpl and ns3::Hash namespaces.
    """
    root_module = module.get_root()
    ## address.h (module 'network'): ns3::Address [class]
    module.add_class('Address', import_from_module='ns.network')
    ## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration]
    module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
    module.add_class('AttributeConstructionList', import_from_module='ns.core')
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
    module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
    ## callback.h (module 'core'): ns3::CallbackBase [class]
    module.add_class('CallbackBase', import_from_module='ns.core')
    ## hash.h (module 'core'): ns3::Hasher [class]
    module.add_class('Hasher', import_from_module='ns.core')
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
    module.add_class('Ipv4Address', import_from_module='ns.network')
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
    root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Mask [class]
    module.add_class('Ipv4Mask', import_from_module='ns.network')
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
    module.add_class('Ipv6Address', import_from_module='ns.network')
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
    root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix [class]
    module.add_class('Ipv6Prefix', import_from_module='ns.network')
    ## node-container.h (module 'network'): ns3::NodeContainer [class]
    module.add_class('NodeContainer', import_from_module='ns.network')
    ## object-base.h (module 'core'): ns3::ObjectBase [class]
    module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
    ## object.h (module 'core'): ns3::ObjectDeleter [struct]
    module.add_class('ObjectDeleter', import_from_module='ns.core')
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## tag-buffer.h (module 'network'): ns3::TagBuffer [class]
    module.add_class('TagBuffer', import_from_module='ns.network')
    ## nstime.h (module 'core'): ns3::TimeWithUnit [class]
    module.add_class('TimeWithUnit', import_from_module='ns.core')
    ## topology-reader-helper.h (module 'topology-read'): ns3::TopologyReaderHelper [class]
    module.add_class('TopologyReaderHelper')
    ## type-id.h (module 'core'): ns3::TypeId [class]
    module.add_class('TypeId', import_from_module='ns.core')
    ## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
    module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
    module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
    module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
    ## empty.h (module 'core'): ns3::empty [class]
    module.add_class('empty', import_from_module='ns.core')
    ## int64x64-double.h (module 'core'): ns3::int64x64_t [class]
    module.add_class('int64x64_t', import_from_module='ns.core')
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::impl_type [enumeration]
    module.add_enum('impl_type', ['int128_impl', 'cairo_impl', 'ld_impl'], outer_class=root_module['ns3::int64x64_t'], import_from_module='ns.core')
    ## object.h (module 'core'): ns3::Object [class]
    module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
    ## object.h (module 'core'): ns3::Object::AggregateIterator [class]
    module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## nstime.h (module 'core'): ns3::Time [class]
    module.add_class('Time', import_from_module='ns.core')
    ## nstime.h (module 'core'): ns3::Time::Unit [enumeration]
    module.add_enum('Unit', ['Y', 'D', 'H', 'MIN', 'S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core')
    ## nstime.h (module 'core'): ns3::Time [class]
    root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t'])
    ## topology-reader.h (module 'topology-read'): ns3::TopologyReader [class]
    module.add_class('TopologyReader', parent=root_module['ns3::Object'])
    ## topology-reader.h (module 'topology-read'): ns3::TopologyReader::Link [class]
    module.add_class('Link', outer_class=root_module['ns3::TopologyReader'])
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]
    module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    ## attribute.h (module 'core'): ns3::AttributeAccessor [class]
    module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    ## attribute.h (module 'core'): ns3::AttributeChecker [class]
    module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    ## attribute.h (module 'core'): ns3::AttributeValue [class]
    module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    ## callback.h (module 'core'): ns3::CallbackChecker [class]
    module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## callback.h (module 'core'): ns3::CallbackImplBase [class]
    module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    ## callback.h (module 'core'): ns3::CallbackValue [class]
    module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]
    module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## inet-topology-reader.h (module 'topology-read'): ns3::InetTopologyReader [class]
    module.add_class('InetTopologyReader', parent=root_module['ns3::TopologyReader'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker [class]
    module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue [class]
    module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker [class]
    module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue [class]
    module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker [class]
    module.add_class('Ipv6AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue [class]
    module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker [class]
    module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue [class]
    module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## net-device.h (module 'network'): ns3::NetDevice [class]
    module.add_class('NetDevice', import_from_module='ns.network', parent=root_module['ns3::Object'])
    ## net-device.h (module 'network'): ns3::NetDevice::PacketType [enumeration]
    module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network')
    ## node.h (module 'network'): ns3::Node [class]
    module.add_class('Node', import_from_module='ns.network', parent=root_module['ns3::Object'])
    ## orbis-topology-reader.h (module 'topology-read'): ns3::OrbisTopologyReader [class]
    module.add_class('OrbisTopologyReader', parent=root_module['ns3::TopologyReader'])
    ## rocketfuel-topology-reader.h (module 'topology-read'): ns3::RocketfuelTopologyReader [class]
    module.add_class('RocketfuelTopologyReader', parent=root_module['ns3::TopologyReader'])
    ## nstime.h (module 'core'): ns3::TimeValue [class]
    module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## type-id.h (module 'core'): ns3::TypeIdChecker [class]
    module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## type-id.h (module 'core'): ns3::TypeIdValue [class]
    module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## address.h (module 'network'): ns3::AddressChecker [class]
    module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## address.h (module 'network'): ns3::AddressValue [class]
    module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    module.add_container('std::map< std::string, std::string >', ('std::string', 'std::string'), container_type=u'map')
    ## Register a nested module for the namespace FatalImpl
    nested_module = module.add_cpp_namespace('FatalImpl')
    register_types_ns3_FatalImpl(nested_module)
    ## Register a nested module for the namespace Hash
    nested_module = module.add_cpp_namespace('Hash')
    register_types_ns3_Hash(nested_module)
def register_types_ns3_FatalImpl(module):
    """Register types for the ns3::FatalImpl namespace (none exported)."""
    root_module = module.get_root()
def register_types_ns3_Hash(module):
    """Register types for the ns3::Hash namespace.

    Declares the hash Implementation base class, the 32/64-bit hash
    function-pointer aliases, and the nested Hash::Function namespace.
    """
    root_module = module.get_root()
    ## hash-function.h (module 'core'): ns3::Hash::Implementation [class]
    module.add_class('Implementation', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) *', u'ns3::Hash::Hash32Function_ptr')
    typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) **', u'ns3::Hash::Hash32Function_ptr*')
    typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) *&', u'ns3::Hash::Hash32Function_ptr&')
    typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) *', u'ns3::Hash::Hash64Function_ptr')
    typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) **', u'ns3::Hash::Hash64Function_ptr*')
    typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) *&', u'ns3::Hash::Hash64Function_ptr&')
    ## Register a nested module for the namespace Function
    nested_module = module.add_cpp_namespace('Function')
    register_types_ns3_Hash_Function(nested_module)
def register_types_ns3_Hash_Function(module):
    """Register the concrete hash implementations in ns3::Hash::Function."""
    root_module = module.get_root()
    ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a [class]
    module.add_class('Fnv1a', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32 [class]
    module.add_class('Hash32', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64 [class]
    module.add_class('Hash64', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
    ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3 [class]
    module.add_class('Murmur3', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
def register_methods(root_module):
    """Register the member functions of every type declared in register_types.

    Machine-generated dispatch: one register_Ns3*_methods call per class,
    in dependency order.
    """
    register_Ns3Address_methods(root_module, root_module['ns3::Address'])
    register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
    register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
    register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
    register_Ns3Hasher_methods(root_module, root_module['ns3::Hasher'])
    register_Ns3Ipv4Address_methods(root_module, root_module['ns3::Ipv4Address'])
    register_Ns3Ipv4Mask_methods(root_module, root_module['ns3::Ipv4Mask'])
    register_Ns3Ipv6Address_methods(root_module, root_module['ns3::Ipv6Address'])
    register_Ns3Ipv6Prefix_methods(root_module, root_module['ns3::Ipv6Prefix'])
    register_Ns3NodeContainer_methods(root_module, root_module['ns3::NodeContainer'])
    register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
    register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
    register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
    register_Ns3TagBuffer_methods(root_module, root_module['ns3::TagBuffer'])
    register_Ns3TimeWithUnit_methods(root_module, root_module['ns3::TimeWithUnit'])
    register_Ns3TopologyReaderHelper_methods(root_module, root_module['ns3::TopologyReaderHelper'])
    register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
    register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
    register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
    register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
    register_Ns3Int64x64_t_methods(root_module, root_module['ns3::int64x64_t'])
    register_Ns3Object_methods(root_module, root_module['ns3::Object'])
    register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
    register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    register_Ns3Time_methods(root_module, root_module['ns3::Time'])
    register_Ns3TopologyReader_methods(root_module, root_module['ns3::TopologyReader'])
    register_Ns3TopologyReaderLink_methods(root_module, root_module['ns3::TopologyReader::Link'])
    register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
    register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
    register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
    register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
    register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
    register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
    register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
    register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
    register_Ns3InetTopologyReader_methods(root_module, root_module['ns3::InetTopologyReader'])
    register_Ns3Ipv4AddressChecker_methods(root_module, root_module['ns3::Ipv4AddressChecker'])
    register_Ns3Ipv4AddressValue_methods(root_module, root_module['ns3::Ipv4AddressValue'])
    register_Ns3Ipv4MaskChecker_methods(root_module, root_module['ns3::Ipv4MaskChecker'])
    register_Ns3Ipv4MaskValue_methods(root_module, root_module['ns3::Ipv4MaskValue'])
    register_Ns3Ipv6AddressChecker_methods(root_module, root_module['ns3::Ipv6AddressChecker'])
    register_Ns3Ipv6AddressValue_methods(root_module, root_module['ns3::Ipv6AddressValue'])
    register_Ns3Ipv6PrefixChecker_methods(root_module, root_module['ns3::Ipv6PrefixChecker'])
    register_Ns3Ipv6PrefixValue_methods(root_module, root_module['ns3::Ipv6PrefixValue'])
    register_Ns3NetDevice_methods(root_module, root_module['ns3::NetDevice'])
    register_Ns3Node_methods(root_module, root_module['ns3::Node'])
    register_Ns3OrbisTopologyReader_methods(root_module, root_module['ns3::OrbisTopologyReader'])
    register_Ns3RocketfuelTopologyReader_methods(root_module, root_module['ns3::RocketfuelTopologyReader'])
    register_Ns3TimeValue_methods(root_module, root_module['ns3::TimeValue'])
    register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
    register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
    register_Ns3AddressChecker_methods(root_module, root_module['ns3::AddressChecker'])
    register_Ns3AddressValue_methods(root_module, root_module['ns3::AddressValue'])
    register_Ns3HashImplementation_methods(root_module, root_module['ns3::Hash::Implementation'])
    register_Ns3HashFunctionFnv1a_methods(root_module, root_module['ns3::Hash::Function::Fnv1a'])
    register_Ns3HashFunctionHash32_methods(root_module, root_module['ns3::Hash::Function::Hash32'])
    register_Ns3HashFunctionHash64_methods(root_module, root_module['ns3::Hash::Function::Hash64'])
    register_Ns3HashFunctionMurmur3_methods(root_module, root_module['ns3::Hash::Function::Murmur3'])
    return
def register_Ns3Address_methods(root_module, cls):
    """Bind ns3::Address (address.h, module 'network'): operators, constructors and member functions."""
    # Operators, registered in the same order as the original generated code.
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors: default, raw (type, buffer, len), and copy constructor.
    cls.add_constructor([])
    cls.add_constructor([param('uint8_t', 'type'),
                         param('uint8_t const *', 'buffer'),
                         param('uint8_t', 'len')])
    cls.add_constructor([param('ns3::Address const &', 'address')])
    # (name, return type, parameters, extra keyword flags) for each member
    # function, listed in the original registration order.
    member_functions = [
        ('CheckCompatible', 'bool',
         [param('uint8_t', 'type'), param('uint8_t', 'len')], {'is_const': True}),
        ('CopyAllFrom', 'uint32_t',
         [param('uint8_t const *', 'buffer'), param('uint8_t', 'len')], {}),
        ('CopyAllTo', 'uint32_t',
         [param('uint8_t *', 'buffer'), param('uint8_t', 'len')], {'is_const': True}),
        ('CopyFrom', 'uint32_t',
         [param('uint8_t const *', 'buffer'), param('uint8_t', 'len')], {}),
        ('CopyTo', 'uint32_t', [param('uint8_t *', 'buffer')], {'is_const': True}),
        ('Deserialize', 'void', [param('ns3::TagBuffer', 'buffer')], {}),
        ('GetLength', 'uint8_t', [], {'is_const': True}),
        ('GetSerializedSize', 'uint32_t', [], {'is_const': True}),
        ('IsInvalid', 'bool', [], {'is_const': True}),
        ('IsMatchingType', 'bool', [param('uint8_t', 'type')], {'is_const': True}),
        ('Register', 'uint8_t', [], {'is_static': True}),
        ('Serialize', 'void', [param('ns3::TagBuffer', 'buffer')], {'is_const': True}),
    ]
    for fn_name, fn_retval, fn_params, fn_flags in member_functions:
        cls.add_method(fn_name, fn_retval, fn_params, **fn_flags)
    return
def register_Ns3AttributeConstructionList_methods(root_module, cls):
    """Bind ns3::AttributeConstructionList (attribute-construction-list.h, module 'core')."""
    # Copy constructor followed by the default constructor, as declared.
    cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
    cls.add_constructor([])
    # Add(name, checker, value) records an attribute for construction.
    cls.add_method('Add', 'void',
                   [param('std::string', 'name'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'),
                    param('ns3::Ptr< ns3::AttributeValue >', 'value')])
    # Begin()/End() expose const list iterators over the stored items.
    for iter_name in ('Begin', 'End'):
        cls.add_method(iter_name,
                       'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
                       [], is_const=True)
    # Find(checker) looks up the value recorded for a given checker.
    cls.add_method('Find', 'ns3::Ptr< ns3::AttributeValue >',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True)
    return
def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
    """Bind ns3::AttributeConstructionList::Item: constructors and public data members."""
    # Default constructor, then copy constructor.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')])
    # Mutable public fields of the Item struct, in declaration order.
    for attr_name, attr_type in (
            ('checker', 'ns3::Ptr< ns3::AttributeChecker const >'),
            ('name', 'std::string'),
            ('value', 'ns3::Ptr< ns3::AttributeValue >')):
        cls.add_instance_attribute(attr_name, attr_type, is_const=False)
    return
def register_Ns3CallbackBase_methods(root_module, cls):
    """Bind ns3::CallbackBase (callback.h, module 'core')."""
    # Public constructors: copy, then default.
    cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
    cls.add_constructor([])
    # GetImpl() returns the underlying callback implementation pointer.
    cls.add_method('GetImpl', 'ns3::Ptr< ns3::CallbackImplBase >', [], is_const=True)
    # Protected constructor taking the callback implementation directly.
    cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')],
                        visibility='protected')
    # Protected static helper that demangles a C++ symbol name.
    cls.add_method('Demangle', 'std::string',
                   [param('std::string const &', 'mangled')],
                   is_static=True, visibility='protected')
    return
def register_Ns3Hasher_methods(root_module, cls):
    """Bind ns3::Hasher (hash.h, module 'core')."""
    # Constructors: copy, default, and from a hash implementation pointer.
    cls.add_constructor([param('ns3::Hasher const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ptr< ns3::Hash::Implementation >', 'hp')])
    # Each width has a (buffer, size) overload followed by a std::string
    # overload, matching the original registration order.
    for bits in ('32', '64'):
        hash_name = 'GetHash' + bits
        hash_retval = 'uint' + bits + '_t'
        cls.add_method(hash_name, hash_retval,
                       [param('char const *', 'buffer'), param('size_t const', 'size')])
        cls.add_method(hash_name, hash_retval, [param('std::string const', 's')])
    # clear() resets hasher state and returns a reference for chaining.
    cls.add_method('clear', 'ns3::Hasher &', [])
    return
def register_Ns3Ipv4Address_methods(root_module, cls):
    """Bind ns3::Ipv4Address (ipv4-address.h, module 'network')."""
    # Comparison and output operators.
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors: copy, default, from raw uint32_t, from dotted-quad string.
    cls.add_constructor([param('ns3::Ipv4Address const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('uint32_t', 'address')])
    cls.add_constructor([param('char const *', 'address')])
    # (name, return type, parameters, extra keyword flags) in the original
    # registration order.
    member_functions = [
        ('CombineMask', 'ns3::Ipv4Address',
         [param('ns3::Ipv4Mask const &', 'mask')], {'is_const': True}),
        ('ConvertFrom', 'ns3::Ipv4Address',
         [param('ns3::Address const &', 'address')], {'is_static': True}),
        ('Deserialize', 'ns3::Ipv4Address',
         [param('uint8_t const *', 'buf')], {'is_static': True}),
        ('Get', 'uint32_t', [], {'is_const': True}),
        ('GetAny', 'ns3::Ipv4Address', [], {'is_static': True}),
        ('GetBroadcast', 'ns3::Ipv4Address', [], {'is_static': True}),
        ('GetLoopback', 'ns3::Ipv4Address', [], {'is_static': True}),
        ('GetSubnetDirectedBroadcast', 'ns3::Ipv4Address',
         [param('ns3::Ipv4Mask const &', 'mask')], {'is_const': True}),
        ('GetZero', 'ns3::Ipv4Address', [], {'is_static': True}),
        ('IsBroadcast', 'bool', [], {'is_const': True}),
        ('IsEqual', 'bool',
         [param('ns3::Ipv4Address const &', 'other')], {'is_const': True}),
        ('IsLocalMulticast', 'bool', [], {'is_const': True}),
        ('IsMatchingType', 'bool',
         [param('ns3::Address const &', 'address')], {'is_static': True}),
        ('IsMulticast', 'bool', [], {'is_const': True}),
        ('IsSubnetDirectedBroadcast', 'bool',
         [param('ns3::Ipv4Mask const &', 'mask')], {'is_const': True}),
        ('Print', 'void', [param('std::ostream &', 'os')], {'is_const': True}),
        ('Serialize', 'void', [param('uint8_t *', 'buf')], {'is_const': True}),
        ('Set', 'void', [param('uint32_t', 'address')], {}),
        ('Set', 'void', [param('char const *', 'address')], {}),
    ]
    for fn_name, fn_retval, fn_params, fn_flags in member_functions:
        cls.add_method(fn_name, fn_retval, fn_params, **fn_flags)
    return
def register_Ns3Ipv4Mask_methods(root_module, cls):
    """Bind ns3::Ipv4Mask (ipv4-address.h, module 'network')."""
    # Operators (note: no '<' operator for masks).
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors: copy, default, from raw uint32_t, from string.
    cls.add_constructor([param('ns3::Ipv4Mask const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('uint32_t', 'mask')])
    cls.add_constructor([param('char const *', 'mask')])
    # (name, return type, parameters, extra keyword flags) in order.
    member_functions = [
        ('Get', 'uint32_t', [], {'is_const': True}),
        ('GetInverse', 'uint32_t', [], {'is_const': True}),
        ('GetLoopback', 'ns3::Ipv4Mask', [], {'is_static': True}),
        ('GetOnes', 'ns3::Ipv4Mask', [], {'is_static': True}),
        ('GetPrefixLength', 'uint16_t', [], {'is_const': True}),
        ('GetZero', 'ns3::Ipv4Mask', [], {'is_static': True}),
        ('IsEqual', 'bool', [param('ns3::Ipv4Mask', 'other')], {'is_const': True}),
        ('IsMatch', 'bool',
         [param('ns3::Ipv4Address', 'a'), param('ns3::Ipv4Address', 'b')],
         {'is_const': True}),
        ('Print', 'void', [param('std::ostream &', 'os')], {'is_const': True}),
        ('Set', 'void', [param('uint32_t', 'mask')], {}),
    ]
    for fn_name, fn_retval, fn_params, fn_flags in member_functions:
        cls.add_method(fn_name, fn_retval, fn_params, **fn_flags)
    return
def register_Ns3Ipv6Address_methods(root_module, cls):
    """Bind ns3::Ipv6Address (ipv6-address.h, module 'network')."""
    # Comparison and output operators.
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors: default, from string, from raw bytes, copy, from pointer.
    cls.add_constructor([])
    cls.add_constructor([param('char const *', 'address')])
    cls.add_constructor([param('uint8_t *', 'address')])
    cls.add_constructor([param('ns3::Ipv6Address const &', 'addr')])
    cls.add_constructor([param('ns3::Ipv6Address const *', 'addr')])
    # (name, return type, parameters, extra keyword flags) in the original
    # registration order.
    member_functions = [
        ('CombinePrefix', 'ns3::Ipv6Address',
         [param('ns3::Ipv6Prefix const &', 'prefix')], {}),
        ('ConvertFrom', 'ns3::Ipv6Address',
         [param('ns3::Address const &', 'address')], {'is_static': True}),
        ('Deserialize', 'ns3::Ipv6Address',
         [param('uint8_t const *', 'buf')], {'is_static': True}),
        ('GetAllHostsMulticast', 'ns3::Ipv6Address', [], {'is_static': True}),
        ('GetAllNodesMulticast', 'ns3::Ipv6Address', [], {'is_static': True}),
        ('GetAllRoutersMulticast', 'ns3::Ipv6Address', [], {'is_static': True}),
        ('GetAny', 'ns3::Ipv6Address', [], {'is_static': True}),
        ('GetBytes', 'void', [param('uint8_t *', 'buf')], {'is_const': True}),
        ('GetIpv4MappedAddress', 'ns3::Ipv4Address', [], {'is_const': True}),
        ('GetLoopback', 'ns3::Ipv6Address', [], {'is_static': True}),
        ('GetOnes', 'ns3::Ipv6Address', [], {'is_static': True}),
        ('GetZero', 'ns3::Ipv6Address', [], {'is_static': True}),
        ('IsAllHostsMulticast', 'bool', [], {'is_const': True}),
        ('IsAllNodesMulticast', 'bool', [], {'is_const': True}),
        ('IsAllRoutersMulticast', 'bool', [], {'is_const': True}),
        ('IsAny', 'bool', [], {'is_const': True}),
        ('IsDocumentation', 'bool', [], {'is_const': True}),
        ('IsEqual', 'bool',
         [param('ns3::Ipv6Address const &', 'other')], {'is_const': True}),
        ('IsIpv4MappedAddress', 'bool', [], {'is_const': True}),
        ('IsLinkLocal', 'bool', [], {'is_const': True}),
        ('IsLinkLocalMulticast', 'bool', [], {'is_const': True}),
        ('IsLocalhost', 'bool', [], {'is_const': True}),
        ('IsMatchingType', 'bool',
         [param('ns3::Address const &', 'address')], {'is_static': True}),
        ('IsMulticast', 'bool', [], {'is_const': True}),
        ('IsSolicitedMulticast', 'bool', [], {'is_const': True}),
        # Autoconfigured-address factories for 16-, 48- and 64-bit MACs.
        ('MakeAutoconfiguredAddress', 'ns3::Ipv6Address',
         [param('ns3::Mac16Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
         {'is_static': True}),
        ('MakeAutoconfiguredAddress', 'ns3::Ipv6Address',
         [param('ns3::Mac48Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
         {'is_static': True}),
        ('MakeAutoconfiguredAddress', 'ns3::Ipv6Address',
         [param('ns3::Mac64Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
         {'is_static': True}),
        ('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address',
         [param('ns3::Mac16Address', 'mac')], {'is_static': True}),
        ('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address',
         [param('ns3::Mac48Address', 'mac')], {'is_static': True}),
        ('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address',
         [param('ns3::Mac64Address', 'mac')], {'is_static': True}),
        ('MakeIpv4MappedAddress', 'ns3::Ipv6Address',
         [param('ns3::Ipv4Address', 'addr')], {'is_static': True}),
        ('MakeSolicitedAddress', 'ns3::Ipv6Address',
         [param('ns3::Ipv6Address', 'addr')], {'is_static': True}),
        ('Print', 'void', [param('std::ostream &', 'os')], {'is_const': True}),
        ('Serialize', 'void', [param('uint8_t *', 'buf')], {'is_const': True}),
        ('Set', 'void', [param('char const *', 'address')], {}),
        ('Set', 'void', [param('uint8_t *', 'address')], {}),
    ]
    for fn_name, fn_retval, fn_params, fn_flags in member_functions:
        cls.add_method(fn_name, fn_retval, fn_params, **fn_flags)
    return
def register_Ns3Ipv6Prefix_methods(root_module, cls):
    """Bind ns3::Ipv6Prefix (ipv6-address.h, module 'network')."""
    # Operators (note: no '<' operator for prefixes).
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors: default, raw bytes, string, prefix length, copy, pointer.
    cls.add_constructor([])
    cls.add_constructor([param('uint8_t *', 'prefix')])
    cls.add_constructor([param('char const *', 'prefix')])
    cls.add_constructor([param('uint8_t', 'prefix')])
    cls.add_constructor([param('ns3::Ipv6Prefix const &', 'prefix')])
    cls.add_constructor([param('ns3::Ipv6Prefix const *', 'prefix')])
    # (name, return type, parameters, extra keyword flags) in order.
    member_functions = [
        ('GetBytes', 'void', [param('uint8_t *', 'buf')], {'is_const': True}),
        ('GetLoopback', 'ns3::Ipv6Prefix', [], {'is_static': True}),
        ('GetOnes', 'ns3::Ipv6Prefix', [], {'is_static': True}),
        ('GetPrefixLength', 'uint8_t', [], {'is_const': True}),
        ('GetZero', 'ns3::Ipv6Prefix', [], {'is_static': True}),
        ('IsEqual', 'bool',
         [param('ns3::Ipv6Prefix const &', 'other')], {'is_const': True}),
        ('IsMatch', 'bool',
         [param('ns3::Ipv6Address', 'a'), param('ns3::Ipv6Address', 'b')],
         {'is_const': True}),
        ('Print', 'void', [param('std::ostream &', 'os')], {'is_const': True}),
    ]
    for fn_name, fn_retval, fn_params, fn_flags in member_functions:
        cls.add_method(fn_name, fn_retval, fn_params, **fn_flags)
    return
def register_Ns3NodeContainer_methods(root_module, cls):
    """Bind ns3::NodeContainer (node-container.h, module 'network')."""
    # Constructors: copy, default, from a node pointer, from a node name.
    cls.add_constructor([param('ns3::NodeContainer const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ptr< ns3::Node >', 'node')])
    cls.add_constructor([param('std::string', 'nodeName')])
    # Concatenating constructors taking 2 through 5 other containers.
    container_param_names = ('a', 'b', 'c', 'd', 'e')
    for count in range(2, 6):
        cls.add_constructor([param('ns3::NodeContainer const &', pname)
                             for pname in container_param_names[:count]])
    # Add() overloads: another container, a node pointer, or a node name.
    cls.add_method('Add', 'void', [param('ns3::NodeContainer', 'other')])
    cls.add_method('Add', 'void', [param('ns3::Ptr< ns3::Node >', 'node')])
    cls.add_method('Add', 'void', [param('std::string', 'nodeName')])
    # Begin()/End() share the same const vector-iterator return type.
    node_iterator = ('__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Node > const, '
                     'std::vector< ns3::Ptr< ns3::Node > > >')
    cls.add_method('Begin', node_iterator, [], is_const=True)
    # Create() overloads: n nodes, optionally pinned to a system id.
    cls.add_method('Create', 'void', [param('uint32_t', 'n')])
    cls.add_method('Create', 'void',
                   [param('uint32_t', 'n'), param('uint32_t', 'systemId')])
    cls.add_method('End', node_iterator, [], is_const=True)
    # Element access and sizing.
    cls.add_method('Get', 'ns3::Ptr< ns3::Node >',
                   [param('uint32_t', 'i')], is_const=True)
    cls.add_method('GetGlobal', 'ns3::NodeContainer', [], is_static=True)
    cls.add_method('GetN', 'uint32_t', [], is_const=True)
    return
def register_Ns3ObjectBase_methods(root_module, cls):
    """Register ns3::ObjectBase constructors and methods on the pybindgen
    class wrapper `cls` (auto-generated binding registration — do not hand-edit
    call order; presumably regenerated by the ns-3 API scan, verify before
    modifying by hand)."""
    ## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase() [constructor]
    cls.add_constructor([])
    ## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase(ns3::ObjectBase const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')])
    ## object-base.h (module 'core'): void ns3::ObjectBase::GetAttribute(std::string name, ns3::AttributeValue & value) const [member function]
    cls.add_method('GetAttribute',
                   'void',
                   [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
                   is_const=True)
    ## object-base.h (module 'core'): bool ns3::ObjectBase::GetAttributeFailSafe(std::string name, ns3::AttributeValue & value) const [member function]
    cls.add_method('GetAttributeFailSafe',
                   'bool',
                   [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
                   is_const=True)
    ## object-base.h (module 'core'): ns3::TypeId ns3::ObjectBase::GetInstanceTypeId() const [member function]
    # Pure virtual: concrete subclasses must override this in C++.
    cls.add_method('GetInstanceTypeId',
                   'ns3::TypeId',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## object-base.h (module 'core'): static ns3::TypeId ns3::ObjectBase::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## object-base.h (module 'core'): void ns3::ObjectBase::SetAttribute(std::string name, ns3::AttributeValue const & value) [member function]
    cls.add_method('SetAttribute',
                   'void',
                   [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::SetAttributeFailSafe(std::string name, ns3::AttributeValue const & value) [member function]
    cls.add_method('SetAttributeFailSafe',
                   'bool',
                   [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
    cls.add_method('TraceConnect',
                   'bool',
                   [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
    cls.add_method('TraceConnectWithoutContext',
                   'bool',
                   [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
    cls.add_method('TraceDisconnect',
                   'bool',
                   [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
    cls.add_method('TraceDisconnectWithoutContext',
                   'bool',
                   [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    ## object-base.h (module 'core'): void ns3::ObjectBase::ConstructSelf(ns3::AttributeConstructionList const & attributes) [member function]
    # Protected in C++, so exposed with restricted visibility.
    cls.add_method('ConstructSelf',
                   'void',
                   [param('ns3::AttributeConstructionList const &', 'attributes')],
                   visibility='protected')
    ## object-base.h (module 'core'): void ns3::ObjectBase::NotifyConstructionCompleted() [member function]
    cls.add_method('NotifyConstructionCompleted',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3ObjectDeleter_methods(root_module, cls):
    """Register ns3::ObjectDeleter constructors and its static Delete method
    on the pybindgen class wrapper `cls` (auto-generated binding code)."""
    ## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter() [constructor]
    cls.add_constructor([])
    ## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter(ns3::ObjectDeleter const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')])
    ## object.h (module 'core'): static void ns3::ObjectDeleter::Delete(ns3::Object * object) [member function]
    cls.add_method('Delete',
                   'void',
                   [param('ns3::Object *', 'object')],
                   is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
    """Register the SimpleRefCount<Object, ObjectBase, ObjectDeleter>
    template specialization's constructors and static Cleanup method
    (auto-generated binding code)."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount(ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::Cleanup() [member function]
    cls.add_method('Cleanup',
                   'void',
                   [],
                   is_static=True)
    return
def register_Ns3TagBuffer_methods(root_module, cls):
    """Register ns3::TagBuffer constructors and read/write accessor methods
    on the pybindgen class wrapper `cls` (auto-generated binding code)."""
    ## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(ns3::TagBuffer const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TagBuffer const &', 'arg0')])
    ## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(uint8_t * start, uint8_t * end) [constructor]
    cls.add_constructor([param('uint8_t *', 'start'), param('uint8_t *', 'end')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::CopyFrom(ns3::TagBuffer o) [member function]
    cls.add_method('CopyFrom',
                   'void',
                   [param('ns3::TagBuffer', 'o')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::Read(uint8_t * buffer, uint32_t size) [member function]
    cls.add_method('Read',
                   'void',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
    ## tag-buffer.h (module 'network'): double ns3::TagBuffer::ReadDouble() [member function]
    cls.add_method('ReadDouble',
                   'double',
                   [])
    ## tag-buffer.h (module 'network'): uint16_t ns3::TagBuffer::ReadU16() [member function]
    cls.add_method('ReadU16',
                   'uint16_t',
                   [])
    ## tag-buffer.h (module 'network'): uint32_t ns3::TagBuffer::ReadU32() [member function]
    cls.add_method('ReadU32',
                   'uint32_t',
                   [])
    ## tag-buffer.h (module 'network'): uint64_t ns3::TagBuffer::ReadU64() [member function]
    cls.add_method('ReadU64',
                   'uint64_t',
                   [])
    ## tag-buffer.h (module 'network'): uint8_t ns3::TagBuffer::ReadU8() [member function]
    cls.add_method('ReadU8',
                   'uint8_t',
                   [])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::TrimAtEnd(uint32_t trim) [member function]
    cls.add_method('TrimAtEnd',
                   'void',
                   [param('uint32_t', 'trim')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::Write(uint8_t const * buffer, uint32_t size) [member function]
    cls.add_method('Write',
                   'void',
                   [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteDouble(double v) [member function]
    cls.add_method('WriteDouble',
                   'void',
                   [param('double', 'v')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU16(uint16_t data) [member function]
    cls.add_method('WriteU16',
                   'void',
                   [param('uint16_t', 'data')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU32(uint32_t data) [member function]
    cls.add_method('WriteU32',
                   'void',
                   [param('uint32_t', 'data')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU64(uint64_t v) [member function]
    cls.add_method('WriteU64',
                   'void',
                   [param('uint64_t', 'v')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU8(uint8_t v) [member function]
    cls.add_method('WriteU8',
                   'void',
                   [param('uint8_t', 'v')])
    return
def register_Ns3TimeWithUnit_methods(root_module, cls):
    """Register ns3::TimeWithUnit constructors and its stream-output
    operator on the pybindgen class wrapper `cls` (auto-generated)."""
    cls.add_output_stream_operator()
    ## nstime.h (module 'core'): ns3::TimeWithUnit::TimeWithUnit(ns3::TimeWithUnit const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TimeWithUnit const &', 'arg0')])
    ## nstime.h (module 'core'): ns3::TimeWithUnit::TimeWithUnit(ns3::Time const time, ns3::Time::Unit const unit) [constructor]
    cls.add_constructor([param('ns3::Time const', 'time'), param('ns3::Time::Unit const', 'unit')])
    return
def register_Ns3TopologyReaderHelper_methods(root_module, cls):
    """Register ns3::TopologyReaderHelper constructors and methods on the
    pybindgen class wrapper `cls` (auto-generated binding code)."""
    ## topology-reader-helper.h (module 'topology-read'): ns3::TopologyReaderHelper::TopologyReaderHelper(ns3::TopologyReaderHelper const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TopologyReaderHelper const &', 'arg0')])
    ## topology-reader-helper.h (module 'topology-read'): ns3::TopologyReaderHelper::TopologyReaderHelper() [constructor]
    cls.add_constructor([])
    ## topology-reader-helper.h (module 'topology-read'): ns3::Ptr<ns3::TopologyReader> ns3::TopologyReaderHelper::GetTopologyReader() [member function]
    cls.add_method('GetTopologyReader',
                   'ns3::Ptr< ns3::TopologyReader >',
                   [])
    ## topology-reader-helper.h (module 'topology-read'): void ns3::TopologyReaderHelper::SetFileName(std::string const fileName) [member function]
    cls.add_method('SetFileName',
                   'void',
                   [param('std::string const', 'fileName')])
    ## topology-reader-helper.h (module 'topology-read'): void ns3::TopologyReaderHelper::SetFileType(std::string const fileType) [member function]
    cls.add_method('SetFileType',
                   'void',
                   [param('std::string const', 'fileType')])
    return
def register_Ns3TypeId_methods(root_module, cls):
    """Register ns3::TypeId comparison/stream operators, constructors and
    methods on the pybindgen class wrapper `cls` (auto-generated binding
    code — call order mirrors the generator's output)."""
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## type-id.h (module 'core'): ns3::TypeId::TypeId(char const * name) [constructor]
    cls.add_constructor([param('char const *', 'name')])
    ## type-id.h (module 'core'): ns3::TypeId::TypeId() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeId::TypeId(ns3::TypeId const & o) [copy constructor]
    cls.add_constructor([param('ns3::TypeId const &', 'o')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('AddAttribute',
                   'ns3::TypeId',
                   [param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, uint32_t flags, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('AddAttribute',
                   'ns3::TypeId',
                   [param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor) [member function]
    # Marked deprecated: the overload below (with 'callback') supersedes it.
    cls.add_method('AddTraceSource',
                   'ns3::TypeId',
                   [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')],
                   deprecated=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor, std::string callback) [member function]
    cls.add_method('AddTraceSource',
                   'ns3::TypeId',
                   [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor'), param('std::string', 'callback')])
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation ns3::TypeId::GetAttribute(uint32_t i) const [member function]
    cls.add_method('GetAttribute',
                   'ns3::TypeId::AttributeInformation',
                   [param('uint32_t', 'i')],
                   is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetAttributeFullName(uint32_t i) const [member function]
    cls.add_method('GetAttributeFullName',
                   'std::string',
                   [param('uint32_t', 'i')],
                   is_const=True)
    ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetAttributeN() const [member function]
    cls.add_method('GetAttributeN',
                   'uint32_t',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): ns3::Callback<ns3::ObjectBase*,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ns3::TypeId::GetConstructor() const [member function]
    cls.add_method('GetConstructor',
                   'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetGroupName() const [member function]
    cls.add_method('GetGroupName',
                   'std::string',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetHash() const [member function]
    cls.add_method('GetHash',
                   'uint32_t',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetName() const [member function]
    cls.add_method('GetName',
                   'std::string',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::GetParent() const [member function]
    cls.add_method('GetParent',
                   'ns3::TypeId',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::GetRegistered(uint32_t i) [member function]
    cls.add_method('GetRegistered',
                   'ns3::TypeId',
                   [param('uint32_t', 'i')],
                   is_static=True)
    ## type-id.h (module 'core'): static uint32_t ns3::TypeId::GetRegisteredN() [member function]
    cls.add_method('GetRegisteredN',
                   'uint32_t',
                   [],
                   is_static=True)
    ## type-id.h (module 'core'): std::size_t ns3::TypeId::GetSize() const [member function]
    cls.add_method('GetSize',
                   'std::size_t',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation ns3::TypeId::GetTraceSource(uint32_t i) const [member function]
    cls.add_method('GetTraceSource',
                   'ns3::TypeId::TraceSourceInformation',
                   [param('uint32_t', 'i')],
                   is_const=True)
    ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetTraceSourceN() const [member function]
    cls.add_method('GetTraceSourceN',
                   'uint32_t',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): uint16_t ns3::TypeId::GetUid() const [member function]
    cls.add_method('GetUid',
                   'uint16_t',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::HasConstructor() const [member function]
    cls.add_method('HasConstructor',
                   'bool',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::HasParent() const [member function]
    cls.add_method('HasParent',
                   'bool',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::HideFromDocumentation() [member function]
    cls.add_method('HideFromDocumentation',
                   'ns3::TypeId',
                   [])
    ## type-id.h (module 'core'): bool ns3::TypeId::IsChildOf(ns3::TypeId other) const [member function]
    cls.add_method('IsChildOf',
                   'bool',
                   [param('ns3::TypeId', 'other')],
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::LookupAttributeByName(std::string name, ns3::TypeId::AttributeInformation * info) const [member function]
    # transfer_ownership=False: the caller keeps ownership of 'info'.
    cls.add_method('LookupAttributeByName',
                   'bool',
                   [param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)],
                   is_const=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByHash(uint32_t hash) [member function]
    cls.add_method('LookupByHash',
                   'ns3::TypeId',
                   [param('uint32_t', 'hash')],
                   is_static=True)
    ## type-id.h (module 'core'): static bool ns3::TypeId::LookupByHashFailSafe(uint32_t hash, ns3::TypeId * tid) [member function]
    cls.add_method('LookupByHashFailSafe',
                   'bool',
                   [param('uint32_t', 'hash'), param('ns3::TypeId *', 'tid')],
                   is_static=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByName(std::string name) [member function]
    cls.add_method('LookupByName',
                   'ns3::TypeId',
                   [param('std::string', 'name')],
                   is_static=True)
    ## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name) const [member function]
    cls.add_method('LookupTraceSourceByName',
                   'ns3::Ptr< ns3::TraceSourceAccessor const >',
                   [param('std::string', 'name')],
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::MustHideFromDocumentation() const [member function]
    cls.add_method('MustHideFromDocumentation',
                   'bool',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::SetAttributeInitialValue(uint32_t i, ns3::Ptr<ns3::AttributeValue const> initialValue) [member function]
    cls.add_method('SetAttributeInitialValue',
                   'bool',
                   [param('uint32_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetGroupName(std::string groupName) [member function]
    cls.add_method('SetGroupName',
                   'ns3::TypeId',
                   [param('std::string', 'groupName')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetParent(ns3::TypeId tid) [member function]
    cls.add_method('SetParent',
                   'ns3::TypeId',
                   [param('ns3::TypeId', 'tid')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetSize(std::size_t size) [member function]
    cls.add_method('SetSize',
                   'ns3::TypeId',
                   [param('std::size_t', 'size')])
    ## type-id.h (module 'core'): void ns3::TypeId::SetUid(uint16_t tid) [member function]
    cls.add_method('SetUid',
                   'void',
                   [param('uint16_t', 'tid')])
    return
def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
    """Register ns3::TypeId::AttributeInformation constructors and public
    data members on the pybindgen class wrapper `cls` (auto-generated)."""
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation(ns3::TypeId::AttributeInformation const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::accessor [variable]
    cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::checker [variable]
    cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::flags [variable]
    cls.add_instance_attribute('flags', 'uint32_t', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::help [variable]
    cls.add_instance_attribute('help', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::initialValue [variable]
    cls.add_instance_attribute('initialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::name [variable]
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::originalInitialValue [variable]
    cls.add_instance_attribute('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
    return
def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
    """Register ns3::TypeId::TraceSourceInformation constructors and public
    data members on the pybindgen class wrapper `cls` (auto-generated)."""
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation(ns3::TypeId::TraceSourceInformation const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::accessor [variable]
    cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::callback [variable]
    cls.add_instance_attribute('callback', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::help [variable]
    cls.add_instance_attribute('help', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::name [variable]
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    return
def register_Ns3Empty_methods(root_module, cls):
    """Register ns3::empty (placeholder type) constructors on the pybindgen
    class wrapper `cls` (auto-generated binding code)."""
    ## empty.h (module 'core'): ns3::empty::empty() [constructor]
    cls.add_constructor([])
    ## empty.h (module 'core'): ns3::empty::empty(ns3::empty const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::empty const &', 'arg0')])
    return
def register_Ns3Int64x64_t_methods(root_module, cls):
    """Register ns3::int64x64_t arithmetic/comparison operators, the many
    numeric-conversion constructors, and accessor methods on the pybindgen
    class wrapper `cls` (auto-generated binding code)."""
    cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_unary_numeric_operator('-')
    cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('>')
    cls.add_binary_comparison_operator('!=')
    cls.add_inplace_numeric_operator('*=', param('ns3::int64x64_t const &', u'right'))
    cls.add_inplace_numeric_operator('+=', param('ns3::int64x64_t const &', u'right'))
    cls.add_inplace_numeric_operator('-=', param('ns3::int64x64_t const &', u'right'))
    cls.add_inplace_numeric_operator('/=', param('ns3::int64x64_t const &', u'right'))
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('<=')
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('>=')
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t() [constructor]
    cls.add_constructor([])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(double v) [constructor]
    cls.add_constructor([param('double', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long double v) [constructor]
    cls.add_constructor([param('long double', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int v) [constructor]
    cls.add_constructor([param('int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long int v) [constructor]
    cls.add_constructor([param('long int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long int v) [constructor]
    cls.add_constructor([param('long long int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(unsigned int v) [constructor]
    cls.add_constructor([param('unsigned int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long unsigned int v) [constructor]
    cls.add_constructor([param('long unsigned int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long unsigned int v) [constructor]
    cls.add_constructor([param('long long unsigned int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int64_t hi, uint64_t lo) [constructor]
    cls.add_constructor([param('int64_t', 'hi'), param('uint64_t', 'lo')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(ns3::int64x64_t const & o) [copy constructor]
    cls.add_constructor([param('ns3::int64x64_t const &', 'o')])
    ## int64x64-double.h (module 'core'): double ns3::int64x64_t::GetDouble() const [member function]
    cls.add_method('GetDouble',
                   'double',
                   [],
                   is_const=True)
    ## int64x64-double.h (module 'core'): int64_t ns3::int64x64_t::GetHigh() const [member function]
    cls.add_method('GetHigh',
                   'int64_t',
                   [],
                   is_const=True)
    ## int64x64-double.h (module 'core'): uint64_t ns3::int64x64_t::GetLow() const [member function]
    cls.add_method('GetLow',
                   'uint64_t',
                   [],
                   is_const=True)
    ## int64x64-double.h (module 'core'): static ns3::int64x64_t ns3::int64x64_t::Invert(uint64_t v) [member function]
    cls.add_method('Invert',
                   'ns3::int64x64_t',
                   [param('uint64_t', 'v')],
                   is_static=True)
    ## int64x64-double.h (module 'core'): void ns3::int64x64_t::MulByInvert(ns3::int64x64_t const & o) [member function]
    cls.add_method('MulByInvert',
                   'void',
                   [param('ns3::int64x64_t const &', 'o')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::implementation [variable]
    cls.add_static_attribute('implementation', 'ns3::int64x64_t::impl_type const', is_const=True)
    return
def register_Ns3Object_methods(root_module, cls):
    """Register ns3::Object constructors and methods on the pybindgen class
    wrapper `cls`, including protected lifecycle hooks (auto-generated)."""
    ## object.h (module 'core'): ns3::Object::Object() [constructor]
    cls.add_constructor([])
    ## object.h (module 'core'): void ns3::Object::AggregateObject(ns3::Ptr<ns3::Object> other) [member function]
    cls.add_method('AggregateObject',
                   'void',
                   [param('ns3::Ptr< ns3::Object >', 'other')])
    ## object.h (module 'core'): void ns3::Object::Dispose() [member function]
    cls.add_method('Dispose',
                   'void',
                   [])
    ## object.h (module 'core'): ns3::Object::AggregateIterator ns3::Object::GetAggregateIterator() const [member function]
    cls.add_method('GetAggregateIterator',
                   'ns3::Object::AggregateIterator',
                   [],
                   is_const=True)
    ## object.h (module 'core'): ns3::TypeId ns3::Object::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId',
                   'ns3::TypeId',
                   [],
                   is_const=True, is_virtual=True)
    ## object.h (module 'core'): static ns3::TypeId ns3::Object::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## object.h (module 'core'): void ns3::Object::Initialize() [member function]
    cls.add_method('Initialize',
                   'void',
                   [])
    ## object.h (module 'core'): ns3::Object::Object(ns3::Object const & o) [copy constructor]
    # Copy constructor is protected in C++, hence the restricted visibility.
    cls.add_constructor([param('ns3::Object const &', 'o')],
                        visibility='protected')
    ## object.h (module 'core'): void ns3::Object::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## object.h (module 'core'): void ns3::Object::DoInitialize() [member function]
    cls.add_method('DoInitialize',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## object.h (module 'core'): void ns3::Object::NotifyNewAggregate() [member function]
    cls.add_method('NotifyNewAggregate',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
    """Register ns3::Object::AggregateIterator constructors and iteration
    methods on the pybindgen class wrapper `cls` (auto-generated)."""
    ## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator(ns3::Object::AggregateIterator const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
    ## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator() [constructor]
    cls.add_constructor([])
    ## object.h (module 'core'): bool ns3::Object::AggregateIterator::HasNext() const [member function]
    cls.add_method('HasNext',
                   'bool',
                   [],
                   is_const=True)
    ## object.h (module 'core'): ns3::Ptr<ns3::Object const> ns3::Object::AggregateIterator::Next() [member function]
    cls.add_method('Next',
                   'ns3::Ptr< ns3::Object const >',
                   [])
    return
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
    """Register the SimpleRefCount<AttributeAccessor, ...> specialization's
    constructors and static Cleanup method (auto-generated)."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::Cleanup() [member function]
    cls.add_method('Cleanup',
                   'void',
                   [],
                   is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
    """Register the SimpleRefCount<AttributeChecker, ...> specialization's
    constructors and static Cleanup method (auto-generated)."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::Cleanup() [member function]
    cls.add_method('Cleanup',
                   'void',
                   [],
                   is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
    """Register the SimpleRefCount<AttributeValue, ...> specialization's
    constructors and static Cleanup method (auto-generated)."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::Cleanup() [member function]
    cls.add_method('Cleanup',
                   'void',
                   [],
                   is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
    """Register the SimpleRefCount<CallbackImplBase, ...> specialization's
    constructors and static Cleanup method (auto-generated)."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount(ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::Cleanup() [member function]
    cls.add_method('Cleanup',
                   'void',
                   [],
                   is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, cls):
    """Register the SimpleRefCount<Hash::Implementation, ...> specialization's
    constructors and static Cleanup method (auto-generated)."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter< ns3::Hash::Implementation > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::Cleanup() [member function]
    cls.add_method('Cleanup',
                   'void',
                   [],
                   is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
    """Register bindings for ns3::SimpleRefCount<ns3::TraceSourceAccessor, ...>."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::Cleanup() [member function]
    cls.add_method('Cleanup',
                   'void',
                   [],
                   is_static=True)
    return
def register_Ns3Time_methods(root_module, cls):
    """Register bindings for ns3::Time (nstime.h, module 'core').

    Registers arithmetic/comparison operators, constructors for all numeric
    types, and the unit-conversion / accessor member functions. The order of
    registration calls is kept as generated.
    """
    cls.add_binary_numeric_operator('*', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right'))
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('>')
    cls.add_binary_comparison_operator('!=')
    cls.add_inplace_numeric_operator('+=', param('ns3::Time const &', u'right'))
    cls.add_inplace_numeric_operator('-=', param('ns3::Time const &', u'right'))
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('<=')
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('>=')
    ## nstime.h (module 'core'): ns3::Time::Time() [constructor]
    cls.add_constructor([])
    ## nstime.h (module 'core'): ns3::Time::Time(ns3::Time const & o) [copy constructor]
    cls.add_constructor([param('ns3::Time const &', 'o')])
    ## nstime.h (module 'core'): ns3::Time::Time(double v) [constructor]
    cls.add_constructor([param('double', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(int v) [constructor]
    cls.add_constructor([param('int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long int v) [constructor]
    cls.add_constructor([param('long int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long long int v) [constructor]
    cls.add_constructor([param('long long int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(unsigned int v) [constructor]
    cls.add_constructor([param('unsigned int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long unsigned int v) [constructor]
    cls.add_constructor([param('long unsigned int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long long unsigned int v) [constructor]
    cls.add_constructor([param('long long unsigned int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(ns3::int64x64_t const & v) [constructor]
    cls.add_constructor([param('ns3::int64x64_t const &', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(std::string const & s) [constructor]
    cls.add_constructor([param('std::string const &', 's')])
    ## nstime.h (module 'core'): ns3::TimeWithUnit ns3::Time::As(ns3::Time::Unit const unit) const [member function]
    cls.add_method('As',
                   'ns3::TimeWithUnit',
                   [param('ns3::Time::Unit const', 'unit')],
                   is_const=True)
    ## nstime.h (module 'core'): int ns3::Time::Compare(ns3::Time const & o) const [member function]
    cls.add_method('Compare',
                   'int',
                   [param('ns3::Time const &', 'o')],
                   is_const=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & value) [member function]
    cls.add_method('From',
                   'ns3::Time',
                   [param('ns3::int64x64_t const &', 'value')],
                   is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & value, ns3::Time::Unit unit) [member function]
    cls.add_method('From',
                   'ns3::Time',
                   [param('ns3::int64x64_t const &', 'value'), param('ns3::Time::Unit', 'unit')],
                   is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::FromDouble(double value, ns3::Time::Unit unit) [member function]
    cls.add_method('FromDouble',
                   'ns3::Time',
                   [param('double', 'value'), param('ns3::Time::Unit', 'unit')],
                   is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::FromInteger(uint64_t value, ns3::Time::Unit unit) [member function]
    cls.add_method('FromInteger',
                   'ns3::Time',
                   [param('uint64_t', 'value'), param('ns3::Time::Unit', 'unit')],
                   is_static=True)
    ## nstime.h (module 'core'): double ns3::Time::GetDays() const [member function]
    cls.add_method('GetDays',
                   'double',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetDouble() const [member function]
    cls.add_method('GetDouble',
                   'double',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetFemtoSeconds() const [member function]
    cls.add_method('GetFemtoSeconds',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetHours() const [member function]
    cls.add_method('GetHours',
                   'double',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetInteger() const [member function]
    cls.add_method('GetInteger',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetMicroSeconds() const [member function]
    cls.add_method('GetMicroSeconds',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetMilliSeconds() const [member function]
    cls.add_method('GetMilliSeconds',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetMinutes() const [member function]
    cls.add_method('GetMinutes',
                   'double',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetNanoSeconds() const [member function]
    cls.add_method('GetNanoSeconds',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetPicoSeconds() const [member function]
    cls.add_method('GetPicoSeconds',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): static ns3::Time::Unit ns3::Time::GetResolution() [member function]
    cls.add_method('GetResolution',
                   'ns3::Time::Unit',
                   [],
                   is_static=True)
    ## nstime.h (module 'core'): double ns3::Time::GetSeconds() const [member function]
    cls.add_method('GetSeconds',
                   'double',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetTimeStep() const [member function]
    cls.add_method('GetTimeStep',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetYears() const [member function]
    cls.add_method('GetYears',
                   'double',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsNegative() const [member function]
    cls.add_method('IsNegative',
                   'bool',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsPositive() const [member function]
    cls.add_method('IsPositive',
                   'bool',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsStrictlyNegative() const [member function]
    cls.add_method('IsStrictlyNegative',
                   'bool',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsStrictlyPositive() const [member function]
    cls.add_method('IsStrictlyPositive',
                   'bool',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsZero() const [member function]
    cls.add_method('IsZero',
                   'bool',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::Max() [member function]
    cls.add_method('Max',
                   'ns3::Time',
                   [],
                   is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::Min() [member function]
    cls.add_method('Min',
                   'ns3::Time',
                   [],
                   is_static=True)
    ## nstime.h (module 'core'): static void ns3::Time::SetResolution(ns3::Time::Unit resolution) [member function]
    cls.add_method('SetResolution',
                   'void',
                   [param('ns3::Time::Unit', 'resolution')],
                   is_static=True)
    ## nstime.h (module 'core'): static bool ns3::Time::StaticInit() [member function]
    cls.add_method('StaticInit',
                   'bool',
                   [],
                   is_static=True)
    ## nstime.h (module 'core'): ns3::int64x64_t ns3::Time::To(ns3::Time::Unit unit) const [member function]
    cls.add_method('To',
                   'ns3::int64x64_t',
                   [param('ns3::Time::Unit', 'unit')],
                   is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::ToDouble(ns3::Time::Unit unit) const [member function]
    cls.add_method('ToDouble',
                   'double',
                   [param('ns3::Time::Unit', 'unit')],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::ToInteger(ns3::Time::Unit unit) const [member function]
    cls.add_method('ToInteger',
                   'int64_t',
                   [param('ns3::Time::Unit', 'unit')],
                   is_const=True)
    return
def register_Ns3TopologyReader_methods(root_module, cls):
    """Register bindings for the abstract ns3::TopologyReader base class
    (topology-reader.h, module 'topology-read'); Read() is pure virtual.
    """
    ## topology-reader.h (module 'topology-read'): ns3::TopologyReader::TopologyReader() [constructor]
    cls.add_constructor([])
    ## topology-reader.h (module 'topology-read'): void ns3::TopologyReader::AddLink(ns3::TopologyReader::Link link) [member function]
    cls.add_method('AddLink',
                   'void',
                   [param('ns3::TopologyReader::Link', 'link')])
    ## topology-reader.h (module 'topology-read'): std::string ns3::TopologyReader::GetFileName() const [member function]
    cls.add_method('GetFileName',
                   'std::string',
                   [],
                   is_const=True)
    ## topology-reader.h (module 'topology-read'): static ns3::TypeId ns3::TopologyReader::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## topology-reader.h (module 'topology-read'): std::_List_const_iterator<ns3::TopologyReader::Link> ns3::TopologyReader::LinksBegin() const [member function]
    cls.add_method('LinksBegin',
                   'std::_List_const_iterator< ns3::TopologyReader::Link >',
                   [],
                   is_const=True)
    ## topology-reader.h (module 'topology-read'): bool ns3::TopologyReader::LinksEmpty() const [member function]
    cls.add_method('LinksEmpty',
                   'bool',
                   [],
                   is_const=True)
    ## topology-reader.h (module 'topology-read'): std::_List_const_iterator<ns3::TopologyReader::Link> ns3::TopologyReader::LinksEnd() const [member function]
    cls.add_method('LinksEnd',
                   'std::_List_const_iterator< ns3::TopologyReader::Link >',
                   [],
                   is_const=True)
    ## topology-reader.h (module 'topology-read'): int ns3::TopologyReader::LinksSize() const [member function]
    cls.add_method('LinksSize',
                   'int',
                   [],
                   is_const=True)
    ## topology-reader.h (module 'topology-read'): ns3::NodeContainer ns3::TopologyReader::Read() [member function]
    cls.add_method('Read',
                   'ns3::NodeContainer',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    ## topology-reader.h (module 'topology-read'): void ns3::TopologyReader::SetFileName(std::string const & fileName) [member function]
    cls.add_method('SetFileName',
                   'void',
                   [param('std::string const &', 'fileName')])
    return
def register_Ns3TopologyReaderLink_methods(root_module, cls):
    """Register bindings for the inner ns3::TopologyReader::Link class
    (topology-reader.h, module 'topology-read').
    """
    ## topology-reader.h (module 'topology-read'): ns3::TopologyReader::Link::Link(ns3::TopologyReader::Link const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TopologyReader::Link const &', 'arg0')])
    ## topology-reader.h (module 'topology-read'): ns3::TopologyReader::Link::Link(ns3::Ptr<ns3::Node> fromPtr, std::string const & fromName, ns3::Ptr<ns3::Node> toPtr, std::string const & toName) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::Node >', 'fromPtr'), param('std::string const &', 'fromName'), param('ns3::Ptr< ns3::Node >', 'toPtr'), param('std::string const &', 'toName')])
    ## topology-reader.h (module 'topology-read'): std::_Rb_tree_const_iterator<std::pair<const std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> > > > ns3::TopologyReader::Link::AttributesBegin() const [member function]
    cls.add_method('AttributesBegin',
                   'std::_Rb_tree_const_iterator< std::pair< std::basic_string< char, std::char_traits< char >, std::allocator< char > > const, std::basic_string< char, std::char_traits< char >, std::allocator< char > > > >',
                   [],
                   is_const=True)
    ## topology-reader.h (module 'topology-read'): std::_Rb_tree_const_iterator<std::pair<const std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> > > > ns3::TopologyReader::Link::AttributesEnd() const [member function]
    cls.add_method('AttributesEnd',
                   'std::_Rb_tree_const_iterator< std::pair< std::basic_string< char, std::char_traits< char >, std::allocator< char > > const, std::basic_string< char, std::char_traits< char >, std::allocator< char > > > >',
                   [],
                   is_const=True)
    ## topology-reader.h (module 'topology-read'): std::string ns3::TopologyReader::Link::GetAttribute(std::string const & name) const [member function]
    cls.add_method('GetAttribute',
                   'std::string',
                   [param('std::string const &', 'name')],
                   is_const=True)
    ## topology-reader.h (module 'topology-read'): bool ns3::TopologyReader::Link::GetAttributeFailSafe(std::string const & name, std::string & value) const [member function]
    cls.add_method('GetAttributeFailSafe',
                   'bool',
                   [param('std::string const &', 'name'), param('std::string &', 'value')],
                   is_const=True)
    ## topology-reader.h (module 'topology-read'): ns3::Ptr<ns3::Node> ns3::TopologyReader::Link::GetFromNode() const [member function]
    cls.add_method('GetFromNode',
                   'ns3::Ptr< ns3::Node >',
                   [],
                   is_const=True)
    ## topology-reader.h (module 'topology-read'): std::string ns3::TopologyReader::Link::GetFromNodeName() const [member function]
    cls.add_method('GetFromNodeName',
                   'std::string',
                   [],
                   is_const=True)
    ## topology-reader.h (module 'topology-read'): ns3::Ptr<ns3::Node> ns3::TopologyReader::Link::GetToNode() const [member function]
    cls.add_method('GetToNode',
                   'ns3::Ptr< ns3::Node >',
                   [],
                   is_const=True)
    ## topology-reader.h (module 'topology-read'): std::string ns3::TopologyReader::Link::GetToNodeName() const [member function]
    cls.add_method('GetToNodeName',
                   'std::string',
                   [],
                   is_const=True)
    ## topology-reader.h (module 'topology-read'): void ns3::TopologyReader::Link::SetAttribute(std::string const & name, std::string const & value) [member function]
    cls.add_method('SetAttribute',
                   'void',
                   [param('std::string const &', 'name'), param('std::string const &', 'value')])
    return
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
    """Register bindings for abstract ns3::TraceSourceAccessor
    (trace-source-accessor.h, module 'core'); all four Connect/Disconnect
    variants are pure virtual.
    """
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor(ns3::TraceSourceAccessor const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor() [constructor]
    cls.add_constructor([])
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Connect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('Connect',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::ConnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('ConnectWithoutContext',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Disconnect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('Disconnect',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::DisconnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('DisconnectWithoutContext',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeAccessor_methods(root_module, cls):
    """Register bindings for abstract ns3::AttributeAccessor
    (attribute.h, module 'core').
    """
    ## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor(ns3::AttributeAccessor const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
    cls.add_method('Get',
                   'bool',
                   [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasGetter() const [member function]
    cls.add_method('HasGetter',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasSetter() const [member function]
    cls.add_method('HasSetter',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
    cls.add_method('Set',
                   'bool',
                   [param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeChecker_methods(root_module, cls):
    """Register bindings for abstract ns3::AttributeChecker
    (attribute.h, module 'core').
    """
    ## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker(ns3::AttributeChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
    cls.add_method('Check',
                   'bool',
                   [param('ns3::AttributeValue const &', 'value')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
    cls.add_method('Copy',
                   'bool',
                   [param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::Create() const [member function]
    cls.add_method('Create',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::CreateValidValue(ns3::AttributeValue const & value) const [member function]
    cls.add_method('CreateValidValue',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [param('ns3::AttributeValue const &', 'value')],
                   is_const=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetUnderlyingTypeInformation() const [member function]
    cls.add_method('GetUnderlyingTypeInformation',
                   'std::string',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetValueTypeName() const [member function]
    cls.add_method('GetValueTypeName',
                   'std::string',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::HasUnderlyingTypeInformation() const [member function]
    cls.add_method('HasUnderlyingTypeInformation',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeValue_methods(root_module, cls):
    """Register bindings for abstract ns3::AttributeValue
    (attribute.h, module 'core').
    """
    ## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue(ns3::AttributeValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_pure_virtual=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3CallbackChecker_methods(root_module, cls):
    """Register bindings for ns3::CallbackChecker (callback.h, module 'core')."""
    # Default constructor: ns3::CallbackChecker::CallbackChecker()
    cls.add_constructor([])
    # Copy constructor: ns3::CallbackChecker::CallbackChecker(ns3::CallbackChecker const &)
    copy_arg = param('ns3::CallbackChecker const &', 'arg0')
    cls.add_constructor([copy_arg])
    return
def register_Ns3CallbackImplBase_methods(root_module, cls):
    """Register bindings for ns3::CallbackImplBase (callback.h, module 'core')."""
    ## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase(ns3::CallbackImplBase const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
    ## callback.h (module 'core'): bool ns3::CallbackImplBase::IsEqual(ns3::Ptr<ns3::CallbackImplBase const> other) const [member function]
    cls.add_method('IsEqual',
                   'bool',
                   [param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3CallbackValue_methods(root_module, cls):
    """Register bindings for ns3::CallbackValue (callback.h, module 'core')."""
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackBase const & base) [constructor]
    cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
    ## callback.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::CallbackValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): bool ns3::CallbackValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## callback.h (module 'core'): std::string ns3::CallbackValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackValue::Set(ns3::CallbackBase base) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::CallbackBase', 'base')])
    return
def register_Ns3EmptyAttributeValue_methods(root_module, cls):
    """Register bindings for ns3::EmptyAttributeValue (attribute.h, module
    'core'); its virtual overrides are registered with private visibility.
    """
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue(ns3::EmptyAttributeValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, visibility='private', is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   visibility='private', is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::EmptyAttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, visibility='private', is_virtual=True)
    return
def register_Ns3InetTopologyReader_methods(root_module, cls):
    """Register bindings for ns3::InetTopologyReader
    (inet-topology-reader.h, module 'topology-read').
    """
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Default constructor: ns3::InetTopologyReader::InetTopologyReader()
    cls.add_constructor([])
    # virtual ns3::NodeContainer Read() -- concrete override of the base class
    cls.add_method('Read', 'ns3::NodeContainer', [], is_virtual=True)
    return
def register_Ns3Ipv4AddressChecker_methods(root_module, cls):
    """Register bindings for ns3::Ipv4AddressChecker (ipv4-address.h, module 'network')."""
    # Default constructor: ns3::Ipv4AddressChecker::Ipv4AddressChecker()
    cls.add_constructor([])
    # Copy constructor: ns3::Ipv4AddressChecker::Ipv4AddressChecker(ns3::Ipv4AddressChecker const &)
    copy_arg = param('ns3::Ipv4AddressChecker const &', 'arg0')
    cls.add_constructor([copy_arg])
    return
def register_Ns3Ipv4AddressValue_methods(root_module, cls):
    """Register bindings for ns3::Ipv4AddressValue (ipv4-address.h, module 'network')."""
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue() [constructor]
    cls.add_constructor([])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4AddressValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4AddressValue const &', 'arg0')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4Address const & value) [constructor]
    cls.add_constructor([param('ns3::Ipv4Address const &', 'value')])
    ## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4AddressValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4AddressValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Ipv4Address',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): std::string ns3::Ipv4AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4AddressValue::Set(ns3::Ipv4Address const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Ipv4Address const &', 'value')])
    return
def register_Ns3Ipv4MaskChecker_methods(root_module, cls):
    """Register bindings for ns3::Ipv4MaskChecker (ipv4-address.h, module 'network')."""
    # Default constructor: ns3::Ipv4MaskChecker::Ipv4MaskChecker()
    cls.add_constructor([])
    # Copy constructor: ns3::Ipv4MaskChecker::Ipv4MaskChecker(ns3::Ipv4MaskChecker const &)
    copy_arg = param('ns3::Ipv4MaskChecker const &', 'arg0')
    cls.add_constructor([copy_arg])
    return
def register_Ns3Ipv4MaskValue_methods(root_module, cls):
    """Register bindings for ns3::Ipv4MaskValue (ipv4-address.h, module 'network')."""
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue() [constructor]
    cls.add_constructor([])
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4MaskValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4MaskValue const &', 'arg0')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4Mask const & value) [constructor]
    cls.add_constructor([param('ns3::Ipv4Mask const &', 'value')])
    ## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4MaskValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4MaskValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## ipv4-address.h (module 'network'): ns3::Ipv4Mask ns3::Ipv4MaskValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Ipv4Mask',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): std::string ns3::Ipv4MaskValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4MaskValue::Set(ns3::Ipv4Mask const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Ipv4Mask const &', 'value')])
    return
def register_Ns3Ipv6AddressChecker_methods(root_module, cls):
    """Register bindings for ns3::Ipv6AddressChecker (ipv6-address.h, module 'network')."""
    # Default constructor: ns3::Ipv6AddressChecker::Ipv6AddressChecker()
    cls.add_constructor([])
    # Copy constructor: ns3::Ipv6AddressChecker::Ipv6AddressChecker(ns3::Ipv6AddressChecker const &)
    copy_arg = param('ns3::Ipv6AddressChecker const &', 'arg0')
    cls.add_constructor([copy_arg])
    return
def register_Ns3Ipv6AddressValue_methods(root_module, cls):
    """Register bindings for ns3::Ipv6AddressValue (ipv6-address.h, module 'network')."""
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue() [constructor]
    cls.add_constructor([])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6AddressValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv6AddressValue const &', 'arg0')])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6Address const & value) [constructor]
    cls.add_constructor([param('ns3::Ipv6Address const &', 'value')])
    ## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6AddressValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6AddressValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Ipv6Address',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): std::string ns3::Ipv6AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## ipv6-address.h (module 'network'): void ns3::Ipv6AddressValue::Set(ns3::Ipv6Address const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Ipv6Address const &', 'value')])
    return
def register_Ns3Ipv6PrefixChecker_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv6PrefixChecker (ipv6-address.h, module 'network')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::Ipv6PrefixChecker const &', 'arg0')])  # copy constructor
    return
def register_Ns3Ipv6PrefixValue_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv6PrefixValue (ipv6-address.h, module 'network')."""
    # Constructors: default, copy, and from an Ipv6Prefix.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv6PrefixValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv6Prefix const &', 'value')])
    # ns3::AttributeValue virtual interface plus the typed Get/Set accessors.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    cls.add_method('Get', 'ns3::Ipv6Prefix', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::Ipv6Prefix const &', 'value')])
    return
def register_Ns3NetDevice_methods(root_module, cls):
    """Register Python bindings for the abstract ns3::NetDevice (net-device.h, module 'network')."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::NetDevice const &', 'arg0')])
    cls.add_method('AddLinkChangeCallback', 'void',
                   [param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
                   is_pure_virtual=True, is_virtual=True)
    # Pure-virtual const getters, registered in the generator's original order.
    for name, ret in (('GetAddress', 'ns3::Address'),
                      ('GetBroadcast', 'ns3::Address'),
                      ('GetChannel', 'ns3::Ptr< ns3::Channel >'),
                      ('GetIfIndex', 'uint32_t'),
                      ('GetMtu', 'uint16_t')):
        cls.add_method(name, ret, [],
                       is_pure_virtual=True, is_const=True, is_virtual=True)
    # GetMulticast is overloaded for IPv4 and IPv6 group addresses.
    cls.add_method('GetMulticast', 'ns3::Address',
                   [param('ns3::Ipv4Address', 'multicastGroup')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetMulticast', 'ns3::Address',
                   [param('ns3::Ipv6Address', 'addr')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetNode', 'ns3::Ptr< ns3::Node >', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Pure-virtual boolean capability queries.
    for name in ('IsBridge', 'IsBroadcast', 'IsLinkUp', 'IsMulticast',
                 'IsPointToPoint', 'NeedsArp'):
        cls.add_method(name, 'bool', [],
                       is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Send', 'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'),
                    param('ns3::Address const &', 'dest'),
                    param('uint16_t', 'protocolNumber')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SendFrom', 'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'),
                    param('ns3::Address const &', 'source'),
                    param('ns3::Address const &', 'dest'),
                    param('uint16_t', 'protocolNumber')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetAddress', 'void', [param('ns3::Address', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetIfIndex', 'void', [param('uint32_t const', 'index')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetMtu', 'bool', [param('uint16_t const', 'mtu')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetNode', 'void', [param('ns3::Ptr< ns3::Node >', 'node')],
                   is_pure_virtual=True, is_virtual=True)
    # Receive-path callback setters (promiscuous and non-promiscuous).
    cls.add_method('SetPromiscReceiveCallback', 'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, short unsigned int, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetReceiveCallback', 'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, short unsigned int, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SupportsSendFrom', 'bool', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3Node_methods(root_module, cls):
    """Register Python bindings for ns3::Node (node.h, module 'network')."""
    cls.add_constructor([param('ns3::Node const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('uint32_t', 'systemId')])
    cls.add_method('AddApplication', 'uint32_t',
                   [param('ns3::Ptr< ns3::Application >', 'application')])
    cls.add_method('AddDevice', 'uint32_t',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')])
    cls.add_method('ChecksumEnabled', 'bool', [], is_static=True)
    cls.add_method('GetApplication', 'ns3::Ptr< ns3::Application >',
                   [param('uint32_t', 'index')], is_const=True)
    cls.add_method('GetDevice', 'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'index')], is_const=True)
    # Const uint32_t getters, registered in the generator's original order.
    for name in ('GetId', 'GetNApplications', 'GetNDevices', 'GetSystemId'):
        cls.add_method(name, 'uint32_t', [], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Fully-expanded ns3::Callback template names shared by the
    # listener/handler registration methods below.
    _listener = 'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'
    _handler = 'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >'
    cls.add_method('RegisterDeviceAdditionListener', 'void',
                   [param(_listener, 'listener')])
    cls.add_method('RegisterProtocolHandler', 'void',
                   [param(_handler, 'handler'),
                    param('uint16_t', 'protocolType'),
                    param('ns3::Ptr< ns3::NetDevice >', 'device'),
                    param('bool', 'promiscuous', default_value='false')])
    cls.add_method('UnregisterDeviceAdditionListener', 'void',
                   [param(_listener, 'listener')])
    cls.add_method('UnregisterProtocolHandler', 'void',
                   [param(_handler, 'handler')])
    # Protected ns3::Object lifecycle hooks.
    cls.add_method('DoDispose', 'void', [], visibility='protected', is_virtual=True)
    cls.add_method('DoInitialize', 'void', [], visibility='protected', is_virtual=True)
    return
def register_Ns3OrbisTopologyReader_methods(root_module, cls):
    """Register Python bindings for ns3::OrbisTopologyReader (orbis-topology-reader.h, module 'topology-read')."""
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    cls.add_method('Read', 'ns3::NodeContainer', [], is_virtual=True)
    return
def register_Ns3RocketfuelTopologyReader_methods(root_module, cls):
    """Register Python bindings for ns3::RocketfuelTopologyReader (rocketfuel-topology-reader.h, module 'topology-read')."""
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    cls.add_method('Read', 'ns3::NodeContainer', [], is_virtual=True)
    return
def register_Ns3TimeValue_methods(root_module, cls):
    """Register Python bindings for ns3::TimeValue (nstime.h, module 'core')."""
    # Constructors: default, copy, and from an ns3::Time.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TimeValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Time const &', 'value')])
    # ns3::AttributeValue virtual interface plus the typed Get/Set accessors.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    cls.add_method('Get', 'ns3::Time', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::Time const &', 'value')])
    return
def register_Ns3TypeIdChecker_methods(root_module, cls):
    """Register Python bindings for ns3::TypeIdChecker (type-id.h, module 'core')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::TypeIdChecker const &', 'arg0')])  # copy constructor
    return
def register_Ns3TypeIdValue_methods(root_module, cls):
    """Register Python bindings for ns3::TypeIdValue (type-id.h, module 'core')."""
    # Constructors: default, copy, and from an ns3::TypeId.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')])
    cls.add_constructor([param('ns3::TypeId const &', 'value')])
    # ns3::AttributeValue virtual interface plus the typed Get/Set accessors.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    cls.add_method('Get', 'ns3::TypeId', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::TypeId const &', 'value')])
    return
def register_Ns3AddressChecker_methods(root_module, cls):
    """Register Python bindings for ns3::AddressChecker (address.h, module 'network')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::AddressChecker const &', 'arg0')])  # copy constructor
    return
def register_Ns3AddressValue_methods(root_module, cls):
    """Register Python bindings for ns3::AddressValue (address.h, module 'network')."""
    # Constructors: default, copy, and from an ns3::Address.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::AddressValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Address const &', 'value')])
    # ns3::AttributeValue virtual interface plus the typed Get/Set accessors.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    cls.add_method('Get', 'ns3::Address', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::Address const &', 'value')])
    return
def register_Ns3HashImplementation_methods(root_module, cls):
    """Register Python bindings for the abstract ns3::Hash::Implementation (hash-function.h, module 'core')."""
    cls.add_constructor([param('ns3::Hash::Implementation const &', 'arg0')])
    cls.add_constructor([])
    # GetHash32 is pure virtual; GetHash64 has a default implementation.
    cls.add_method('GetHash32', 'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('GetHash64', 'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    cls.add_method('clear', 'void', [], is_pure_virtual=True, is_virtual=True)
    return
def register_Ns3HashFunctionFnv1a_methods(root_module, cls):
    """Register Python bindings for ns3::Hash::Function::Fnv1a (hash-fnv.h, module 'core')."""
    cls.add_constructor([param('ns3::Hash::Function::Fnv1a const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('GetHash32', 'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    cls.add_method('GetHash64', 'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    cls.add_method('clear', 'void', [], is_virtual=True)
    return
def register_Ns3HashFunctionHash32_methods(root_module, cls):
    """Register Python bindings for ns3::Hash::Function::Hash32 (hash-function.h, module 'core')."""
    cls.add_constructor([param('ns3::Hash::Function::Hash32 const &', 'arg0')])
    # Wraps a bare 32-bit hash function pointer.
    cls.add_constructor([param('ns3::Hash::Hash32Function_ptr', 'hp')])
    cls.add_method('GetHash32', 'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    cls.add_method('clear', 'void', [], is_virtual=True)
    return
def register_Ns3HashFunctionHash64_methods(root_module, cls):
    """Register Python bindings for ns3::Hash::Function::Hash64 (hash-function.h, module 'core')."""
    cls.add_constructor([param('ns3::Hash::Function::Hash64 const &', 'arg0')])
    # Wraps a bare 64-bit hash function pointer.
    cls.add_constructor([param('ns3::Hash::Hash64Function_ptr', 'hp')])
    cls.add_method('GetHash32', 'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    cls.add_method('GetHash64', 'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    cls.add_method('clear', 'void', [], is_virtual=True)
    return
def register_Ns3HashFunctionMurmur3_methods(root_module, cls):
    """Register Python bindings for ns3::Hash::Function::Murmur3 (hash-murmur3.h, module 'core')."""
    cls.add_constructor([param('ns3::Hash::Function::Murmur3 const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('GetHash32', 'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    cls.add_method('GetHash64', 'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    cls.add_method('clear', 'void', [], is_virtual=True)
    return
def register_functions(root_module):
    """Register free functions, recursing into the ns3 submodules."""
    register_functions_ns3_FatalImpl(root_module.get_submodule('FatalImpl'), root_module)
    register_functions_ns3_Hash(root_module.get_submodule('Hash'), root_module)
    return
def register_functions_ns3_FatalImpl(module, root_module):
    """No free functions to register in the ns3::FatalImpl submodule."""
    return
def register_functions_ns3_Hash(module, root_module):
    """Register ns3::Hash free functions, recursing into ns3::Hash::Function."""
    register_functions_ns3_Hash_Function(module.get_submodule('Function'), root_module)
    return
def register_functions_ns3_Hash_Function(module, root_module):
    """No free functions to register in the ns3::Hash::Function submodule."""
    return
def main():
    """Build the module description and emit the generated binding code to stdout."""
    sink = FileCodeSink(sys.stdout)
    root_module = module_init()
    register_types(root_module)
    register_methods(root_module)
    register_functions(root_module)
    root_module.generate(sink)

if __name__ == '__main__':
    main()
| letiangit/802.11ah-ns3 | src/topology-read/bindings/modulegen__gcc_LP64.py | Python | gpl-2.0 | 171,132 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import sys
import uuid
import eventlet
import mock
from oslo.config import cfg
import testtools
from neutron.agent.common import config
from neutron.agent import dhcp_agent
from neutron.agent.linux import dhcp
from neutron.agent.linux import interface
from neutron.common import constants as const
from neutron.common import exceptions
from neutron.common import rpc_compat
from neutron.tests import base
HOSTNAME = 'hostname'
# Dotted-path strings used as mock.patch targets for the device manager and
# the DHCP plugin RPC proxy.
dev_man = dhcp.DeviceManager
rpc_api = dhcp_agent.DhcpPluginApi
DEVICE_MANAGER = '%s.%s' % (dev_man.__module__, dev_man.__name__)
DHCP_PLUGIN = '%s.%s' % (rpc_api.__module__, rpc_api.__name__)

# Shared fake Neutron resources (subnets, ports, networks) used as fixtures
# by the DHCP agent tests below.
fake_tenant_id = 'aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa'
fake_subnet1_allocation_pools = dhcp.DictModel(dict(id='', start='172.9.9.2',
                                                    end='172.9.9.254'))
# Subnet with DHCP enabled.
fake_subnet1 = dhcp.DictModel(dict(id='bbbbbbbb-bbbb-bbbb-bbbbbbbbbbbb',
                                   network_id='12345678-1234-5678-1234567890ab',
                                   cidr='172.9.9.0/24', enable_dhcp=True, name='',
                                   tenant_id=fake_tenant_id,
                                   gateway_ip='172.9.9.1', host_routes=[],
                                   dns_nameservers=[], ip_version=4,
                                   allocation_pools=fake_subnet1_allocation_pools))
fake_subnet2_allocation_pools = dhcp.DictModel(dict(id='', start='172.9.8.2',
                                                    end='172.9.8.254'))
# Subnet with DHCP disabled.
fake_subnet2 = dhcp.DictModel(dict(id='dddddddd-dddd-dddd-dddddddddddd',
                                   network_id='12345678-1234-5678-1234567890ab',
                                   cidr='172.9.8.0/24', enable_dhcp=False, name='',
                                   tenant_id=fake_tenant_id, gateway_ip='172.9.8.1',
                                   host_routes=[], dns_nameservers=[], ip_version=4,
                                   allocation_pools=fake_subnet2_allocation_pools))
fake_subnet3 = dhcp.DictModel(dict(id='bbbbbbbb-1111-2222-bbbbbbbbbbbb',
                                   network_id='12345678-1234-5678-1234567890ab',
                                   cidr='192.168.1.1/24', enable_dhcp=True))
# Link-local metadata subnet (169.254.169.252/30).
fake_meta_subnet = dhcp.DictModel(dict(id='bbbbbbbb-1111-2222-bbbbbbbbbbbb',
                                       network_id='12345678-1234-5678-1234567890ab',
                                       cidr='169.254.169.252/30',
                                       gateway_ip='169.254.169.253',
                                       enable_dhcp=True))
fake_fixed_ip1 = dhcp.DictModel(dict(id='', subnet_id=fake_subnet1.id,
                                     ip_address='172.9.9.9'))
fake_meta_fixed_ip = dhcp.DictModel(dict(id='', subnet=fake_meta_subnet,
                                         ip_address='169.254.169.254'))
fake_allocation_pool_subnet1 = dhcp.DictModel(dict(id='', start='172.9.9.2',
                                                   end='172.9.9.254'))
# Port owned by the DHCP agent itself (device_id prefixed with 'dhcp-').
fake_port1 = dhcp.DictModel(dict(id='12345678-1234-aaaa-1234567890ab',
                                 device_id='dhcp-12345678-1234-aaaa-1234567890ab',
                                 allocation_pools=fake_subnet1_allocation_pools,
                                 mac_address='aa:bb:cc:dd:ee:ff',
                                 network_id='12345678-1234-5678-1234567890ab',
                                 fixed_ips=[fake_fixed_ip1]))
# Port with no fixed IPs.
fake_port2 = dhcp.DictModel(dict(id='12345678-1234-aaaa-123456789000',
                                 mac_address='aa:bb:cc:dd:ee:99',
                                 network_id='12345678-1234-5678-1234567890ab',
                                 fixed_ips=[]))
# Router-interface port carrying the metadata fixed IP.
fake_meta_port = dhcp.DictModel(dict(id='12345678-1234-aaaa-1234567890ab',
                                     mac_address='aa:bb:cc:dd:ee:ff',
                                     network_id='12345678-1234-5678-1234567890ab',
                                     device_owner=const.DEVICE_OWNER_ROUTER_INTF,
                                     device_id='forzanapoli',
                                     fixed_ips=[fake_meta_fixed_ip]))
fake_network = dhcp.NetModel(True, dict(id='12345678-1234-5678-1234567890ab',
                                        tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa',
                                        admin_state_up=True,
                                        subnets=[fake_subnet1, fake_subnet2],
                                        ports=[fake_port1]))
fake_meta_network = dhcp.NetModel(
    True, dict(id='12345678-1234-5678-1234567890ab',
               tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa',
               admin_state_up=True,
               subnets=[fake_meta_subnet],
               ports=[fake_meta_port]))
# Network that is administratively down, with no subnets or ports.
fake_down_network = dhcp.NetModel(
    True, dict(id='12345678-dddd-dddd-1234567890ab',
               tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa',
               admin_state_up=False,
               subnets=[],
               ports=[]))
class TestDhcpAgent(base.BaseTestCase):
    def setUp(self):
        """Configure overrides and mock the interface driver before each test."""
        super(TestDhcpAgent, self).setUp()
        dhcp_agent.register_options()
        cfg.CONF.set_override('interface_driver',
                              'neutron.agent.linux.interface.NullDriver')
        # disable setting up periodic state reporting
        cfg.CONF.set_override('report_interval', 0, 'AGENT')
        # Patch importutils.import_class so the agent loads a Mock driver
        # class instead of a real interface driver.
        self.driver_cls_p = mock.patch(
            'neutron.agent.dhcp_agent.importutils.import_class')
        self.driver = mock.Mock(name='driver')
        self.driver.existing_dhcp_networks.return_value = []
        self.driver_cls = self.driver_cls_p.start()
        self.driver_cls.return_value = self.driver
        # Avoid touching the real filesystem when the agent creates state dirs.
        self.mock_makedirs_p = mock.patch("os.makedirs")
        self.mock_makedirs = self.mock_makedirs_p.start()
def test_dhcp_agent_manager(self):
state_rpc_str = 'neutron.agent.rpc.PluginReportStateAPI'
# sync_state is needed for this test
cfg.CONF.set_override('report_interval', 1, 'AGENT')
with mock.patch.object(dhcp_agent.DhcpAgentWithStateReport,
'sync_state',
autospec=True) as mock_sync_state:
with mock.patch.object(dhcp_agent.DhcpAgentWithStateReport,
'periodic_resync',
autospec=True) as mock_periodic_resync:
with mock.patch(state_rpc_str) as state_rpc:
with mock.patch.object(sys, 'argv') as sys_argv:
sys_argv.return_value = [
'dhcp', '--config-file',
base.etcdir('neutron.conf.test')]
cfg.CONF.register_opts(dhcp_agent.DhcpAgent.OPTS)
config.register_interface_driver_opts_helper(cfg.CONF)
config.register_agent_state_opts_helper(cfg.CONF)
config.register_root_helper(cfg.CONF)
cfg.CONF.register_opts(dhcp.OPTS)
cfg.CONF.register_opts(interface.OPTS)
cfg.CONF(project='neutron')
agent_mgr = dhcp_agent.DhcpAgentWithStateReport(
'testhost')
eventlet.greenthread.sleep(1)
agent_mgr.after_start()
mock_sync_state.assert_called_once_with(agent_mgr)
mock_periodic_resync.assert_called_once_with(agent_mgr)
state_rpc.assert_has_calls(
[mock.call(mock.ANY),
mock.call().report_state(mock.ANY, mock.ANY,
mock.ANY)])
def test_dhcp_agent_main_agent_manager(self):
logging_str = 'neutron.agent.common.config.setup_logging'
launcher_str = 'neutron.openstack.common.service.ServiceLauncher'
with mock.patch(logging_str):
with mock.patch.object(sys, 'argv') as sys_argv:
with mock.patch(launcher_str) as launcher:
sys_argv.return_value = ['dhcp', '--config-file',
base.etcdir('neutron.conf.test')]
dhcp_agent.main()
launcher.assert_has_calls(
[mock.call(), mock.call().launch_service(mock.ANY),
mock.call().wait()])
def test_run_completes_single_pass(self):
with mock.patch(DEVICE_MANAGER):
dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
attrs_to_mock = dict(
[(a, mock.DEFAULT) for a in
['sync_state', 'periodic_resync']])
with mock.patch.multiple(dhcp, **attrs_to_mock) as mocks:
dhcp.run()
mocks['sync_state'].assert_called_once_with()
mocks['periodic_resync'].assert_called_once_with()
def test_call_driver(self):
network = mock.Mock()
network.id = '1'
dhcp = dhcp_agent.DhcpAgent(cfg.CONF)
self.assertTrue(dhcp.call_driver('foo', network))
self.driver.assert_called_once_with(cfg.CONF,
mock.ANY,
'sudo',
mock.ANY,
mock.ANY)
def _test_call_driver_failure(self, exc=None,
trace_level='exception', expected_sync=True):
network = mock.Mock()
network.id = '1'
self.driver.return_value.foo.side_effect = exc or Exception
with mock.patch.object(dhcp_agent.LOG, trace_level) as log:
dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
with mock.patch.object(dhcp,
'schedule_resync') as schedule_resync:
self.assertIsNone(dhcp.call_driver('foo', network))
self.driver.assert_called_once_with(cfg.CONF,
mock.ANY,
'sudo',
mock.ANY,
mock.ANY)
self.assertEqual(log.call_count, 1)
self.assertEqual(expected_sync, schedule_resync.called)
def test_call_driver_failure(self):
self._test_call_driver_failure()
def test_call_driver_remote_error_net_not_found(self):
self._test_call_driver_failure(
exc=rpc_compat.RemoteError(exc_type='NetworkNotFound'),
trace_level='warning')
def test_call_driver_network_not_found(self):
self._test_call_driver_failure(
exc=exceptions.NetworkNotFound(net_id='1'),
trace_level='warning')
def test_call_driver_conflict(self):
self._test_call_driver_failure(
exc=exceptions.Conflict(),
trace_level='warning',
expected_sync=False)
def _test_sync_state_helper(self, known_networks, active_networks):
with mock.patch(DHCP_PLUGIN) as plug:
mock_plugin = mock.Mock()
mock_plugin.get_active_networks_info.return_value = active_networks
plug.return_value = mock_plugin
dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
attrs_to_mock = dict(
[(a, mock.DEFAULT) for a in
['refresh_dhcp_helper', 'disable_dhcp_helper', 'cache']])
with mock.patch.multiple(dhcp, **attrs_to_mock) as mocks:
mocks['cache'].get_network_ids.return_value = known_networks
dhcp.sync_state()
exp_refresh = [
mock.call(net_id) for net_id in active_networks]
diff = set(known_networks) - set(active_networks)
exp_disable = [mock.call(net_id) for net_id in diff]
mocks['cache'].assert_has_calls([mock.call.get_network_ids()])
mocks['refresh_dhcp_helper'].assert_has_called(exp_refresh)
mocks['disable_dhcp_helper'].assert_has_called(exp_disable)
def test_sync_state_initial(self):
self._test_sync_state_helper([], ['a'])
def test_sync_state_same(self):
self._test_sync_state_helper(['a'], ['a'])
def test_sync_state_disabled_net(self):
self._test_sync_state_helper(['b'], ['a'])
def test_sync_state_waitall(self):
class mockNetwork():
id = '0'
admin_state_up = True
subnets = []
def __init__(self, id):
self.id = id
with mock.patch.object(dhcp_agent.eventlet.GreenPool, 'waitall') as w:
active_networks = [mockNetwork('1'), mockNetwork('2'),
mockNetwork('3'), mockNetwork('4'),
mockNetwork('5')]
known_networks = ['1', '2', '3', '4', '5']
self._test_sync_state_helper(known_networks, active_networks)
w.assert_called_once_with()
def test_sync_state_plugin_error(self):
with mock.patch(DHCP_PLUGIN) as plug:
mock_plugin = mock.Mock()
mock_plugin.get_active_networks_info.side_effect = Exception
plug.return_value = mock_plugin
with mock.patch.object(dhcp_agent.LOG, 'exception') as log:
dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
with mock.patch.object(dhcp,
'schedule_resync') as schedule_resync:
dhcp.sync_state()
self.assertTrue(log.called)
self.assertTrue(schedule_resync.called)
def test_periodic_resync(self):
dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
with mock.patch.object(dhcp_agent.eventlet, 'spawn') as spawn:
dhcp.periodic_resync()
spawn.assert_called_once_with(dhcp._periodic_resync_helper)
def test_periodoc_resync_helper(self):
with mock.patch.object(dhcp_agent.eventlet, 'sleep') as sleep:
dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
dhcp.needs_resync_reasons = ['reason1', 'reason2']
with mock.patch.object(dhcp, 'sync_state') as sync_state:
sync_state.side_effect = RuntimeError
with testtools.ExpectedException(RuntimeError):
dhcp._periodic_resync_helper()
sync_state.assert_called_once_with()
sleep.assert_called_once_with(dhcp.conf.resync_interval)
self.assertEqual(len(dhcp.needs_resync_reasons), 0)
def test_populate_cache_on_start_without_active_networks_support(self):
# emul dhcp driver that doesn't support retrieving of active networks
self.driver.existing_dhcp_networks.side_effect = NotImplementedError
with mock.patch.object(dhcp_agent.LOG, 'debug') as log:
dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
self.driver.existing_dhcp_networks.assert_called_once_with(
dhcp.conf,
cfg.CONF.root_helper
)
self.assertFalse(dhcp.cache.get_network_ids())
self.assertTrue(log.called)
def test_populate_cache_on_start(self):
networks = ['aaa', 'bbb']
self.driver.existing_dhcp_networks.return_value = networks
dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
self.driver.existing_dhcp_networks.assert_called_once_with(
dhcp.conf,
cfg.CONF.root_helper
)
self.assertEqual(set(networks), set(dhcp.cache.get_network_ids()))
def test_none_interface_driver(self):
cfg.CONF.set_override('interface_driver', None)
with mock.patch.object(dhcp, 'LOG') as log:
self.assertRaises(SystemExit, dhcp.DeviceManager,
cfg.CONF, 'sudo', None)
msg = 'An interface driver must be specified'
log.error.assert_called_once_with(msg)
def test_nonexistent_interface_driver(self):
# Temporarily turn off mock, so could use the real import_class
# to import interface_driver.
self.driver_cls_p.stop()
self.addCleanup(self.driver_cls_p.start)
cfg.CONF.set_override('interface_driver', 'foo')
with mock.patch.object(dhcp, 'LOG') as log:
self.assertRaises(SystemExit, dhcp.DeviceManager,
cfg.CONF, 'sudo', None)
self.assertEqual(log.error.call_count, 1)
class TestLogArgs(base.BaseTestCase):
    """Tests for config.get_log_args over dir/file/syslog combinations."""

    def _get_args(self, conf_dict):
        # Wrap the plain dict in a DictModel and run it through
        # get_log_args with a fixed log file name, as every test does.
        return config.get_log_args(dhcp.DictModel(conf_dict),
                                   'log_file_name')

    def test_log_args_without_log_dir_and_file(self):
        # Syslog only: no --log-file/--log-dir flags are emitted.
        args = self._get_args({'debug': True,
                               'verbose': False,
                               'log_dir': None,
                               'log_file': None,
                               'use_syslog': True,
                               'syslog_log_facility': 'LOG_USER'})
        self.assertEqual(['--debug',
                          '--use-syslog',
                          '--syslog-log-facility=LOG_USER'], args)

    def test_log_args_without_log_file(self):
        # log_dir alone still produces both file and dir flags.
        args = self._get_args({'debug': True,
                               'verbose': True,
                               'log_dir': '/etc/tests',
                               'log_file': None,
                               'use_syslog': False,
                               'syslog_log_facility': 'LOG_USER'})
        self.assertEqual(['--debug',
                          '--verbose',
                          '--log-file=log_file_name',
                          '--log-dir=/etc/tests'], args)

    def test_log_args_with_log_dir_and_file(self):
        # log_file's directory component is appended to log_dir.
        args = self._get_args({'debug': True,
                               'verbose': False,
                               'log_dir': '/etc/tests',
                               'log_file': 'tests/filelog',
                               'use_syslog': False,
                               'syslog_log_facility': 'LOG_USER'})
        self.assertEqual(['--debug',
                          '--log-file=log_file_name',
                          '--log-dir=/etc/tests/tests'], args)

    def test_log_args_without_log_dir(self):
        # With no log_dir, the directory comes from log_file alone.
        args = self._get_args({'debug': True,
                               'verbose': False,
                               'log_file': 'tests/filelog',
                               'log_dir': None,
                               'use_syslog': False,
                               'syslog_log_facility': 'LOG_USER'})
        self.assertEqual(['--debug',
                          '--log-file=log_file_name',
                          '--log-dir=tests'], args)

    def test_log_args_with_filelog_and_syslog(self):
        # File logging takes precedence: no syslog flags when a file is set.
        args = self._get_args({'debug': True,
                               'verbose': True,
                               'log_file': 'tests/filelog',
                               'log_dir': '/etc/tests',
                               'use_syslog': True,
                               'syslog_log_facility': 'LOG_USER'})
        self.assertEqual(['--debug',
                          '--verbose',
                          '--log-file=log_file_name',
                          '--log-dir=/etc/tests/tests'], args)
class TestDhcpAgentEventHandler(base.BaseTestCase):
    """Tests for the DHCP agent's RPC event handlers.

    The plugin API, network cache, cache-population step and driver calls
    are all mocked in setUp, so each test checks only which collaborators
    an event handler invokes and with what arguments.

    Fixes applied in review:
    * ``enable.assertCalledOnceWith(...)`` and friends are NOT Mock
      assertion methods — Mock auto-creates them, so they silently assert
      nothing.  Replaced with the real ``assert_called_once_with``.
    * ``mock.call.call_driver('reload_allocations', ...)`` only matched
      because Mock ignores the expected name when the recorded call has
      none; replaced with the accurate ``mock.call(...)`` form.
    """

    def setUp(self):
        super(TestDhcpAgentEventHandler, self).setUp()
        config.register_interface_driver_opts_helper(cfg.CONF)
        cfg.CONF.register_opts(dhcp.OPTS)
        cfg.CONF.set_override('interface_driver',
                              'neutron.agent.linux.interface.NullDriver')
        config.register_root_helper(cfg.CONF)
        cfg.CONF.register_opts(dhcp_agent.DhcpAgent.OPTS)
        # Mock the plugin-side RPC API.
        self.plugin_p = mock.patch(DHCP_PLUGIN)
        plugin_cls = self.plugin_p.start()
        self.plugin = mock.Mock()
        plugin_cls.return_value = self.plugin
        # Mock the agent's network cache.
        self.cache_p = mock.patch('neutron.agent.dhcp_agent.NetworkCache')
        cache_cls = self.cache_p.start()
        self.cache = mock.Mock()
        cache_cls.return_value = self.cache
        self.mock_makedirs_p = mock.patch("os.makedirs")
        self.mock_makedirs = self.mock_makedirs_p.start()
        # Skip the cache-population step that normally runs on agent init.
        self.mock_init_p = mock.patch('neutron.agent.dhcp_agent.'
                                      'DhcpAgent._populate_networks_cache')
        self.mock_init = self.mock_init_p.start()
        with mock.patch.object(dhcp.Dnsmasq,
                               'check_version') as check_v:
            check_v.return_value = dhcp.Dnsmasq.MINIMUM_VERSION
            self.dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
        self.call_driver_p = mock.patch.object(self.dhcp, 'call_driver')
        self.call_driver = self.call_driver_p.start()
        self.schedule_resync_p = mock.patch.object(self.dhcp,
                                                   'schedule_resync')
        self.schedule_resync = self.schedule_resync_p.start()
        self.external_process_p = mock.patch(
            'neutron.agent.linux.external_process.ProcessManager'
        )
        self.external_process = self.external_process_p.start()

    def _enable_dhcp_helper(self, isolated_metadata=False):
        """enable_dhcp_helper fetches, enables and caches the network."""
        if isolated_metadata:
            cfg.CONF.set_override('enable_isolated_metadata', True)
        self.plugin.get_network_info.return_value = fake_network
        self.dhcp.enable_dhcp_helper(fake_network.id)
        self.plugin.assert_has_calls(
            [mock.call.get_network_info(fake_network.id)])
        self.call_driver.assert_called_once_with('enable', fake_network)
        self.cache.assert_has_calls([mock.call.put(fake_network)])
        if isolated_metadata:
            # The metadata proxy process is spawned for the namespace.
            self.external_process.assert_has_calls([
                mock.call(
                    cfg.CONF,
                    '12345678-1234-5678-1234567890ab',
                    'sudo',
                    'qdhcp-12345678-1234-5678-1234567890ab'),
                mock.call().enable(mock.ANY)
            ])
        else:
            self.assertFalse(self.external_process.call_count)

    def test_enable_dhcp_helper_enable_isolated_metadata(self):
        self._enable_dhcp_helper(isolated_metadata=True)

    def test_enable_dhcp_helper(self):
        self._enable_dhcp_helper()

    def test_enable_dhcp_helper_down_network(self):
        """An admin-down network is fetched but never enabled or cached."""
        self.plugin.get_network_info.return_value = fake_down_network
        self.dhcp.enable_dhcp_helper(fake_down_network.id)
        self.plugin.assert_has_calls(
            [mock.call.get_network_info(fake_down_network.id)])
        self.assertFalse(self.call_driver.called)
        self.assertFalse(self.cache.called)
        self.assertFalse(self.external_process.called)

    def test_enable_dhcp_helper_network_none(self):
        """A vanished network is only warned about — no resync needed."""
        self.plugin.get_network_info.return_value = None
        with mock.patch.object(dhcp_agent.LOG, 'warn') as log:
            self.dhcp.enable_dhcp_helper('fake_id')
            self.plugin.assert_has_calls(
                [mock.call.get_network_info('fake_id')])
            self.assertFalse(self.call_driver.called)
            self.assertTrue(log.called)
            self.assertFalse(self.dhcp.schedule_resync.called)

    def test_enable_dhcp_helper_exception_during_rpc(self):
        """An RPC failure is logged and schedules a resync."""
        self.plugin.get_network_info.side_effect = Exception
        with mock.patch.object(dhcp_agent.LOG, 'exception') as log:
            self.dhcp.enable_dhcp_helper(fake_network.id)
            self.plugin.assert_has_calls(
                [mock.call.get_network_info(fake_network.id)])
            self.assertFalse(self.call_driver.called)
            self.assertTrue(log.called)
            self.assertTrue(self.schedule_resync.called)
            self.assertFalse(self.cache.called)
            self.assertFalse(self.external_process.called)

    def test_enable_dhcp_helper_driver_failure(self):
        """A failed driver enable leaves the cache and proxy untouched."""
        self.plugin.get_network_info.return_value = fake_network
        self.call_driver.return_value = False
        self.dhcp.enable_dhcp_helper(fake_network.id)
        self.plugin.assert_has_calls(
            [mock.call.get_network_info(fake_network.id)])
        self.call_driver.assert_called_once_with('enable', fake_network)
        self.assertFalse(self.cache.called)
        self.assertFalse(self.external_process.called)

    def _disable_dhcp_helper_known_network(self, isolated_metadata=False):
        """disable_dhcp_helper disables DHCP for a cached network."""
        if isolated_metadata:
            cfg.CONF.set_override('enable_isolated_metadata', True)
        self.cache.get_network_by_id.return_value = fake_network
        self.dhcp.disable_dhcp_helper(fake_network.id)
        self.cache.assert_has_calls(
            [mock.call.get_network_by_id(fake_network.id)])
        self.call_driver.assert_called_once_with('disable', fake_network)
        if isolated_metadata:
            self.external_process.assert_has_calls([
                mock.call(
                    cfg.CONF,
                    '12345678-1234-5678-1234567890ab',
                    'sudo',
                    'qdhcp-12345678-1234-5678-1234567890ab'),
                mock.call().disable()
            ])
        else:
            self.assertFalse(self.external_process.call_count)

    def test_disable_dhcp_helper_known_network_isolated_metadata(self):
        self._disable_dhcp_helper_known_network(isolated_metadata=True)

    def test_disable_dhcp_helper_known_network(self):
        self._disable_dhcp_helper_known_network()

    def test_disable_dhcp_helper_unknown_network(self):
        """Disabling an uncached network is a no-op beyond the lookup."""
        self.cache.get_network_by_id.return_value = None
        self.dhcp.disable_dhcp_helper('abcdef')
        self.cache.assert_has_calls(
            [mock.call.get_network_by_id('abcdef')])
        self.assertEqual(0, self.call_driver.call_count)
        self.assertFalse(self.external_process.called)

    def _disable_dhcp_helper_driver_failure(self, isolated_metadata=False):
        """A failed driver disable still stops the metadata proxy."""
        if isolated_metadata:
            cfg.CONF.set_override('enable_isolated_metadata', True)
        self.cache.get_network_by_id.return_value = fake_network
        self.call_driver.return_value = False
        self.dhcp.disable_dhcp_helper(fake_network.id)
        self.cache.assert_has_calls(
            [mock.call.get_network_by_id(fake_network.id)])
        self.call_driver.assert_called_once_with('disable', fake_network)
        self.cache.assert_has_calls(
            [mock.call.get_network_by_id(fake_network.id)])
        if isolated_metadata:
            self.external_process.assert_has_calls([
                mock.call(
                    cfg.CONF,
                    '12345678-1234-5678-1234567890ab',
                    'sudo',
                    'qdhcp-12345678-1234-5678-1234567890ab'),
                mock.call().disable()
            ])
        else:
            self.assertFalse(self.external_process.call_count)

    def test_disable_dhcp_helper_driver_failure_isolated_metadata(self):
        self._disable_dhcp_helper_driver_failure(isolated_metadata=True)

    def test_disable_dhcp_helper_driver_failure(self):
        self._disable_dhcp_helper_driver_failure()

    def test_enable_isolated_metadata_proxy(self):
        class_path = 'neutron.agent.linux.external_process.ProcessManager'
        with mock.patch(class_path) as ext_process:
            self.dhcp.enable_isolated_metadata_proxy(fake_network)
            ext_process.assert_has_calls([
                mock.call(
                    cfg.CONF,
                    '12345678-1234-5678-1234567890ab',
                    'sudo',
                    'qdhcp-12345678-1234-5678-1234567890ab'),
                mock.call().enable(mock.ANY)
            ])

    def test_disable_isolated_metadata_proxy(self):
        class_path = 'neutron.agent.linux.external_process.ProcessManager'
        with mock.patch(class_path) as ext_process:
            self.dhcp.disable_isolated_metadata_proxy(fake_network)
            ext_process.assert_has_calls([
                mock.call(
                    cfg.CONF,
                    '12345678-1234-5678-1234567890ab',
                    'sudo',
                    'qdhcp-12345678-1234-5678-1234567890ab'),
                mock.call().disable()
            ])

    def test_enable_isolated_metadata_proxy_with_metadata_network(self):
        """On a metadata network the proxy is run with --router_id."""
        cfg.CONF.set_override('enable_metadata_network', True)
        cfg.CONF.set_override('debug', True)
        cfg.CONF.set_override('log_file', 'test.log')
        class_path = 'neutron.agent.linux.ip_lib.IPWrapper'
        self.external_process_p.stop()
        # Ensure the mock is restored if this test fail
        try:
            with mock.patch(class_path) as ip_wrapper:
                self.dhcp.enable_isolated_metadata_proxy(fake_meta_network)
                ip_wrapper.assert_has_calls([mock.call(
                    'sudo',
                    'qdhcp-12345678-1234-5678-1234567890ab'),
                    mock.call().netns.execute([
                        'neutron-ns-metadata-proxy',
                        mock.ANY,
                        mock.ANY,
                        '--router_id=forzanapoli',
                        mock.ANY,
                        mock.ANY,
                        '--debug',
                        ('--log-file=neutron-ns-metadata-proxy-%s.log' %
                         fake_meta_network.id)])
                ])
        finally:
            self.external_process_p.start()

    def test_network_create_end(self):
        payload = dict(network=dict(id=fake_network.id))
        with mock.patch.object(self.dhcp, 'enable_dhcp_helper') as enable:
            self.dhcp.network_create_end(None, payload)
            # Fixed: was the nonexistent (no-op) assertCalledOnceWith().
            enable.assert_called_once_with(fake_network.id)

    def test_network_update_end_admin_state_up(self):
        payload = dict(network=dict(id=fake_network.id, admin_state_up=True))
        with mock.patch.object(self.dhcp, 'enable_dhcp_helper') as enable:
            self.dhcp.network_update_end(None, payload)
            # Fixed: was the nonexistent (no-op) assertCalledOnceWith().
            enable.assert_called_once_with(fake_network.id)

    def test_network_update_end_admin_state_down(self):
        payload = dict(network=dict(id=fake_network.id, admin_state_up=False))
        with mock.patch.object(self.dhcp, 'disable_dhcp_helper') as disable:
            self.dhcp.network_update_end(None, payload)
            # Fixed: was the nonexistent (no-op) assertCalledOnceWith().
            disable.assert_called_once_with(fake_network.id)

    def test_network_delete_end(self):
        payload = dict(network_id=fake_network.id)
        with mock.patch.object(self.dhcp, 'disable_dhcp_helper') as disable:
            self.dhcp.network_delete_end(None, payload)
            # Fixed: was the nonexistent (no-op) assertCalledOnceWith().
            disable.assert_called_once_with(fake_network.id)

    def test_refresh_dhcp_helper_no_dhcp_enabled_networks(self):
        """A network without DHCP-enabled subnets is disabled on refresh."""
        network = dhcp.NetModel(True, dict(id='net-id',
                                tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa',
                                admin_state_up=True,
                                subnets=[],
                                ports=[]))
        self.cache.get_network_by_id.return_value = network
        self.plugin.get_network_info.return_value = network
        with mock.patch.object(self.dhcp, 'disable_dhcp_helper') as disable:
            self.dhcp.refresh_dhcp_helper(network.id)
            disable.assert_called_once_with(network.id)
            self.assertFalse(self.cache.called)
            self.assertFalse(self.call_driver.called)
            self.cache.assert_has_calls(
                [mock.call.get_network_by_id('net-id')])

    def test_refresh_dhcp_helper_exception_during_rpc(self):
        network = dhcp.NetModel(True, dict(id='net-id',
                                tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa',
                                admin_state_up=True,
                                subnets=[],
                                ports=[]))
        self.cache.get_network_by_id.return_value = network
        self.plugin.get_network_info.side_effect = Exception
        with mock.patch.object(dhcp_agent.LOG, 'exception') as log:
            self.dhcp.refresh_dhcp_helper(network.id)
            self.assertFalse(self.call_driver.called)
            self.cache.assert_has_calls(
                [mock.call.get_network_by_id('net-id')])
            self.assertTrue(log.called)
            self.assertTrue(self.dhcp.schedule_resync.called)

    def test_subnet_update_end(self):
        """Subnet change with unchanged subnet set reloads allocations."""
        payload = dict(subnet=dict(network_id=fake_network.id))
        self.cache.get_network_by_id.return_value = fake_network
        self.plugin.get_network_info.return_value = fake_network
        self.dhcp.subnet_update_end(None, payload)
        self.cache.assert_has_calls([mock.call.put(fake_network)])
        self.call_driver.assert_called_once_with('reload_allocations',
                                                 fake_network)

    def test_subnet_update_end_restart(self):
        """A changed subnet set requires a full dnsmasq restart."""
        new_state = dhcp.NetModel(True, dict(id=fake_network.id,
                                  tenant_id=fake_network.tenant_id,
                                  admin_state_up=True,
                                  subnets=[fake_subnet1, fake_subnet3],
                                  ports=[fake_port1]))
        payload = dict(subnet=dict(network_id=fake_network.id))
        self.cache.get_network_by_id.return_value = fake_network
        self.plugin.get_network_info.return_value = new_state
        self.dhcp.subnet_update_end(None, payload)
        self.cache.assert_has_calls([mock.call.put(new_state)])
        self.call_driver.assert_called_once_with('restart',
                                                 new_state)

    def test_subnet_update_end_delete_payload(self):
        # NOTE(review): despite its name this test exercises
        # subnet_delete_end; consider renaming.
        prev_state = dhcp.NetModel(True, dict(id=fake_network.id,
                                   tenant_id=fake_network.tenant_id,
                                   admin_state_up=True,
                                   subnets=[fake_subnet1, fake_subnet3],
                                   ports=[fake_port1]))
        payload = dict(subnet_id=fake_subnet1.id)
        self.cache.get_network_by_subnet_id.return_value = prev_state
        self.cache.get_network_by_id.return_value = prev_state
        self.plugin.get_network_info.return_value = fake_network
        self.dhcp.subnet_delete_end(None, payload)
        self.cache.assert_has_calls([
            mock.call.get_network_by_subnet_id(
                'bbbbbbbb-bbbb-bbbb-bbbbbbbbbbbb'),
            mock.call.get_network_by_id('12345678-1234-5678-1234567890ab'),
            mock.call.put(fake_network)])
        self.call_driver.assert_called_once_with('restart',
                                                 fake_network)

    def test_port_update_end(self):
        payload = dict(port=fake_port2)
        self.cache.get_network_by_id.return_value = fake_network
        self.cache.get_port_by_id.return_value = fake_port2
        self.dhcp.port_update_end(None, payload)
        self.cache.assert_has_calls(
            [mock.call.get_network_by_id(fake_port2.network_id),
             mock.call.put_port(mock.ANY)])
        self.call_driver.assert_called_once_with('reload_allocations',
                                                 fake_network)

    def test_port_update_change_ip_on_port(self):
        payload = dict(port=fake_port1)
        self.cache.get_network_by_id.return_value = fake_network
        updated_fake_port1 = copy.deepcopy(fake_port1)
        updated_fake_port1.fixed_ips[0].ip_address = '172.9.9.99'
        self.cache.get_port_by_id.return_value = updated_fake_port1
        self.dhcp.port_update_end(None, payload)
        self.cache.assert_has_calls(
            [mock.call.get_network_by_id(fake_port1.network_id),
             mock.call.put_port(mock.ANY)])
        # Fixed: mock.call.call_driver(...) only matched the recorded
        # call('reload_allocations', ...) through Mock's lenient no-name
        # equality; assert the actual call shape instead.
        self.call_driver.assert_has_calls(
            [mock.call('reload_allocations', fake_network)])

    def test_port_delete_end(self):
        payload = dict(port_id=fake_port2.id)
        self.cache.get_network_by_id.return_value = fake_network
        self.cache.get_port_by_id.return_value = fake_port2
        self.dhcp.port_delete_end(None, payload)
        self.cache.assert_has_calls(
            [mock.call.get_port_by_id(fake_port2.id),
             mock.call.get_network_by_id(fake_network.id),
             mock.call.remove_port(fake_port2)])
        # Fixed: see test_port_update_change_ip_on_port.
        self.call_driver.assert_has_calls(
            [mock.call('reload_allocations', fake_network)])

    def test_port_delete_end_unknown_port(self):
        payload = dict(port_id='unknown')
        self.cache.get_port_by_id.return_value = None
        self.dhcp.port_delete_end(None, payload)
        self.cache.assert_has_calls([mock.call.get_port_by_id('unknown')])
        self.assertEqual(self.call_driver.call_count, 0)
class TestDhcpPluginApiProxy(base.BaseTestCase):
    """Tests that DhcpPluginApi builds the expected RPC messages."""

    def setUp(self):
        super(TestDhcpPluginApiProxy, self).setUp()
        self.proxy = dhcp_agent.DhcpPluginApi('foo', {}, None)
        self.proxy.host = 'foo'
        # Patch the proxy's RPC plumbing; expose both the patchers and the
        # started mocks as attributes, mirroring the original fixture names.
        for attr in ('call', 'make_msg'):
            patcher = mock.patch.object(self.proxy, attr)
            setattr(self, attr + '_p', patcher)
            setattr(self, attr, patcher.start())

    @staticmethod
    def _dhcp_port_body():
        # Common request body for the create_dhcp_port tests.
        return {'port': {'name': '',
                         'admin_state_up': True,
                         'network_id': fake_network.id,
                         'tenant_id': fake_network.tenant_id,
                         'fixed_ips': [
                             {'subnet_id': fake_fixed_ip1.subnet_id}],
                         'device_id': mock.ANY}}

    def test_get_network_info(self):
        self.call.return_value = {'a': 1}
        result = self.proxy.get_network_info('netid')
        # The raw dict is wrapped so keys are attribute-accessible.
        self.assertEqual(1, result.a)
        self.assertTrue(self.call.called)
        self.make_msg.assert_called_once_with('get_network_info',
                                              network_id='netid',
                                              host='foo')

    def test_get_dhcp_port(self):
        self.call.return_value = {'a': 1}
        result = self.proxy.get_dhcp_port('netid', 'devid')
        self.assertEqual(1, result.a)
        self.assertTrue(self.call.called)
        self.make_msg.assert_called_once_with('get_dhcp_port',
                                              network_id='netid',
                                              device_id='devid',
                                              host='foo')

    def test_get_dhcp_port_none(self):
        # A None RPC result must not be wrapped into a model.
        self.call.return_value = None
        self.assertIsNone(self.proxy.get_dhcp_port('netid', 'devid'))

    def test_get_active_networks_info(self):
        self.proxy.get_active_networks_info()
        self.make_msg.assert_called_once_with('get_active_networks_info',
                                              host='foo')

    def test_create_dhcp_port(self):
        body = self._dhcp_port_body()
        self.proxy.create_dhcp_port(body)
        self.make_msg.assert_called_once_with('create_dhcp_port',
                                              port=body,
                                              host='foo')

    def test_create_dhcp_port_none(self):
        self.call.return_value = None
        self.assertIsNone(self.proxy.create_dhcp_port(self._dhcp_port_body()))

    def test_update_dhcp_port_none(self):
        self.call.return_value = None
        body = {'port': {'fixed_ips':
                         [{'subnet_id': fake_fixed_ip1.subnet_id}]}}
        self.assertIsNone(self.proxy.update_dhcp_port(fake_port1.id, body))

    def test_update_dhcp_port(self):
        body = {'port': {'fixed_ips':
                         [{'subnet_id': fake_fixed_ip1.subnet_id}]}}
        self.proxy.update_dhcp_port(fake_port1.id, body)
        self.make_msg.assert_called_once_with('update_dhcp_port',
                                              port_id=fake_port1.id,
                                              port=body,
                                              host='foo')

    def test_release_dhcp_port(self):
        self.proxy.release_dhcp_port('netid', 'devid')
        self.assertTrue(self.call.called)
        self.make_msg.assert_called_once_with('release_dhcp_port',
                                              network_id='netid',
                                              device_id='devid',
                                              host='foo')

    def test_release_port_fixed_ip(self):
        self.proxy.release_port_fixed_ip('netid', 'devid', 'subid')
        self.assertTrue(self.call.called)
        self.make_msg.assert_called_once_with('release_port_fixed_ip',
                                              network_id='netid',
                                              subnet_id='subid',
                                              device_id='devid',
                                              host='foo')
class TestNetworkCache(base.BaseTestCase):
    """Tests for NetworkCache's network/subnet/port lookup bookkeeping."""

    def _new_cache(self, *networks):
        # Build a cache pre-populated with the given networks.
        cache = dhcp_agent.NetworkCache()
        for net in networks:
            cache.put(net)
        return cache

    def test_put_network(self):
        cache = self._new_cache(fake_network)
        self.assertEqual({fake_network.id: fake_network}, cache.cache)
        self.assertEqual({fake_subnet1.id: fake_network.id,
                          fake_subnet2.id: fake_network.id},
                         cache.subnet_lookup)
        self.assertEqual({fake_port1.id: fake_network.id},
                         cache.port_lookup)

    def test_put_network_existing(self):
        # Re-putting a network must first remove the stale entry.
        stale = mock.Mock()
        cache = dhcp_agent.NetworkCache()
        with mock.patch.object(cache, 'remove') as remove:
            cache.cache[fake_network.id] = stale
            cache.put(fake_network)
            remove.assert_called_once_with(stale)
        self.assertEqual({fake_network.id: fake_network}, cache.cache)
        self.assertEqual({fake_subnet1.id: fake_network.id,
                          fake_subnet2.id: fake_network.id},
                         cache.subnet_lookup)
        self.assertEqual({fake_port1.id: fake_network.id},
                         cache.port_lookup)

    def test_remove_network(self):
        cache = dhcp_agent.NetworkCache()
        cache.cache = {fake_network.id: fake_network}
        cache.subnet_lookup = {fake_subnet1.id: fake_network.id,
                               fake_subnet2.id: fake_network.id}
        cache.port_lookup = {fake_port1.id: fake_network.id}
        cache.remove(fake_network)
        # All three lookup structures must be emptied.
        self.assertEqual(0, len(cache.cache))
        self.assertEqual(0, len(cache.subnet_lookup))
        self.assertEqual(0, len(cache.port_lookup))

    def test_get_network_by_id(self):
        cache = self._new_cache(fake_network)
        self.assertEqual(fake_network,
                         cache.get_network_by_id(fake_network.id))

    def test_get_network_ids(self):
        cache = self._new_cache(fake_network)
        self.assertEqual([fake_network.id], cache.get_network_ids())

    def test_get_network_by_subnet_id(self):
        cache = self._new_cache(fake_network)
        self.assertEqual(fake_network,
                         cache.get_network_by_subnet_id(fake_subnet1.id))

    def test_get_network_by_port_id(self):
        cache = self._new_cache(fake_network)
        self.assertEqual(fake_network,
                         cache.get_network_by_port_id(fake_port1.id))

    def test_put_port(self):
        net = dhcp.NetModel(
            True, dict(id='12345678-1234-5678-1234567890ab',
                       tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa',
                       subnets=[fake_subnet1],
                       ports=[fake_port1]))
        cache = self._new_cache(net)
        cache.put_port(fake_port2)
        self.assertEqual(2, len(cache.port_lookup))
        self.assertIn(fake_port2, net.ports)

    def test_put_port_existing(self):
        # Putting an already-present port must not duplicate it.
        net = dhcp.NetModel(
            True, dict(id='12345678-1234-5678-1234567890ab',
                       tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa',
                       subnets=[fake_subnet1],
                       ports=[fake_port1, fake_port2]))
        cache = self._new_cache(net)
        cache.put_port(fake_port2)
        self.assertEqual(2, len(cache.port_lookup))
        self.assertIn(fake_port2, net.ports)

    def test_remove_port_existing(self):
        net = dhcp.NetModel(
            True, dict(id='12345678-1234-5678-1234567890ab',
                       tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa',
                       subnets=[fake_subnet1],
                       ports=[fake_port1, fake_port2]))
        cache = self._new_cache(net)
        cache.remove_port(fake_port2)
        self.assertEqual(1, len(cache.port_lookup))
        self.assertNotIn(fake_port2, net.ports)

    def test_get_port_by_id(self):
        cache = self._new_cache(fake_network)
        self.assertEqual(fake_port1, cache.get_port_by_id(fake_port1.id))
# Lightweight plain-class fixtures (attribute access only, no DictModel)
# used by the device-manager tests that follow.


class FakePort1:
    # Only the id attribute is consulted by the tests.
    id = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'


class FakeV4Subnet:
    # A DHCP-enabled IPv4 subnet with a gateway.
    id = 'dddddddd-dddd-dddd-dddd-dddddddddddd'
    ip_version = 4
    cidr = '192.168.0.0/24'
    gateway_ip = '192.168.0.1'
    enable_dhcp = True


class FakeV4SubnetNoGateway:
    # Same as FakeV4Subnet but without a gateway address.
    id = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
    ip_version = 4
    cidr = '192.168.1.0/24'
    gateway_ip = None
    enable_dhcp = True


class FakeV4Network:
    # One gatewayed subnet and one port, with a DHCP namespace name.
    id = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
    subnets = [FakeV4Subnet()]
    ports = [FakePort1()]
    namespace = 'qdhcp-aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'


class FakeV4NetworkNoSubnet:
    # A network with no subnets or ports at all.
    id = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
    subnets = []
    ports = []


class FakeV4NetworkNoGateway:
    # A network whose only subnet has no gateway configured.
    id = 'cccccccc-cccc-cccc-cccc-cccccccccccc'
    subnets = [FakeV4SubnetNoGateway()]
    ports = [FakePort1()]
class TestDeviceManager(base.BaseTestCase):
def setUp(self):
super(TestDeviceManager, self).setUp()
config.register_interface_driver_opts_helper(cfg.CONF)
config.register_use_namespaces_opts_helper(cfg.CONF)
cfg.CONF.register_opts(dhcp_agent.DhcpAgent.OPTS)
cfg.CONF.register_opts(dhcp.OPTS)
cfg.CONF.set_override('interface_driver',
'neutron.agent.linux.interface.NullDriver')
config.register_root_helper(cfg.CONF)
cfg.CONF.set_override('use_namespaces', True)
cfg.CONF.set_override('enable_isolated_metadata', True)
self.ensure_device_is_ready_p = mock.patch(
'neutron.agent.linux.ip_lib.ensure_device_is_ready')
self.ensure_device_is_ready = (self.ensure_device_is_ready_p.start())
self.dvr_cls_p = mock.patch('neutron.agent.linux.interface.NullDriver')
self.iproute_cls_p = mock.patch('neutron.agent.linux.'
'ip_lib.IpRouteCommand')
driver_cls = self.dvr_cls_p.start()
iproute_cls = self.iproute_cls_p.start()
self.mock_driver = mock.MagicMock()
self.mock_driver.DEV_NAME_LEN = (
interface.LinuxInterfaceDriver.DEV_NAME_LEN)
self.mock_iproute = mock.MagicMock()
driver_cls.return_value = self.mock_driver
iproute_cls.return_value = self.mock_iproute
def _test_setup_helper(self, device_is_ready, net=None, port=None):
net = net or fake_network
port = port or fake_port1
plugin = mock.Mock()
plugin.create_dhcp_port.return_value = port or fake_port1
plugin.get_dhcp_port.return_value = port or fake_port1
self.ensure_device_is_ready.return_value = device_is_ready
self.mock_driver.get_device_name.return_value = 'tap12345678-12'
dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, plugin)
dh._set_default_route = mock.Mock()
interface_name = dh.setup(net)
self.assertEqual(interface_name, 'tap12345678-12')
plugin.assert_has_calls([
mock.call.create_dhcp_port(
{'port': {'name': '', 'admin_state_up': True,
'network_id': net.id, 'tenant_id': net.tenant_id,
'fixed_ips':
[{'subnet_id': fake_fixed_ip1.subnet_id}],
'device_id': mock.ANY}})])
expected_ips = ['172.9.9.9/24', '169.254.169.254/16']
expected = [
mock.call.get_device_name(port),
mock.call.init_l3(
'tap12345678-12',
expected_ips,
namespace=net.namespace)]
if not device_is_ready:
expected.insert(1,
mock.call.plug(net.id,
port.id,
'tap12345678-12',
'aa:bb:cc:dd:ee:ff',
namespace=net.namespace))
self.mock_driver.assert_has_calls(expected)
dh._set_default_route.assert_called_once_with(net, 'tap12345678-12')
def test_setup(self):
    """setup() works both with and without the metadata network enabled."""
    for metadata_enabled in (False, True):
        cfg.CONF.set_override('enable_metadata_network', metadata_enabled)
        self._test_setup_helper(False)
def test_setup_device_is_ready(self):
    """setup() skips plugging when the device is already ready."""
    self._test_setup_helper(True)
def test_create_dhcp_port_raise_conflict(self):
    """setup_dhcp_port raises Conflict when the plugin returns no port."""
    plugin = mock.Mock()
    dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, plugin)
    # A None return from create_dhcp_port means the port could not be made.
    plugin.create_dhcp_port.return_value = None
    self.assertRaises(exceptions.Conflict,
                      dh.setup_dhcp_port,
                      fake_network)
def test_create_dhcp_port_create_new(self):
    """A network without a matching DHCP port triggers port creation."""
    plugin = mock.Mock()
    dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, plugin)
    plugin.create_dhcp_port.return_value = fake_network.ports[0]
    dh.setup_dhcp_port(fake_network)
    # The new port is created on the enabled subnet only.
    plugin.assert_has_calls([
        mock.call.create_dhcp_port(
            {'port': {'name': '', 'admin_state_up': True,
                      'network_id':
                      fake_network.id, 'tenant_id': fake_network.tenant_id,
                      'fixed_ips':
                      [{'subnet_id': fake_fixed_ip1.subnet_id}],
                      'device_id': mock.ANY}})])
def test_create_dhcp_port_update_add_subnet(self):
    """Enabling DHCP on another subnet updates the existing DHCP port."""
    plugin = mock.Mock()
    dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, plugin)
    fake_network_copy = copy.deepcopy(fake_network)
    # Make the copy's port look like it belongs to this agent, and turn
    # DHCP on for the second subnet so an update is required.
    fake_network_copy.ports[0].device_id = dh.get_device_id(fake_network)
    fake_network_copy.subnets[1].enable_dhcp = True
    plugin.update_dhcp_port.return_value = fake_network.ports[0]
    dh.setup_dhcp_port(fake_network_copy)
    # The existing fixed IP is preserved and the new subnet is appended.
    port_body = {'port': {
        'network_id': fake_network.id,
        'fixed_ips': [{'subnet_id': fake_fixed_ip1.subnet_id,
                       'ip_address': fake_fixed_ip1.ip_address},
                      {'subnet_id': fake_subnet2.id}]}}
    plugin.assert_has_calls([
        mock.call.update_dhcp_port(fake_network_copy.ports[0].id,
                                   port_body)])
def test_update_dhcp_port_raises_conflict(self):
    """A failed port update (None result) surfaces as a Conflict error."""
    plugin = mock.Mock()
    dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, plugin)
    fake_network_copy = copy.deepcopy(fake_network)
    fake_network_copy.ports[0].device_id = dh.get_device_id(fake_network)
    fake_network_copy.subnets[1].enable_dhcp = True
    plugin.update_dhcp_port.return_value = None
    self.assertRaises(exceptions.Conflict,
                      dh.setup_dhcp_port,
                      fake_network_copy)
def test_create_dhcp_port_no_update_or_create(self):
    """No port API calls are made when the DHCP port is already correct."""
    plugin = mock.Mock()
    dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, plugin)
    fake_network_copy = copy.deepcopy(fake_network)
    fake_network_copy.ports[0].device_id = dh.get_device_id(fake_network)
    dh.setup_dhcp_port(fake_network_copy)
    # Bug fix: the original checked plugin.setup_dhcp_port.called, but
    # DeviceManager never invokes a plugin method with that name, so the
    # assertion was vacuously true.  The call that must NOT happen here
    # is create_dhcp_port.
    self.assertFalse(plugin.create_dhcp_port.called)
    self.assertFalse(plugin.update_dhcp_port.called)
def test_destroy(self):
    """destroy() unplugs the tap device and releases the DHCP port."""
    fake_net = dhcp.NetModel(
        True, dict(id='12345678-1234-5678-1234567890ab',
                   tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa'))

    fake_port = dhcp.DictModel(
        dict(id='12345678-1234-aaaa-1234567890ab',
             mac_address='aa:bb:cc:dd:ee:ff'))

    with mock.patch('neutron.agent.linux.interface.NullDriver') as dvr_cls:
        mock_driver = mock.MagicMock()
        mock_driver.get_device_name.return_value = 'tap12345678-12'
        dvr_cls.return_value = mock_driver

        plugin = mock.Mock()
        plugin.get_dhcp_port.return_value = fake_port

        dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, plugin)
        dh.destroy(fake_net, 'tap12345678-12')

        # The interface driver is built once with the agent config.
        dvr_cls.assert_called_once_with(cfg.CONF)
        mock_driver.assert_has_calls(
            [mock.call.unplug('tap12345678-12',
                              namespace='qdhcp-' + fake_net.id)])
        plugin.assert_has_calls(
            [mock.call.release_dhcp_port(fake_net.id, mock.ANY)])
def test_get_interface_name(self):
    """get_interface_name() asks the driver only; no plugin calls occur."""
    fake_net = dhcp.NetModel(
        True, dict(id='12345678-1234-5678-1234567890ab',
                   tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa'))

    fake_port = dhcp.DictModel(
        dict(id='12345678-1234-aaaa-1234567890ab',
             mac_address='aa:bb:cc:dd:ee:ff'))

    with mock.patch('neutron.agent.linux.interface.NullDriver') as dvr_cls:
        mock_driver = mock.MagicMock()
        mock_driver.get_device_name.return_value = 'tap12345678-12'
        dvr_cls.return_value = mock_driver

        plugin = mock.Mock()
        plugin.get_dhcp_port.return_value = fake_port

        dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, plugin)
        dh.get_interface_name(fake_net, fake_port)

        dvr_cls.assert_called_once_with(cfg.CONF)
        mock_driver.assert_has_calls(
            [mock.call.get_device_name(fake_port)])

        # The name is derived purely from the port; the plugin is untouched.
        self.assertEqual(len(plugin.mock_calls), 0)
def test_get_device_id(self):
    """get_device_id() produces 'dhcp<host-uuid>-<network-id>'."""
    fake_net = dhcp.NetModel(
        True, dict(id='12345678-1234-5678-1234567890ab',
                   tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa'))
    expected = ('dhcp1ae5f96c-c527-5079-82ea-371a01645457-12345678-1234-'
                '5678-1234567890ab')

    with mock.patch('uuid.uuid5') as uuid5:
        uuid5.return_value = '1ae5f96c-c527-5079-82ea-371a01645457'

        dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
        self.assertEqual(dh.get_device_id(fake_net), expected)
        # Bug fix: the original wrote uuid5.called_once_with(...), which is
        # not an assertion at all -- it merely creates a child mock named
        # "called_once_with".  Assert the real call, after get_device_id()
        # has actually invoked uuid.uuid5.
        uuid5.assert_called_once_with(uuid.NAMESPACE_DNS, cfg.CONF.host)
def test_update(self):
    """update() sets the default route only when namespaces are in use
    and the metadata network feature does not own the gateway.
    """
    # Try with namespaces and no metadata network
    cfg.CONF.set_override('use_namespaces', True)
    cfg.CONF.set_override('enable_metadata_network', False)
    dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
    dh._set_default_route = mock.Mock()
    network = mock.Mock()

    dh.update(network, 'ns-12345678-12')

    dh._set_default_route.assert_called_once_with(network,
                                                  'ns-12345678-12')

    # No namespaces, shouldn't set default route.
    cfg.CONF.set_override('use_namespaces', False)
    cfg.CONF.set_override('enable_metadata_network', False)
    dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
    dh._set_default_route = mock.Mock()

    dh.update(FakeV4Network(), 'tap12345678-12')

    self.assertFalse(dh._set_default_route.called)

    # Meta data network enabled, don't interfere with its gateway.
    cfg.CONF.set_override('use_namespaces', True)
    cfg.CONF.set_override('enable_metadata_network', True)
    dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
    dh._set_default_route = mock.Mock()

    dh.update(FakeV4Network(), 'ns-12345678-12')

    self.assertTrue(dh._set_default_route.called)

    # For completeness
    cfg.CONF.set_override('use_namespaces', False)
    cfg.CONF.set_override('enable_metadata_network', True)
    dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
    dh._set_default_route = mock.Mock()

    dh.update(FakeV4Network(), 'ns-12345678-12')

    self.assertFalse(dh._set_default_route.called)
def test_set_default_route(self):
    """A subnet with a gateway and no current default adds the gateway."""
    dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
    with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice:
        device = mock.Mock()
        mock_IPDevice.return_value = device
        device.route.get_gateway.return_value = None
        # Basic one subnet with gateway.
        network = FakeV4Network()
        dh._set_default_route(network, 'tap-name')

        self.assertEqual(device.route.get_gateway.call_count, 1)
        self.assertFalse(device.route.delete_gateway.called)
        device.route.add_gateway.assert_called_once_with('192.168.0.1')
def test_set_default_route_no_subnet(self):
    """With no subnets and no existing default route, nothing changes."""
    dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
    with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice:
        device = mock.Mock()
        mock_IPDevice.return_value = device
        device.route.get_gateway.return_value = None
        network = FakeV4NetworkNoSubnet()
        network.namespace = 'qdhcp-1234'
        dh._set_default_route(network, 'tap-name')

        self.assertEqual(device.route.get_gateway.call_count, 1)
        self.assertFalse(device.route.delete_gateway.called)
        self.assertFalse(device.route.add_gateway.called)
def test_set_default_route_no_subnet_delete_gateway(self):
    """With no subnets, an existing default route is removed."""
    dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
    with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice:
        device = mock.Mock()
        mock_IPDevice.return_value = device
        device.route.get_gateway.return_value = dict(gateway='192.168.0.1')
        network = FakeV4NetworkNoSubnet()
        network.namespace = 'qdhcp-1234'
        dh._set_default_route(network, 'tap-name')

        self.assertEqual(device.route.get_gateway.call_count, 1)
        device.route.delete_gateway.assert_called_once_with('192.168.0.1')
        self.assertFalse(device.route.add_gateway.called)
def test_set_default_route_no_gateway(self):
    """A subnet without a gateway drops the stale default route."""
    dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
    with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice:
        device = mock.Mock()
        mock_IPDevice.return_value = device
        device.route.get_gateway.return_value = dict(gateway='192.168.0.1')
        network = FakeV4NetworkNoGateway()
        network.namespace = 'qdhcp-1234'
        dh._set_default_route(network, 'tap-name')

        self.assertEqual(device.route.get_gateway.call_count, 1)
        device.route.delete_gateway.assert_called_once_with('192.168.0.1')
        self.assertFalse(device.route.add_gateway.called)
def test_set_default_route_do_nothing(self):
    """When the current gateway already matches, no route calls are made."""
    dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
    with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice:
        device = mock.Mock()
        mock_IPDevice.return_value = device
        device.route.get_gateway.return_value = dict(gateway='192.168.0.1')
        network = FakeV4Network()
        dh._set_default_route(network, 'tap-name')

        self.assertEqual(device.route.get_gateway.call_count, 1)
        self.assertFalse(device.route.delete_gateway.called)
        self.assertFalse(device.route.add_gateway.called)
def test_set_default_route_change_gateway(self):
    """A differing current gateway is replaced (add, not delete+add)."""
    dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
    with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice:
        device = mock.Mock()
        mock_IPDevice.return_value = device
        device.route.get_gateway.return_value = dict(gateway='192.168.0.2')
        network = FakeV4Network()
        dh._set_default_route(network, 'tap-name')

        self.assertEqual(device.route.get_gateway.call_count, 1)
        # add_gateway overwrites the default route, so no delete is needed.
        self.assertFalse(device.route.delete_gateway.called)
        device.route.add_gateway.assert_called_once_with('192.168.0.1')
def test_set_default_route_two_subnets(self):
    # Try two subnets. Should set gateway from the first.
    dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
    with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice:
        device = mock.Mock()
        mock_IPDevice.return_value = device
        device.route.get_gateway.return_value = None
        network = FakeV4Network()
        subnet2 = FakeV4Subnet()
        subnet2.gateway_ip = '192.168.1.1'
        # subnet2 is listed first, so its gateway wins.
        network.subnets = [subnet2, FakeV4Subnet()]
        dh._set_default_route(network, 'tap-name')

        self.assertEqual(device.route.get_gateway.call_count, 1)
        self.assertFalse(device.route.delete_gateway.called)
        device.route.add_gateway.assert_called_once_with('192.168.1.1')
class TestDictModel(base.BaseTestCase):
    """Exercise the attribute-style access provided by dhcp.DictModel."""

    def test_basic_dict(self):
        """Top-level keys become attributes."""
        model = dhcp.DictModel(dict(a=1, b=2))
        self.assertEqual(model.a, 1)
        self.assertEqual(model.b, 2)

    def test_dict_has_sub_dict(self):
        """Nested dicts are wrapped recursively."""
        model = dhcp.DictModel(dict(a=dict(b=2)))
        self.assertEqual(model.a.b, 2)

    def test_dict_contains_list(self):
        """Lists of plain values are passed through untouched."""
        model = dhcp.DictModel(dict(a=[1, 2]))
        self.assertEqual(model.a, [1, 2])

    def test_dict_contains_list_of_dicts(self):
        """Dicts inside lists are wrapped as well."""
        model = dhcp.DictModel(dict(a=[dict(b=2), dict(c=3)]))
        self.assertEqual(model.a[0].b, 2)
        self.assertEqual(model.a[1].c, 3)
class TestNetModel(base.BaseTestCase):
    """Verify the namespace naming rules of dhcp.NetModel."""

    def test_ns_name(self):
        """With namespaces enabled the name is 'qdhcp-<network id>'."""
        net = dhcp.NetModel(True, {'id': 'foo'})
        self.assertEqual(net.namespace, 'qdhcp-foo')

    def test_ns_name_false_namespace(self):
        """A falsy namespace flag yields no namespace."""
        net = dhcp.NetModel(False, {'id': 'foo'})
        self.assertIsNone(net.namespace)

    def test_ns_name_none_namespace(self):
        """None behaves the same as False: no namespace."""
        net = dhcp.NetModel(None, {'id': 'foo'})
        self.assertIsNone(net.namespace)
| subramani95/neutron | neutron/tests/unit/test_dhcp_agent.py | Python | apache-2.0 | 63,931 |
"""Configure an azure search instance
-------------------------------------
How to use
-------------------------------------
python provision_azsearch.py -k <AZURE_SEARCH_ADMIN_KEY> -u <AZURE_SEARCH_URL> -c <SAVED_AZURE_search_resource> -d <DATA_SOURCE_CONFIG> [-b <skip|update|delete>]
"""
import os
import sys
import json
import requests
import getopt
def usage():
print 'Usage: %s -k <AZURE_SEARCH_ADMIN_KEY> -u <AZURE_SEARCH_URL -c <SAVED_AZURE_search_resource> -d <DATA_SOURCE_CONFIG> [-b <skip|update|delete>] [-p]' % os.path.basename(sys.argv[0])
print ''
print ' -k, --key \n Specifies the admin key for the azure search instance'
print ' -u, --url \n Specifies the url for the azre search instance'
print ' -c, --savedconfig \n Specifies the json file containig a saved Azure Search config'
print ' -d, --datasourcecofnig \n Specifies the json file containing data source configuration (i.e. connection string values)',
print ' -a, --apiversion \n Specifies the api version to use for azure search requests'
print ' -b, --behavior \n Specifies the behavior when encountering existing resources'
print " 'skip' -- Skip the resource (leave it as is)"
print " 'update' -- Attempt to update the resource in place (PUT)"
print " 'delete' -- Delete the resource first then create it as specified"
print ' -p, --purge \n Delete all indexes, datasources and indexers in the instance before configuring'
def get_search_resource(url, admin_key, apiversion, resource):
    """GET <url>/<resource> and return the 'value' list of the JSON reply.

    Raises requests.exceptions.HTTPError on a non-2xx response.
    """
    response = requests.get(url + '/' + resource,
                            headers={'api-key': admin_key},
                            params={'api-version': apiversion})
    response.raise_for_status()
    values = response.json()['value']
    response.close()
    return values
def delete_search_resource(url, admin_key, apiversion, resource):
    # Thin wrapper: issue an HTTP DELETE for the named resource (no payload).
    return requestsaction_search_resource(requests.delete, url, admin_key, apiversion, resource, None)
def post_search_resource(url, admin_key, apiversion, resource, data):
    # Thin wrapper: create a resource via HTTP POST with a JSON body.
    return requestsaction_search_resource(requests.post, url, admin_key, apiversion, resource, data)
def put_search_resource(url, admin_key, apiversion, resource, data):
    # Thin wrapper: update a resource in place via HTTP PUT with a JSON body.
    return requestsaction_search_resource(requests.put, url, admin_key, apiversion, resource, data)
def requestsaction_search_resource(requests_action, url, admin_key, apiversion, resource, data):
    """Invoke a requests verb against <url>/<resource> with a JSON body.

    :param requests_action: a requests function (get/post/put/delete)
    :param data: payload serialized with json.dumps; note that data=None
        serializes to the JSON literal "null" and is still sent as the body
        (e.g. for DELETE calls).
    Re-raises HTTPError after printing server-side diagnostics.
    """
    params = {'api-version': apiversion}
    headers = { 'api-key': admin_key, "content-type": "application/json"}
    response = requests_action(url + "/" + resource, headers=headers, params=params, data=json.dumps(data))
    try:
        response.raise_for_status()
    except requests.exceptions.HTTPError,e:
        # Dump as much detail as possible so provisioning failures can be
        # debugged straight from the console output before re-raising.
        print "\n\nERROR\n\n"
        print "URL: " + response.url
        print "STATUS_CODE: " + str(response.status_code)
        print "REASON: " + response.reason
        print "TEXT: " + response.text
        raise e
    finally:
        response.close()
def main():
INDEXES = "indexes"
INDEXERS = "indexers"
DATASOURCES = "datasources"
ALL_CONFIG_TYPES = [INDEXES, DATASOURCES, INDEXERS]
try:
opts, _ = getopt.getopt(sys.argv[1:], "hk:u:c:d:a:b:p", ["help", "key=", "url=", "savedconfig=", "datasourceconfig=", "apiversion=", "behavior=", "purge"])
except getopt.GetoptError:
usage()
sys.exit(2)
key = ""
url = ""
apiversion = "2015-02-28"
savedconfigfilename = ""
datasourceconfigfilename = "datasourceconfigfilename"
behavior = "skip"
purge = False
for o, a in opts:
if o in ("-h", "--help"):
usage()
sys.exit()
elif o in ("-k", "--key"):
key = a
elif o in ("-u", "--url"):
url = a
elif o in ("-c", "--savedconfig"):
savedconfigfilename = a
elif o in ("-d", "--datasourceconfig"):
datasourceconfigfilename = a
elif o in("-a", "--apiversion"):
apiversion = a
elif o in ("-b", "--behavior"):
behavior = a
elif o in ("-p", "--purge"):
purge = True
if url == "" or key == "" or savedconfigfilename == "" or datasourceconfigfilename == "":
usage()
sys.exit(2)
if behavior not in ['skip', 'delete', 'update']:
usage()
sys.exit(2)
print 'Key = %s, URL = %s, Saved config file = %s, Data config file = %s' % (key, url, savedconfigfilename, datasourceconfigfilename)
existingconfigByType = {}
for configtype in ALL_CONFIG_TYPES:
existingconfigByType[configtype] = get_search_resource(url, key, apiversion, configtype)
savedconfigfile = open(savedconfigfilename, 'r')
savedconfig = json.loads(savedconfigfile.read())
savedconfigfile.close()
dsconfigfile = open(datasourceconfigfilename, 'r')
dsconfig = json.loads(dsconfigfile.read())
dsconfigfile.close()
if purge:
try:
_ = raw_input("WARNING: About to delete all the configs for the Azure Search instance. ... Enter Ctrl+C to abort!")
except KeyboardInterrupt:
sys.exit()
for configtype in ALL_CONFIG_TYPES:
for existingconfig in existingconfigByType[configtype]:
resource = "%s/%s" % (configtype, existingconfig['name'])
sys.stdout.write("DELETING %s ... " % resource)
delete_search_resource(url, key, apiversion, resource )
print "OK"
print "Inserting any matching datasource connection strings."
for datasource in savedconfig[DATASOURCES]:
for ds in dsconfig:
if datasource['type'] == ds['type']:
if datasource['name'] in ds['sources']:
print 'Updating datasource %s connectionString' % (datasource['name'])
datasource['credentials']['connectionString'] = ds['connectionString']
print "\n"
for configtype in ALL_CONFIG_TYPES:
print "Provisioning %s" % configtype
existingconfignames = [exconfig['name'] for exconfig in existingconfigByType[configtype]]
for config in savedconfig[configtype]:
configname = config['name']
resource = "%s/%s" % (configtype, configname)
if configname in existingconfignames:
if behavior == 'skip':
print "%s already exists, skipping." % resource
continue
elif behavior == 'update':
sys.stdout.write("UPDATING %s ... " % resource)
put_search_resource(url, key, apiversion, resource, config)
print "OK"
continue
elif behavior == 'delete':
sys.stdout.write("DELETING %s ... " % resource)
delete_search_resource(url, key, apiversion, resource)
print "OK"
sys.stdout.write("CREATING %s ... " % resource)
post_search_resource(url, key, apiversion, configtype, config)
print "OK"
print "\n\n"
sys.exit()
# Script entry point.
if __name__ == "__main__":
    main()
| obsoleted/azsearch_management | provision_azsearch.py | Python | mit | 7,204 |
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Module metadata consumed by Ansible's documentation/build tooling.
ANSIBLE_METADATA = {'status': ['preview'],
                    'supported_by': 'community',
                    'version': '1.0'}
DOCUMENTATION = '''
---
module: lambda
short_description: Manage AWS Lambda functions
description:
- Allows for the management of Lambda functions.
version_added: '2.2'
requirements: [ boto3 ]
options:
name:
description:
- The name you want to assign to the function you are uploading. Cannot be changed.
required: true
state:
description:
- Create or delete Lambda function
required: false
default: present
choices: [ 'present', 'absent' ]
runtime:
description:
- The runtime environment for the Lambda function you are uploading. Required when creating a function. Use parameters as described in boto3 docs. Current example runtime environments are nodejs, nodejs4.3, java8 or python2.7
required: true
role:
description:
- The Amazon Resource Name (ARN) of the IAM role that Lambda assumes when it executes your function to access any other Amazon Web Services (AWS) resources. You may use the bare ARN if the role belongs to the same AWS account.
default: null
handler:
description:
- The function within your code that Lambda calls to begin execution
default: null
zip_file:
description:
- A .zip file containing your deployment package
required: false
default: null
aliases: [ 'src' ]
s3_bucket:
description:
- Amazon S3 bucket name where the .zip file containing your deployment package is stored
required: false
default: null
s3_key:
description:
- The Amazon S3 object (the deployment package) key name you want to upload
required: false
default: null
s3_object_version:
description:
- The Amazon S3 object (the deployment package) version you want to upload.
required: false
default: null
description:
description:
- A short, user-defined function description. Lambda does not use this value. Assign a meaningful description as you see fit.
required: false
default: null
timeout:
description:
- The function execution time at which Lambda should terminate the function.
required: false
default: 3
memory_size:
description:
- The amount of memory, in MB, your Lambda function is given
required: false
default: 128
vpc_subnet_ids:
description:
- List of subnet IDs to run Lambda function in. Use this option if you need to access resources in your VPC. Leave empty if you don't want to run the function in a VPC.
required: false
default: None
vpc_security_group_ids:
description:
- List of VPC security group IDs to associate with the Lambda function. Required when vpc_subnet_ids is used.
required: false
default: None
author:
- 'Steyn Huizinga (@steynovich)'
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Create Lambda functions
tasks:
- name: looped creation
lambda:
name: '{{ item.name }}'
state: present
zip_file: '{{ item.zip_file }}'
runtime: 'python2.7'
role: 'arn:aws:iam::987654321012:role/lambda_basic_execution'
handler: 'hello_python.my_handler'
vpc_subnet_ids:
- subnet-123abcde
- subnet-edcba321
vpc_security_group_ids:
- sg-123abcde
- sg-edcba321
with_items:
- name: HelloWorld
zip_file: hello-code.zip
- name: ByeBye
zip_file: bye-code.zip
# Basic Lambda function deletion
tasks:
- name: Delete Lambda functions HelloWorld and ByeBye
lambda:
name: '{{ item }}'
state: absent
with_items:
- HelloWorld
- ByeBye
'''
RETURN = '''
output:
description: the data returned by get_function in boto3
returned: success
type: dict
sample:
'code':
{
'location': 'an S3 URL',
'repository_type': 'S3',
}
'configuration':
{
'function_name': 'string',
'function_arn': 'string',
'runtime': 'nodejs',
'role': 'string',
'handler': 'string',
'code_size': 123,
'description': 'string',
'timeout': 123,
'memory_size': 123,
'last_modified': 'string',
'code_sha256': 'string',
'version': 'string',
}
'''
# Import from Python standard library
import base64
import hashlib

# botocore/boto3 are optional at import time; their availability is checked
# in main() so the module can fail with a clean JSON error message instead
# of an ImportError traceback.
try:
    import botocore
    HAS_BOTOCORE = True
except ImportError:
    HAS_BOTOCORE = False

try:
    import boto3
    HAS_BOTO3 = True
except ImportError:
    HAS_BOTO3 = False
def get_current_function(connection, function_name, qualifier=None):
    """Return the boto3 get_function response, or None if the lookup fails.

    :param connection: boto3 Lambda client
    :param function_name: name of the Lambda function to look up
    :param qualifier: optional version/alias qualifier
    """
    request = {'FunctionName': function_name}
    if qualifier is not None:
        request['Qualifier'] = qualifier
    try:
        return connection.get_function(**request)
    except botocore.exceptions.ClientError:
        # Treat "not found" (and any other client error) as absence.
        return None
def sha256sum(filename):
    """Return the base64-encoded SHA256 digest of a file.

    This matches the 'CodeSha256' checksum format that AWS Lambda reports
    for a deployment package, so local and remote code can be compared.

    :param filename: path of the file to hash
    :return: base64 digest string
    """
    hasher = hashlib.sha256()
    with open(filename, 'rb') as f:
        # Read in chunks so large deployment zips are not loaded into
        # memory all at once (the original read the whole file).
        for chunk in iter(lambda: f.read(1024 * 1024), b''):
            hasher.update(chunk)
    # Note: despite the historical local name 'hex_digest', this value is
    # base64, not hex -- that is what the Lambda API expects.
    return base64.b64encode(hasher.digest()).decode('utf-8')
def main():
    """Entry point: create, update or delete an AWS Lambda function.

    Reads the module parameters, resolves the IAM role ARN, then diffs the
    desired state against the live function configuration/code, applying
    only the changes that are needed.
    """
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        name=dict(type='str', required=True),
        state=dict(type='str', default='present', choices=['present', 'absent']),
        runtime=dict(type='str', required=True),
        role=dict(type='str', default=None),
        handler=dict(type='str', default=None),
        zip_file=dict(type='str', default=None, aliases=['src']),
        s3_bucket=dict(type='str'),
        s3_key=dict(type='str'),
        s3_object_version=dict(type='str', default=None),
        description=dict(type='str', default=''),
        timeout=dict(type='int', default=3),
        memory_size=dict(type='int', default=128),
        vpc_subnet_ids=dict(type='list', default=None),
        vpc_security_group_ids=dict(type='list', default=None),
    )
    )

    # Code comes either from a local zip or from S3, never both.
    mutually_exclusive = [['zip_file', 's3_key'],
                          ['zip_file', 's3_bucket'],
                          ['zip_file', 's3_object_version']]

    required_together = [['s3_key', 's3_bucket'],
                         ['vpc_subnet_ids', 'vpc_security_group_ids']]

    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True,
                           mutually_exclusive=mutually_exclusive,
                           required_together=required_together)

    name = module.params.get('name')
    state = module.params.get('state').lower()
    runtime = module.params.get('runtime')
    role = module.params.get('role')
    handler = module.params.get('handler')
    s3_bucket = module.params.get('s3_bucket')
    s3_key = module.params.get('s3_key')
    s3_object_version = module.params.get('s3_object_version')
    zip_file = module.params.get('zip_file')
    description = module.params.get('description')
    timeout = module.params.get('timeout')
    memory_size = module.params.get('memory_size')
    vpc_subnet_ids = module.params.get('vpc_subnet_ids')
    vpc_security_group_ids = module.params.get('vpc_security_group_ids')

    check_mode = module.check_mode
    changed = False

    if not HAS_BOTOCORE:
        module.fail_json(msg='Python module "botocore" is missing, please install it')

    if not HAS_BOTO3:
        module.fail_json(msg='Python module "boto3" is missing, please install it')

    region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=True)
    if not region:
        module.fail_json(msg='region must be specified')

    try:
        client = boto3_conn(module, conn_type='client', resource='lambda',
                            region=region, endpoint=ec2_url, **aws_connect_kwargs)
    except (botocore.exceptions.ClientError, botocore.exceptions.ValidationError) as e:
        module.fail_json(msg=str(e))

    # Bug fix: the original called role.startswith() unconditionally, which
    # raised AttributeError whenever role was omitted (e.g. state=absent).
    role_arn = None
    if role:
        if role.startswith('arn:aws:iam'):
            role_arn = role
        else:
            # Bare role name: get the account ID and assemble the full ARN.
            try:
                iam_client = boto3_conn(module, conn_type='client', resource='iam',
                                        region=region, endpoint=ec2_url, **aws_connect_kwargs)
                account_id = iam_client.get_user()['User']['Arn'].split(':')[4]
                role_arn = 'arn:aws:iam::{0}:role/{1}'.format(account_id, role)
            except (botocore.exceptions.ClientError, botocore.exceptions.ValidationError) as e:
                module.fail_json(msg=str(e))

    # Get function configuration if present, None otherwise.
    current_function = get_current_function(client, name)

    # Update existing Lambda function
    if state == 'present' and current_function:

        # Get current state
        current_config = current_function['Configuration']
        current_version = None

        # Update function configuration
        func_kwargs = {'FunctionName': name}

        # Update configuration if needed: only include attributes that
        # actually differ so len(func_kwargs) > 2 below means "has changes".
        if role_arn and current_config['Role'] != role_arn:
            func_kwargs.update({'Role': role_arn})
        if handler and current_config['Handler'] != handler:
            func_kwargs.update({'Handler': handler})
        if description and current_config['Description'] != description:
            func_kwargs.update({'Description': description})
        if timeout and current_config['Timeout'] != timeout:
            func_kwargs.update({'Timeout': timeout})
        if memory_size and current_config['MemorySize'] != memory_size:
            func_kwargs.update({'MemorySize': memory_size})

        # Check for unsupported mutation
        if current_config['Runtime'] != runtime:
            module.fail_json(msg='Cannot change runtime. Please recreate the function')

        # If VPC configuration is desired
        if vpc_subnet_ids or vpc_security_group_ids:
            if len(vpc_subnet_ids) < 1:
                module.fail_json(msg='At least 1 subnet is required')

            if len(vpc_security_group_ids) < 1:
                module.fail_json(msg='At least 1 security group is required')

            if 'VpcConfig' in current_config:
                # Compare VPC config with current config
                current_vpc_subnet_ids = current_config['VpcConfig']['SubnetIds']
                current_vpc_security_group_ids = current_config['VpcConfig']['SecurityGroupIds']

                # Order-insensitive comparison of the ID lists.
                subnet_net_id_changed = sorted(vpc_subnet_ids) != sorted(current_vpc_subnet_ids)
                vpc_security_group_ids_changed = sorted(vpc_security_group_ids) != sorted(current_vpc_security_group_ids)

            if 'VpcConfig' not in current_config or subnet_net_id_changed or vpc_security_group_ids_changed:
                func_kwargs.update({'VpcConfig':
                                    {'SubnetIds': vpc_subnet_ids,
                                     'SecurityGroupIds': vpc_security_group_ids}})
        else:
            # No VPC configuration is desired, assure VPC config is empty
            # when present in current config
            if ('VpcConfig' in current_config and
                    'VpcId' in current_config['VpcConfig'] and
                    current_config['VpcConfig']['VpcId'] != ''):
                func_kwargs.update({'VpcConfig': {'SubnetIds': [], 'SecurityGroupIds': []}})

        # Upload new configuration if configuration has changed
        if len(func_kwargs) > 2:
            try:
                if not check_mode:
                    response = client.update_function_configuration(**func_kwargs)
                    current_version = response['Version']
                changed = True
            except (botocore.exceptions.ParamValidationError, botocore.exceptions.ClientError) as e:
                module.fail_json(msg=str(e))

        # Update code configuration
        code_kwargs = {'FunctionName': name, 'Publish': True}

        # Update S3 location
        if s3_bucket and s3_key:
            # If function is stored on S3 always update
            code_kwargs.update({'S3Bucket': s3_bucket, 'S3Key': s3_key})

            # If S3 Object Version is given
            if s3_object_version:
                code_kwargs.update({'S3ObjectVersion': s3_object_version})

        # Compare local checksum, update remote code when different
        elif zip_file:
            local_checksum = sha256sum(zip_file)
            remote_checksum = current_config['CodeSha256']

            # Only upload new code when local code is different compared to
            # the remote code
            if local_checksum != remote_checksum:
                try:
                    with open(zip_file, 'rb') as f:
                        encoded_zip = f.read()
                    code_kwargs.update({'ZipFile': encoded_zip})
                except IOError as e:
                    module.fail_json(msg=str(e))

        # Upload new code if needed (e.g. code checksum has changed)
        if len(code_kwargs) > 2:
            try:
                if not check_mode:
                    response = client.update_function_code(**code_kwargs)
                    current_version = response['Version']
                changed = True
            except (botocore.exceptions.ParamValidationError, botocore.exceptions.ClientError) as e:
                module.fail_json(msg=str(e))

        # Describe function code and configuration
        response = get_current_function(client, name, qualifier=current_version)
        if not response:
            module.fail_json(msg='Unable to get function information after updating')

        # We're done
        module.exit_json(changed=changed, **camel_dict_to_snake_dict(response))

    # Function doesn't exists, create new Lambda function
    elif state == 'present':
        if s3_bucket and s3_key:
            # If function is stored on S3
            code = {'S3Bucket': s3_bucket,
                    'S3Key': s3_key}
            if s3_object_version:
                code.update({'S3ObjectVersion': s3_object_version})
        elif zip_file:
            # If function is stored in local zipfile
            try:
                with open(zip_file, 'rb') as f:
                    zip_content = f.read()

                code = {'ZipFile': zip_content}
            except IOError as e:
                module.fail_json(msg=str(e))

        else:
            module.fail_json(msg='Either S3 object or path to zipfile required')

        func_kwargs = {'FunctionName': name,
                       'Description': description,
                       'Publish': True,
                       'Runtime': runtime,
                       'Role': role_arn,
                       'Handler': handler,
                       'Code': code,
                       'Timeout': timeout,
                       'MemorySize': memory_size,
                       }

        # If VPC configuration is given
        if vpc_subnet_ids or vpc_security_group_ids:
            if len(vpc_subnet_ids) < 1:
                module.fail_json(msg='At least 1 subnet is required')

            if len(vpc_security_group_ids) < 1:
                module.fail_json(msg='At least 1 security group is required')

            func_kwargs.update({'VpcConfig': {'SubnetIds': vpc_subnet_ids,
                                              'SecurityGroupIds': vpc_security_group_ids}})

        # Bug fix: current_version must be initialized here; previously it
        # was only assigned inside "if not check_mode", so check mode hit a
        # NameError at the get_current_function() call below.
        current_version = None

        # Finally try to create function
        try:
            if not check_mode:
                response = client.create_function(**func_kwargs)
                current_version = response['Version']
            changed = True
        except (botocore.exceptions.ParamValidationError, botocore.exceptions.ClientError) as e:
            module.fail_json(msg=str(e))

        # NOTE(review): in check mode the function was never created, so
        # this lookup returns None and the module reports a failure; that
        # matches the original behavior and is left unchanged here.
        response = get_current_function(client, name, qualifier=current_version)
        if not response:
            module.fail_json(msg='Unable to get function information after creating')
        module.exit_json(changed=changed, **camel_dict_to_snake_dict(response))

    # Delete existing Lambda function
    if state == 'absent' and current_function:
        try:
            if not check_mode:
                client.delete_function(FunctionName=name)
            changed = True
        except (botocore.exceptions.ParamValidationError, botocore.exceptions.ClientError) as e:
            module.fail_json(msg=str(e))

        module.exit_json(changed=changed)

    # Function already absent, do nothing
    elif state == 'absent':
        module.exit_json(changed=changed)
# Ansible's legacy convention: utility imports live at the bottom of the
# module, after the documentation strings.
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *

if __name__ == '__main__':
    main()
| emersonsoftware/ansiblefork | lib/ansible/modules/cloud/amazon/lambda.py | Python | gpl-3.0 | 17,176 |
# Case Conductor is a Test Case Management system.
# Copyright (C) 2011 uTest Inc.
#
# This file is part of Case Conductor.
#
# Case Conductor is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Case Conductor is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Case Conductor. If not, see <http://www.gnu.org/licenses/>.
"""
Core objects for accessing staticData API.
"""
import urlparse
import remoteobjects
from ..core.conf import conf
from ..core.api import ObjectMixin, fields
class CodeValue(ObjectMixin, remoteobjects.RemoteObject):
    """A single static-data code value exposed by the staticData API.

    Carries an ``id``, a human-readable ``description`` and a ``sortOrder``.
    """
    id = fields.Field()
    description = fields.CharField()
    sortOrder = fields.Field()

    def __unicode__(self):
        # Render as the human-readable description.
        return self.description

    def __repr__(self):
        return "<CodeValue: %s>" % self
class ArrayOfCodeValue(ObjectMixin, remoteobjects.ListObject):
    """A list of :class:`CodeValue` objects from the staticData/values API."""

    api_base_url = urlparse.urljoin(conf.CC_API_BASE, "staticData/values/")

    entries = fields.List(fields.Object(CodeValue))

    def update_from_dict(self, data):
        """Unwrap the BadgerFish-style JSON envelope, then delegate upward.

        The API nests the real list of code-value dicts like this::

            {"ns1.ArrayOfCodeValue": [
                {"@xsi.type": "ns1:ArrayOfCodeValue",
                 "ns1.CodeValue": [ {...}, {...}, ... ]}]}

        Only the inner ``ns1.CodeValue`` list is passed on to the parent
        ``ListObject.update_from_dict``.
        """
        if "ns1.ArrayOfCodeValue" in data:
            envelope = data["ns1.ArrayOfCodeValue"]
            data = envelope[0]["ns1.CodeValue"]
            # BadgerFish XML-to-JSON translation
            # (http://ajaxian.com/archives/badgerfish-translating-xml-to-json)
            # collapses length-1 lists to a bare dict, so re-wrap in a list.
            if "@xsi.type" in data:
                data = [data]
        return super(ArrayOfCodeValue, self).update_from_dict(data)
| mozilla/caseconductor-ui | ccui/static/models.py | Python | gpl-3.0 | 2,951 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2011-today TaPo-IT (http://tapo-it.at) All Rights Reserved.
# Author: Wolfgang Taferner ([email protected])
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm, fields
from openerp.tools.translate import _
import logging
_logger = logging.getLogger(__name__)
class hr_attendance(orm.Model):
    """Extension of ``hr.attendance`` that links attendances to project task
    work, guards confirmed timesheets, and keeps sign_in/sign_out pairs
    consistent when entries are modified or deleted.
    """
    _inherit = 'hr.attendance'

    _columns = {
        # Marks this attendance record as a break.
        'pause': fields.boolean('Break')
    }
    _defaults = {
        'name': fields.datetime.now
    }
    _order = "name DESC"

    # not required to inherit create
    def create(self, cr, uid, vals, context=None):
        """Create an attendance; refuse when the target timesheet is
        confirmed, and verify the new record lands inside the sheet's dates.
        """
        context = dict(context or {})
        # Automation: Logic change of pre and post processor? follow up write in project.task.work inherited create, write
        # TODO
        if 'sheet_id' in context:
            ts = self.pool.get('hr_timesheet_sheet.sheet').browse(cr, uid, context['sheet_id'], context=context)
            if ts.state not in ('draft', 'new'):
                raise orm.except_orm(_('Error !'), _('You cannot modify an entry in a confirmed timesheet !'))
        res = super(hr_attendance, self).create(cr, uid, vals, context=context)
        if 'sheet_id' in context:
            # The new record must fall inside the current timesheet's range;
            # otherwise it would have been attached to a different sheet.
            if context['sheet_id'] != self.browse(cr, uid, res, context=context).sheet_id.id:
                raise orm.except_orm(_('UserError'), _('You cannot enter an attendance '
                                     'date outside the current timesheet dates!'))
        return res

    def write(self, cr, uid, ids, vals, context=None):
        """Write attendances; block timestamp/employee changes on records
        that are linked to task work (the user must edit the task work).
        """
        context = dict(context or {})
        context['handling'] = 'modify'
        context['hint'] = '\n\nPlease modify the duration, time and employee/user directly with the task work!'
        if 'name' in vals or 'employee_id' in vals:
            # Changing timestamp or employee is only allowed when no task
            # work references this attendance (raises otherwise).
            self.check_linked_work(cr, uid, ids, context=context)
        else:
            _logger.info('write: timestamp or employee_id were not changed (%s) | vals: %s', ids, vals)
        # Automation: Change of pre and post processor? follow up write in project.task.work inherited create, write
        # TODO
        # NOTE(review): workcontext/pause/task_work below are computed but
        # never used before the super() call — looks like unfinished
        # automation (see TODO above); confirm before removing.
        workcontext = False
        pause = self.check_pre_attendance_pause_begin(cr, uid, ids, context)
        # Get related workcontext (always start_attendance)
        if not isinstance(ids, (list)):
            ids = [ids]
        related_task_work = self.pool.get('project.task.work').search(cr, uid, [('start_attendance', 'in', ids)])
        if not related_task_work:
            related_task_work = self.pool.get('project.task.work').search(cr, uid, [('end_attendance', 'in', ids)])
        if related_task_work:
            task_work = self.pool.get('project.task.work').browse(cr, uid, related_task_work)[0]
            workcontext = task_work.workcontext
        return super(hr_attendance, self).write(cr, uid, ids, vals, context=context)

    def unlink(self, cr, uid, ids, context=None):
        """Delete attendances; refuse when linked to task work, and clean up
        the neighbouring sign_in/sign_out record so the sign-in/out sequence
        stays consistent (recursively deleting or re-labelling it).
        """
        context = dict(context or {})
        excluded = []
        if 'exclude' in context:
            for id in context['exclude']:
                excluded.append(id)
        context['exclude'] = excluded
        context['handling'] = 'delete'
        context['hint'] = '\n\nPlease delete the task work first!'
        self.check_linked_work(cr, uid, ids, context=context)
        if not isinstance(ids, list):
            ids = [ids]
        own_list = self.browse(cr, uid, ids)
        if own_list:
            for own in own_list:
                # Delete or modify atttendances (sign_in and sign_out only) next to deleted attendance to preserve the logic itself and support the user
                if 'project_task_write' not in context and 'obstructions_remove' not in context:
                    _logger.info('Object: %s', own.id)
                    if own.action == 'sign_in':
                        # Look at the chronologically next record of the same employee.
                        action_check = self.search(cr, uid, [('employee_id', '=', own.employee_id.id), ('name', '>', own.name)], order='name asc', limit=1)
                        if action_check:
                            post = self.browse(cr, uid, action_check[0])
                            if post.action == 'action':
                                # add pre_deleted to context for indicating that the predecesor will be deleted
                                self.write(cr, uid, post.id, {'action': own.action}, context={'pre_deleted': True})
                            elif post.action == 'sign_out':
                                # 'trigger' guards against infinite mutual recursion
                                # between the paired sign_in/sign_out deletions.
                                if 'trigger' not in context:
                                    context['trigger'] = own.id
                                if context['trigger'] != post.id:
                                    _logger.info('POST DELETE: %s', own.id)
                                    self.unlink(cr, uid, post.id, context=context)
                                    del context['trigger']
                                    ids = filter(lambda a: a != post.id, ids)
                    elif own.action == 'sign_out':
                        # Mirror case: look at the chronologically previous record.
                        action_check = self.search(cr, uid, [('employee_id', '=', own.employee_id.id), ('name', '<', own.name)], order='name desc', limit=1)
                        if action_check:
                            pre = self.browse(cr, uid, action_check[0])
                            if pre.action == 'action':
                                self.write(cr, uid, pre.id, {'action': own.action})
                            elif pre.action == 'sign_in':
                                if 'trigger' not in context:
                                    context['trigger'] = own.id
                                if context['trigger'] != pre.id:
                                    _logger.info('PRE DELETE: %s', own.id)
                                    self.unlink(cr, uid, pre.id, context=context)
                                    del context['trigger']
                                    ids = filter(lambda a: a != pre.id, ids)
                else:
                    _logger.info('Object: %s', own)
        return super(hr_attendance, self).unlink(cr, uid, ids, context=context)

    def _check(self, cr, uid, ids):
        """Raise when any of the given attendances belongs to a timesheet
        that is no longer editable (not draft/new)."""
        if isinstance(ids, int):
            own = self.browse(cr, uid, [ids])
        else:
            own = self.browse(cr, uid, ids)
        if not isinstance(own, list):
            own = [own]
        for att in own:
            if att.sheet_id and att.sheet_id.state not in ('draft', 'new'):
                raise orm.except_orm(_('Error !'), _('You cannot modify an entry in a confirmed timesheet !'))
        return True

    def check_linked_work(self, cr, uid, ids, context=None):
        """Raise a dependency error when any attendance in ``ids`` is
        referenced by a project.task.work record (as start or end
        attendance), excluding ids listed in ``context['exclude']``.
        """
        search_base = []
        context = dict(context or {})
        if 'exclude' in context:
            for id in context['exclude']:
                search_base.append(('id', '!=', id))
        workobj = self.pool.get('project.task.work')
        emp_obj = self.pool.get('hr.employee')
        if isinstance(ids, int):
            ids = [ids]
        attendance_browse = self.browse(cr, uid, ids)
        for attendance in attendance_browse:
            # NOTE(review): search_list aliases search_base, so the '|' and
            # attendance clauses accumulate across loop iterations — looks
            # unintended for multi-id calls; confirm before changing.
            search_list = search_base
            search_list.append('|')
            search_list.append(('start_attendance', '=', attendance.id))
            search_list.append(('end_attendance', '=', attendance.id))
            search = workobj.search(cr, uid, search_list)
            if search:
                # Build a comma-separated list of the linked task-work ids
                # for the error message.
                if len(search) > 1:
                    text = ''
                    x = 1
                    for id in search:
                        text = text + str(id)
                        if (x < len(search)):
                            text = text + ', '
                        x = x + 1
                else:
                    text = str(search[0])
                _logger.info('write: You can not %s the attendance %s because it is linked to this/these task work entry/entries (%s)!%s', context['handling'], attendance.id, text, context['hint'])
                raise orm.except_orm(_('Dependency Error!'), _('You can not %s the attendance %s because it is linked to this/these task work entry/entries (%s)!%s') % (context['handling'], attendance.id, text, context['hint'],))
        return True

    def check_pre_attendance_pause_begin(self, cr, uid, ids, context=None):
        """Return True when the record preceding the single attendance in
        ``ids`` is a sign_out whose reason is flagged as a pause.

        ``context['pre_deleted']`` skips one record back (offset=1) because
        the immediate predecessor is about to be deleted.
        """
        if not isinstance(ids, list):
            ids = [ids]
        if len(ids) == 1:
            current = self.browse(cr, uid, ids)
            if isinstance(current, orm.browse_record_list):
                current = current[0]
            if 'pre_deleted' in context:
                offset = 1
            else:
                offset = 0
            action_check = self.search(cr, uid, [('employee_id', '=', current.employee_id.id), ('name', '<', str(current.name))], order='name DESC', limit=1, offset=offset)
            if action_check:
                if isinstance(action_check, list):
                    action_check = action_check[0]
                pre = self.browse(cr, uid, action_check)
                # if sign in check reason of the ancestor (pause adaption)
                if pre and pre.action == 'sign_out' and pre.action_desc.pause:
                    _logger.debug('Pre Reason: %s | Current Pause: %s', pre.action_desc.pause, current.action_desc.pause)
                    return True
        return False

    # Neutralize constraint b/c it is not working properly with actions
    def _altern_si_so(self, cr, uid, ids, context=None):
        return True

    _constraints = [(_altern_si_so, 'Error: Sign in (resp. Sign out) must follow Sign out (resp. Sign in)', ['action'])]


# Legacy OpenERP style: instantiate the model class at import time.
hr_attendance()
| tapo-it/odoo-addons-worktrail | addons_worktrail/tapoit_hr_project/model/hr_attendance.py | Python | agpl-3.0 | 10,490 |
#!/usr/bin/python2.7 -tt
"""
Copyright (c) 2013, Adel Qodmani
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import tarfile # For the compression
import os # For everything related to path
import logging
import sys # For the argv and exit
import datetime
def main():
    """ zipper source-dir-full-path dest-dir-full-path
    Tars and zips the source-dir and put it in the dest-dir with the name:
    source-dir-name_date_time.tar.gz
    """
    check_args()

    # Normalize both paths by stripping any trailing slash.
    src = sys.argv[1].rstrip('/')
    logging.debug("source_path: %s" % src)
    dst = sys.argv[2].rstrip('/')
    logging.debug("dest_path: %s" % dst)

    # The archive is named after the last component of the source dir.
    src_name = src.split("/")[-1]
    logging.debug("source_name: %s" % src_name)

    archive = create_tar_path(src_name, dst)
    logging.debug("tar_path: %s" % archive)
    create_tar_file(archive, src)
def check_args():
    """Validate command-line arguments; print guidance and exit on error.

    Exit codes: 0 --help, 1 missing args, 2/3 non-absolute source/dest,
    4/5 source/dest not a directory.
    """
    def fail(user_msg, log_msg, code):
        # Print the user-facing message plus the help hint, log, and abort.
        print(user_msg)
        print("You can get the help by: zipper --help")
        logging.error(log_msg)
        logging.error("Shutting down")
        sys.exit(code)

    if len(sys.argv) > 1 and sys.argv[1] == "--help":
        print("zipper creates a zipped tar-ball of the <source> directory"
              + "and puts it in \nthe <destination> directory ")
        print("e.g: zipper /tmp/ /home/sally/Desktop/")
        print("will create a file called tmp_date_time.tar.gz in "
              "/home/sally/Desktop/ which has all the contents of /tmp/")
        sys.exit(0)

    if len(sys.argv) < 3:
        print("Missing arguments!")
        print("Usage:")
        print("\tzipper source destination")
        print("You can get the help by: zipper --help")
        logging.error("Missing arguments!")
        logging.error("Shutting down!")
        sys.exit(1)

    if not os.path.isabs(sys.argv[1]):
        fail("Source directory is not an absolute path",
             "Source is not absolute", 2)
    if not os.path.isabs(sys.argv[2]):
        fail("Destination directory is not an absolute path",
             "Destination is not absolute", 3)
    if not os.path.isdir(sys.argv[1]):
        fail("Path given as a source directory is not a directory",
             "Source is not a directory", 4)
    if not os.path.isdir(sys.argv[2]):
        fail("Path given as destination directory is not a directory",
             "Destination is not a directory", 5)
def create_tar_path(source_name, dest_path):
    """Build the archive path: <dest_path>/<source_name>_<date_time>.tar.gz

    Note: os.path.join discards the home-dir prefix whenever dest_path is
    absolute (which check_args guarantees), so the expanduser step is a
    no-op in practice.
    """
    base = os.path.expanduser('~')  # changes ~ to home dir path
    logging.debug(base)
    base = os.path.join(base, dest_path + "/")
    logging.debug(base)
    # Timestamp format: %Year %month %day _ %Hour %Minute %Second
    stamp = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
    logging.debug(stamp)
    # Final path = dest + source_name + timestamp + extension
    full = os.path.join(base, source_name)
    logging.debug(full)
    full = full + '_' + stamp + ".tar.gz"
    logging.debug(full)
    return full
def create_tar_file(tar_path, source_path):
    """Create a gzip-compressed tarball at tar_path containing source_path.

    Exits the process with status 6 on I/O errors and 7 on tar errors.
    """
    # "w:gz" is open for writing a gz tarball
    try:
        tar = tarfile.open(tar_path, "w:gz")
        try:
            tar.add(source_path)
        finally:
            # Always release the file handle, even if add() fails.
            tar.close()
        logging.debug("Tar ball [%s] created for directory [%s]" % (tar_path,
                                                                    source_path))
    except tarfile.TarError:
        # BUG FIX: the original caught bare `TarError`, an undefined name
        # (the class lives in the tarfile module), which would raise a
        # NameError instead of handling the tar failure.
        logging.critical("TarError exception! Aborting ...")
        sys.exit(7)
    except IOError:
        logging.critical("IOError exception! Aborting ..")
        sys.exit(6)
if __name__ == "__main__":
    # Set up the logging env
    # Format: (asctime) (filename) (funcname) (linenumber) (level) (msg)
    # The time can be formated with the datefmt parameter
    FORMAT = "%(asctime)s %(filename)s::%(funcName)s::%(lineno)d"
    FORMAT += " [%(levelname)s]: %(msg)s"
    DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
    LOG_PATH = "/home/aral/learn/zipper/log"
    try:
        STREAM = open(LOG_PATH, "a+")
    except IOError:
        # BUG FIX: the original printed STREAM (unbound when open() fails,
        # so it raised NameError) and called the nonexistent sys.abort();
        # report the path instead and exit with a distinct status.
        print("Can't create a log file in [%s]" % LOG_PATH)
        sys.exit(8)
    # Setting the log stream to go to stderr and print all log info from debug
    # and higher levels (debug, info, warning, error, critical)
    logging.basicConfig(stream=STREAM, level=logging.DEBUG, format=FORMAT,
                        datefmt=DATE_FORMAT)
    main()
| adel-qod/zipper | zipper.py | Python | bsd-2-clause | 6,014 |
#!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import copy
import os
from ambari_agent.AlertSchedulerHandler import AlertSchedulerHandler
from ambari_agent.alerts.metric_alert import MetricAlert
from ambari_agent.alerts.ams_alert import AmsAlert
from ambari_agent.alerts.port_alert import PortAlert
from ambari_agent.alerts.web_alert import WebAlert
from AmbariConfig import AmbariConfig
from mock.mock import Mock, MagicMock, patch
from unittest import TestCase
TEST_PATH = os.path.join('ambari_agent', 'dummy_files')
class TestAlertSchedulerHandler(TestCase):
    """Unit tests for AlertSchedulerHandler: definition loading, JSON to
    alert-callable conversion, and execute_alert dispatching."""

    def setUp(self):
        self.config = AmbariConfig()

    def test_load_definitions_count(self):
        # BUG FIX: this method was also named test_load_definitions, so the
        # later method of the same name silently shadowed it and it never
        # ran; renamed so both tests execute.
        scheduler = AlertSchedulerHandler(TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, None)
        definitions = scheduler._AlertSchedulerHandler__load_definitions()
        self.assertEquals(len(definitions), 1)

    @patch("ambari_commons.network.reconfigure_urllib2_opener")
    def test_job_context_injector(self, reconfigure_urllib2_opener_mock):
        # The opener must only be reconfigured when the system proxy is off.
        self.config.use_system_proxy_setting = lambda: False
        scheduler = AlertSchedulerHandler(TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, None, self.config, None)
        scheduler._job_context_injector(self.config)
        self.assertTrue(reconfigure_urllib2_opener_mock.called)
        reconfigure_urllib2_opener_mock.reset_mock()

        self.config.use_system_proxy_setting = lambda: True
        scheduler = AlertSchedulerHandler(TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, None, self.config, None)
        scheduler._job_context_injector(self.config)
        self.assertFalse(reconfigure_urllib2_opener_mock.called)

    def test_json_to_callable_metric(self):
        scheduler = AlertSchedulerHandler(TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, None, self.config, None)
        json_definition = {
            'source': {
                'type': 'METRIC'
            }
        }
        callable_result = scheduler._AlertSchedulerHandler__json_to_callable('cluster', 'host', 'host', copy.deepcopy(json_definition))
        self.assertTrue(callable_result is not None)
        self.assertTrue(isinstance(callable_result, MetricAlert))
        self.assertEquals(callable_result.alert_meta, json_definition)
        self.assertEquals(callable_result.alert_source_meta, json_definition['source'])

    def test_json_to_callable_ams(self):
        scheduler = AlertSchedulerHandler(TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, None, self.config, None)
        json_definition = {
            'source': {
                'type': 'AMS'
            }
        }
        callable_result = scheduler._AlertSchedulerHandler__json_to_callable('cluster', 'host', 'host', copy.deepcopy(json_definition))
        self.assertTrue(callable_result is not None)
        self.assertTrue(isinstance(callable_result, AmsAlert))
        self.assertEquals(callable_result.alert_meta, json_definition)
        self.assertEquals(callable_result.alert_source_meta, json_definition['source'])

    def test_json_to_callable_port(self):
        json_definition = {
            'source': {
                'type': 'PORT'
            }
        }
        scheduler = AlertSchedulerHandler(TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, None, self.config, None)
        callable_result = scheduler._AlertSchedulerHandler__json_to_callable('cluster', 'host', 'host', copy.deepcopy(json_definition))
        self.assertTrue(callable_result is not None)
        self.assertTrue(isinstance(callable_result, PortAlert))
        self.assertEquals(callable_result.alert_meta, json_definition)
        self.assertEquals(callable_result.alert_source_meta, json_definition['source'])

    def test_json_to_callable_web(self):
        json_definition = {
            'source': {
                'type': 'WEB'
            }
        }
        scheduler = AlertSchedulerHandler(TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, None, self.config, None)
        callable_result = scheduler._AlertSchedulerHandler__json_to_callable('cluster', 'host', 'host', copy.deepcopy(json_definition))
        self.assertTrue(callable_result is not None)
        self.assertTrue(isinstance(callable_result, WebAlert))
        self.assertEquals(callable_result.alert_meta, json_definition)
        self.assertEquals(callable_result.alert_source_meta, json_definition['source'])

    def test_json_to_callable_none(self):
        # Unknown source types must produce no callable at all.
        json_definition = {
            'source': {
                'type': 'SOMETHING'
            }
        }
        scheduler = AlertSchedulerHandler(TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, None, self.config, None)
        callable_result = scheduler._AlertSchedulerHandler__json_to_callable('cluster', 'host', 'host', copy.deepcopy(json_definition))
        self.assertTrue(callable_result is None)

    def test_execute_alert_noneScheduler(self):
        # With no underlying scheduler, execute_alert must not collect.
        execution_commands = []
        scheduler = AlertSchedulerHandler(TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, None, self.config, None)
        scheduler._AlertSchedulerHandler__scheduler = None
        alert_mock = Mock()
        scheduler._AlertSchedulerHandler__json_to_callable = Mock(return_value=alert_mock)
        scheduler.execute_alert(execution_commands)
        self.assertFalse(alert_mock.collect.called)

    def test_execute_alert_noneCommands(self):
        execution_commands = None
        scheduler = AlertSchedulerHandler(TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, None, self.config, None)
        alert_mock = Mock()
        scheduler._AlertSchedulerHandler__json_to_callable = Mock(return_value=alert_mock)
        scheduler.execute_alert(execution_commands)
        self.assertFalse(alert_mock.collect.called)

    def test_execute_alert_emptyCommands(self):
        execution_commands = []
        scheduler = AlertSchedulerHandler(TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, None, self.config, None)
        alert_mock = Mock()
        scheduler._AlertSchedulerHandler__json_to_callable = Mock(return_value=alert_mock)
        scheduler.execute_alert(execution_commands)
        self.assertFalse(alert_mock.collect.called)

    def test_execute_alert(self):
        execution_commands = [
            {
                'clusterName': 'cluster',
                'hostName': 'host',
                'publicHostName' : 'host',
                'alertDefinition': {
                    'name': 'alert1'
                }
            }
        ]
        scheduler = AlertSchedulerHandler(TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, None, self.config, None)
        alert_mock = MagicMock()
        alert_mock.collect = Mock()
        alert_mock.set_helpers = Mock()
        scheduler._AlertSchedulerHandler__json_to_callable = Mock(return_value=alert_mock)
        scheduler._AlertSchedulerHandler__config_maps = {
            'cluster': {}
        }
        scheduler.execute_alert(execution_commands)
        scheduler._AlertSchedulerHandler__json_to_callable.assert_called_with('cluster', 'host', 'host', {'name': 'alert1'})
        self.assertTrue(alert_mock.collect.called)

    def test_execute_alert_from_extension(self):
        # Definitions that only exist in the extension path must still run.
        execution_commands = [
            {
                'clusterName': 'cluster',
                'hostName': 'host',
                'publicHostName' : 'host',
                'alertDefinition': {
                    'name': 'alert1'
                }
            }
        ]
        scheduler = AlertSchedulerHandler('wrong_path', 'wrong_path', 'wrong_path', TEST_PATH, 'wrong_path', None, self.config, None)
        alert_mock = MagicMock()
        alert_mock.collect = Mock()
        alert_mock.set_helpers = Mock()
        scheduler._AlertSchedulerHandler__json_to_callable = Mock(return_value=alert_mock)
        scheduler._AlertSchedulerHandler__config_maps = {
            'cluster': {}
        }
        scheduler.execute_alert(execution_commands)
        scheduler._AlertSchedulerHandler__json_to_callable.assert_called_with('cluster', 'host', 'host', {'name': 'alert1'})
        self.assertTrue(alert_mock.collect.called)

    def test_load_definitions(self):
        scheduler = AlertSchedulerHandler(TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, None, self.config, None)
        scheduler._AlertSchedulerHandler__config_maps = {
            'cluster': {}
        }
        definitions = scheduler._AlertSchedulerHandler__load_definitions()
        alert_def = definitions[0]
        self.assertTrue(isinstance(alert_def, PortAlert))

    def test_load_definitions_noFile(self):
        scheduler = AlertSchedulerHandler('wrong_path', 'wrong_path', 'wrong_path', 'wrong_path', 'wrong_path', None, self.config, None)
        scheduler._AlertSchedulerHandler__config_maps = {
            'cluster': {}
        }
        definitions = scheduler._AlertSchedulerHandler__load_definitions()
        self.assertEquals(definitions, [])

    def test_start(self):
        execution_commands = [
            {
                'clusterName': 'cluster',
                'hostName': 'host',
                'publicHostName' : 'host',
                'alertDefinition': {
                    'name': 'alert1'
                }
            }
        ]
        scheduler = AlertSchedulerHandler(TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, None, self.config, None)
        alert_mock = MagicMock()
        alert_mock.interval = Mock(return_value=5)
        alert_mock.collect = Mock()
        alert_mock.set_helpers = Mock()
        scheduler.schedule_definition = MagicMock()
        scheduler._AlertSchedulerHandler__scheduler = MagicMock()
        scheduler._AlertSchedulerHandler__scheduler.running = False
        scheduler._AlertSchedulerHandler__scheduler.start = Mock()
        scheduler._AlertSchedulerHandler__json_to_callable = Mock(return_value=alert_mock)
        scheduler._AlertSchedulerHandler__config_maps = {
            'cluster': {}
        }
        scheduler.start()
        self.assertTrue(scheduler._AlertSchedulerHandler__scheduler.start.called)
        scheduler.schedule_definition.assert_called_with(alert_mock)
| arenadata/ambari | ambari-agent/src/test/python/ambari_agent/TestAlertSchedulerHandler.py | Python | apache-2.0 | 10,134 |
import gtk
from uxie.utils import join_to_settings_dir
from uxie.actions import KeyMap
from uxie.floating import Manager as FeedbackManager
from uxie.plugins import Manager as PluginManager
import filelist
import clipboard
import fsutils
# Application-wide keymap, persisted in the user's settings directory.
keymap = KeyMap(join_to_settings_dir('fmd', 'keys.conf'))

# Generic bindings available in every context; copy/cut/paste each get both
# the conventional shortcut and the classic Insert/Delete alternative.
keymap.map_generic('root-menu', 'F1')
keymap.map_generic('copy', '<ctrl>c')
keymap.map_generic('copy', '<ctrl>Insert')
keymap.map_generic('cut', '<ctrl>x')
keymap.map_generic('cut', '<shift>Delete')
keymap.map_generic('paste', '<ctrl>v')
keymap.map_generic('paste', '<shift>Insert')
keymap.map_generic('delete', 'Delete')
class App(object):
    """Top-level application object.

    Wires together the main gtk window, clipboard, feedback manager,
    keymap activator, plugin manager and the file-list widget.
    """

    def __init__(self):
        self.wg = gtk.WindowGroup()

        self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
        self.window.set_default_size(700, 415)
        self.window.connect('delete-event', self.quit)
        self.wg.add_window(self.window)

        self.clipboard = clipboard.Clipboard()
        self.window.feedback = self.feedback = FeedbackManager()

        self.activator = keymap.get_activator(self.window, 'main_window')
        # Lazy context: self.filelist is only assigned further below.
        self.activator.add_context('filelist', None, lambda: self.filelist)

        # Top-level menus; the '#N' suffix appears to control menu ordering
        # (uxie convention — TODO confirm against uxie.actions).
        self.activator.bind_menu('_File#1')
        self.activator.bind_menu('_View#10')
        self.activator.bind_menu('_Goto#20')
        self.activator.bind_menu('_Run#30').to('<Alt>X')
        self.activator.bind_menu('_Utils#40')
        self.activator.bind_menu('_Window#50')
        self.activator.bind('window', 'quit', 'File/_Quit#100', self.quit).to('<ctrl>q')
        self.activator.bind('window', 'close-window', 'Window/_Close#100', self.quit).to('<ctrl>w')

        self.pm = PluginManager(self.activator)
        filelist.init(self.activator)
        self.init_plugins(self.pm)

        self.executor = fsutils.Executor()
        self.filelist = filelist.FileList(self.clipboard, self.executor)
        self.window.add(self.filelist.widget)
        self.pm.ready('filelist', self.filelist)

    def init_plugins(self, pm):
        # Import here to avoid loading plugin modules before the UI exists.
        from plugins import sync_names, places, info, history
        pm.add_plugin(sync_names)
        pm.add_plugin(places)
        pm.add_plugin(info)
        pm.add_plugin(history)

    def open(self, uri):
        # Show the window and point the file list at the given location.
        self.window.show_all()
        self.filelist.set_uri(uri)

    def quit(self, *args):
        # Bound both to the window delete-event and the quit/close actions.
        gtk.main_quit()
"""
********************************************************************************
* Name: forms.py
* Author: Nathan Swain
* Created On: 2014
* Copyright: (c) Brigham Young University 2014
* License: BSD 2-Clause
********************************************************************************
"""
from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.password_validation import validate_password
from django.conf import settings
def get_captcha():
    """Return the captcha field to attach to auth forms, or None.

    When ENABLE_CAPTCHA is off, returns None. Otherwise returns a
    ReCaptcha field if both reCAPTCHA keys are configured, falling back
    to the simple django-captcha field.
    """
    if not getattr(settings, 'ENABLE_CAPTCHA', False):
        return None

    recaptcha_configured = (getattr(settings, 'RECAPTCHA_PRIVATE_KEY', '')
                            and getattr(settings, 'RECAPTCHA_PUBLIC_KEY', ''))
    if recaptcha_configured:
        # Imported lazily so the dependency is only needed when enabled.
        from snowpenguin.django.recaptcha2.fields import ReCaptchaField
        from snowpenguin.django.recaptcha2.widgets import ReCaptchaWidget
        return ReCaptchaField(label='', widget=ReCaptchaWidget())

    from captcha.fields import CaptchaField
    return CaptchaField(label='')
class LoginForm(forms.Form):
    """Username/password login form with an optional captcha field."""

    # Same character set as Django's default username validator.
    username = forms.RegexField(
        label='', max_length=150,
        regex=r'^[\w.@+-]+$',
        error_messages={
            'invalid': "This value may contain only letters, numbers and @/./+/-/_ characters."
        },
        widget=forms.TextInput(
            attrs={
                'placeholder': 'Username',
                'autofocus': 'autofocus'
            }
        )
    )
    password = forms.CharField(
        label='',
        widget=forms.PasswordInput(
            attrs={
                'placeholder': 'Password',
                'autocomplete': 'off'
            }
        )
    )
    # None when ENABLE_CAPTCHA is off; non-Field attributes are ignored by
    # the form metaclass, so the field simply does not appear.
    captcha = get_captcha()
class RegisterForm(forms.ModelForm):
    """
    A form that creates a user, with no privileges, from the given username and
    password.
    """
    error_messages = {
        'duplicate_username': "A user with that username already exists.",
        'password_mismatch': "The two password fields didn't match.",
        'duplicate_email': "A user with this email already exists."
    }
    username = forms.RegexField(
        label='', max_length=150,
        regex=r'^[\w.@+-]+$',
        error_messages={
            'invalid': "This value may contain only letters, numbers and @/./+/-/_ characters."},
        widget=forms.TextInput(
            attrs={
                'placeholder': 'Username',
                'autofocus': 'autofocus'
            }
        )
    )
    email = forms.CharField(
        label='',
        max_length=254,
        widget=forms.EmailInput(
            attrs={'placeholder': 'Email'}
        )
    )
    password1 = forms.CharField(
        label='',
        widget=forms.PasswordInput(
            attrs={
                'placeholder': 'Password',
                'autocomplete': 'off'
            }
        )
    )
    password2 = forms.CharField(
        label='',
        widget=forms.PasswordInput(
            attrs={
                'placeholder': 'Confirm Password',
                'autocomplete': 'off'
            }
        )
    )
    # None when captcha support is disabled (see get_captcha()).
    captcha = get_captcha()

    class Meta:
        model = User
        fields = ("username", "email")

    def clean_username(self):
        """Reject usernames that already exist with a friendly message."""
        # Since User.username is unique, this check is redundant,
        # but it sets a nicer error message than the ORM. See #13147.
        username = self.cleaned_data["username"]
        try:
            User._default_manager.get(username=username)
        except User.DoesNotExist:
            return username
        raise forms.ValidationError(
            self.error_messages['duplicate_username'],
            code='duplicate_username',
        )

    def clean_email(self):
        """Reject duplicate emails so password recovery stays unambiguous."""
        # Enforce unique email addresses for password recovery.
        email = self.cleaned_data["email"]
        try:
            User._default_manager.get(email=email)
        except User.DoesNotExist:
            return email
        raise forms.ValidationError(
            self.error_messages['duplicate_email'],
            code='duplicate_email',
        )

    def clean_password2(self):
        """Check the two passwords match and satisfy the password validators."""
        password1 = self.cleaned_data.get("password1")
        password2 = self.cleaned_data.get("password2")
        if password1 and password2 and password1 != password2:
            raise forms.ValidationError(
                self.error_messages['password_mismatch'],
                code='password_mismatch',
            )
        validate_password(password2)
        return password2

    def save(self, commit=True):
        """Create the user, hashing the chosen password before saving."""
        user = super().save(commit=False)
        user.set_password(self.cleaned_data["password1"])
        if commit:
            user.save()
        return user
class UserSettingsForm(forms.ModelForm):
    """
    A form for modifying user settings.

    Edits first/last name and email on the standard Django User model.
    """
    first_name = forms.CharField(
        max_length=30,
        label='First Name:',
        required=False,
        widget=forms.TextInput(
            attrs={
                'placeholder': '',
                'class': 'form-control',
                'autofocus': 'autofocus'
            }
        )
    )
    last_name = forms.CharField(
        max_length=150,
        label='Last Name:',
        required=False,
        widget=forms.TextInput(
            attrs={
                'placeholder': '',
                'class': 'form-control'
            }
        )
    )
    # Email is the only required field on this form.
    email = forms.EmailField(
        max_length=254,
        label='Email:',
        widget=forms.EmailInput(
            attrs={
                'placeholder': '',
                'class': 'form-control'
            }
        )
    )

    class Meta:
        model = User
        fields = ("first_name", "last_name", "email")
class UserPasswordChangeForm(forms.Form):
    """
    A form that lets a user change their password by entering their old one.
    """
    error_messages = {
        'password_mismatch': "The two password fields didn't match.",
        'password_incorrect': "Your old password was entered incorrectly. Please enter it again.",
    }
    old_password = forms.CharField(
        label="",
        widget=forms.PasswordInput(
            attrs={
                'placeholder': 'Old Password',
                'autofocus': 'autofocus',
                'autocomplete': 'off'
            }
        )
    )
    new_password1 = forms.CharField(
        label="",
        widget=forms.PasswordInput(
            attrs={
                'placeholder': 'New Password',
                'autocomplete': 'off'
            }
        )
    )
    new_password2 = forms.CharField(
        label="",
        widget=forms.PasswordInput(
            attrs={
                'placeholder': 'Confirm New Password',
                'autocomplete': 'off'
            }
        )
    )

    def __init__(self, user, *args, **kwargs):
        # The form needs the user to verify the old password and to save.
        self.user = user
        super().__init__(*args, **kwargs)

    def clean_old_password(self):
        """
        Validates that the old_password field is correct.
        """
        old_password = self.cleaned_data["old_password"]
        if not self.user.check_password(old_password):
            raise forms.ValidationError(
                self.error_messages['password_incorrect'],
                code='password_incorrect',
            )
        return old_password

    def clean_new_password2(self):
        """Check the new passwords match and satisfy the password validators."""
        password1 = self.cleaned_data.get('new_password1')
        password2 = self.cleaned_data.get('new_password2')
        if password1 and password2:
            if password1 != password2:
                raise forms.ValidationError(
                    self.error_messages['password_mismatch'],
                    code='password_mismatch',
                )
        validate_password(password2)
        return password2

    def save(self, commit=True):
        """Apply the new password (hashed) to the stored user."""
        self.user.set_password(self.cleaned_data['new_password1'])
        if commit:
            self.user.save()
        return self.user
class SsoTenantForm(forms.Form):
    """Form prompting for a single-sign-on tenant name.

    The accepted character pattern and the placeholder label are
    configurable through the ``SSO_TENANT_REGEX`` and
    ``SSO_TENANT_ALIAS`` settings, with sensible defaults.
    """
    tenant = forms.RegexField(
        label='',
        max_length=30,
        required=True,
        regex=getattr(settings, 'SSO_TENANT_REGEX', r'^[\w\s_-]+$'),
        error_messages={
            'invalid': "Invalid characters provided."
        },
        widget=forms.TextInput(
            attrs={
                'placeholder': getattr(settings, 'SSO_TENANT_ALIAS', 'Tenant').title(),
                'autofocus': 'autofocus'
            }
        )
    )
    # Whether to remember the chosen tenant for subsequent logins.
    remember = forms.BooleanField(
        label='Remember for next time',
        required=False,
    )
| tethysplatform/tethys | tethys_portal/forms.py | Python | bsd-2-clause | 8,528 |
import datetime
from django.conf import settings
from django.db.backends.utils import truncate_name, typecast_date, typecast_timestamp
from django.db.models.sql import compiler
from django.db.models.sql.constants import MULTI
from django.utils import six
from django.utils.six.moves import zip, zip_longest
from django.utils import timezone
SQLCompiler = compiler.SQLCompiler
class GeoSQLCompiler(compiler.SQLCompiler):
    """SQL compiler that customizes column selection for geometry fields.

    Overrides the column-building routines so that spatial backends can
    wrap geometry columns in backend-specific SELECT expressions (e.g.
    converting to WKT on Oracle/MySQL) and convert the returned values
    back into Python objects.
    """
    def get_columns(self, with_aliases=False):
        """
        Return the list of columns to use in the select statement. If no
        columns have been specified, returns all columns relating to fields in
        the model.
        If 'with_aliases' is true, any column names that are duplicated
        (without the table names) are given unique aliases. This is needed in
        some cases to avoid ambiguity with nested queries.
        This routine is overridden from Query to handle customized selection of
        geometry columns.
        """
        qn = self.quote_name_unless_alias
        qn2 = self.connection.ops.quote_name
        # Extra-select columns may carry a custom (spatial) format string.
        result = ['(%s) AS %s' % (self.get_extra_select_format(alias) % col[0], qn2(alias))
                  for alias, col in six.iteritems(self.query.extra_select)]
        params = []
        aliases = set(self.query.extra_select.keys())
        if with_aliases:
            col_aliases = aliases.copy()
        else:
            col_aliases = set()
        if self.query.select:
            only_load = self.deferred_to_columns()
            # This loop customized for GeoQuery.
            for col, field in self.query.select:
                if isinstance(col, (list, tuple)):
                    alias, column = col
                    table = self.query.alias_map[alias].table_name
                    # Skip deferred columns.
                    if table in only_load and column not in only_load[table]:
                        continue
                    r = self.get_field_select(field, alias, column)
                    if with_aliases:
                        if col[1] in col_aliases:
                            # Duplicate column name: generate a unique alias.
                            c_alias = 'Col%d' % len(col_aliases)
                            result.append('%s AS %s' % (r, c_alias))
                            aliases.add(c_alias)
                            col_aliases.add(c_alias)
                        else:
                            result.append('%s AS %s' % (r, qn2(col[1])))
                            aliases.add(r)
                            col_aliases.add(col[1])
                    else:
                        result.append(r)
                        aliases.add(r)
                        col_aliases.add(col[1])
                else:
                    # Raw SQL expression (e.g. an aggregate object).
                    col_sql, col_params = col.as_sql(qn, self.connection)
                    result.append(col_sql)
                    params.extend(col_params)
                    if hasattr(col, 'alias'):
                        aliases.add(col.alias)
                        col_aliases.add(col.alias)
        elif self.query.default_cols:
            cols, new_aliases = self.get_default_columns(with_aliases,
                                                         col_aliases)
            result.extend(cols)
            aliases.update(new_aliases)
        max_name_length = self.connection.ops.max_name_length()
        for alias, aggregate in self.query.aggregate_select.items():
            agg_sql, agg_params = aggregate.as_sql(qn, self.connection)
            if alias is None:
                result.append(agg_sql)
            else:
                # Aliases may need truncation to the backend's identifier limit.
                result.append('%s AS %s' % (agg_sql, qn(truncate_name(alias, max_name_length))))
            params.extend(agg_params)
        # This loop customized for GeoQuery.
        for (table, col), field in self.query.related_select_cols:
            r = self.get_field_select(field, table, col)
            if with_aliases and col in col_aliases:
                c_alias = 'Col%d' % len(col_aliases)
                result.append('%s AS %s' % (r, c_alias))
                aliases.add(c_alias)
                col_aliases.add(c_alias)
            else:
                result.append(r)
                aliases.add(r)
                col_aliases.add(col)
        self._select_aliases = aliases
        return result, params
    def get_default_columns(self, with_aliases=False, col_aliases=None,
                            start_alias=None, opts=None, as_pairs=False, from_parent=None):
        """
        Computes the default columns for selecting every field in the base
        model. Will sometimes be called to pull in related models (e.g. via
        select_related), in which case "opts" and "start_alias" will be given
        to provide a starting point for the traversal.
        Returns a list of strings, quoted appropriately for use in SQL
        directly, as well as a set of aliases used in the select statement (if
        'as_pairs' is True, returns a list of (alias, col_name) pairs instead
        of strings as the first component and None as the second component).
        This routine is overridden from Query to handle customized selection of
        geometry columns.
        """
        result = []
        if opts is None:
            opts = self.query.get_meta()
        aliases = set()
        only_load = self.deferred_to_columns()
        seen = self.query.included_inherited_models.copy()
        if start_alias:
            seen[None] = start_alias
        for field, model in opts.get_fields_with_model():
            if from_parent and model is not None and issubclass(from_parent, model):
                # Avoid loading data for already loaded parents.
                continue
            alias = self.query.join_parent_model(opts, model, start_alias, seen)
            table = self.query.alias_map[alias].table_name
            if table in only_load and field.column not in only_load[table]:
                continue
            if as_pairs:
                result.append((alias, field.column))
                aliases.add(alias)
                continue
            # This part of the function is customized for GeoQuery. We
            # see if there was any custom selection specified in the
            # dictionary, and set up the selection format appropriately.
            field_sel = self.get_field_select(field, alias)
            if with_aliases and field.column in col_aliases:
                c_alias = 'Col%d' % len(col_aliases)
                result.append('%s AS %s' % (field_sel, c_alias))
                col_aliases.add(c_alias)
                aliases.add(c_alias)
            else:
                r = field_sel
                result.append(r)
                aliases.add(r)
                if with_aliases:
                    col_aliases.add(field.column)
        return result, aliases
    def resolve_columns(self, row, fields=()):
        """
        This routine is necessary so that distances and geometries returned
        from extra selection SQL get resolved appropriately into Python
        objects.
        """
        values = []
        aliases = list(self.query.extra_select)
        # Have to set a starting row number offset that is used for
        # determining the correct starting row index -- needed for
        # doing pagination with Oracle.
        rn_offset = 0
        if self.connection.ops.oracle:
            if self.query.high_mark is not None or self.query.low_mark: rn_offset = 1
        index_start = rn_offset + len(aliases)
        # Converting any extra selection values (e.g., geometries and
        # distance objects added by GeoQuerySet methods).
        values = [self.query.convert_values(v,
                  self.query.extra_select_fields.get(a, None),
                  self.connection)
                  for v, a in zip(row[rn_offset:index_start], aliases)]
        if self.connection.ops.oracle or getattr(self.query, 'geo_values', False):
            # We resolve the rest of the columns if we're on Oracle or if
            # the `geo_values` attribute is defined.
            for value, field in zip_longest(row[index_start:], fields):
                values.append(self.query.convert_values(value, field, self.connection))
        else:
            values.extend(row[index_start:])
        return tuple(values)
    #### Routines unique to GeoQuery ####
    def get_extra_select_format(self, alias):
        # Returns '%s' unless a custom (spatial) selection format was
        # registered for this extra-select alias.
        sel_fmt = '%s'
        if hasattr(self.query, 'custom_select') and alias in self.query.custom_select:
            sel_fmt = sel_fmt % self.query.custom_select[alias]
        return sel_fmt
    def get_field_select(self, field, alias=None, column=None):
        """
        Returns the SELECT SQL string for the given field. Figures out
        if any custom selection SQL is needed for the column The `alias`
        keyword may be used to manually specify the database table where
        the column exists, if not in the model associated with this
        `GeoQuery`. Similarly, `column` may be used to specify the exact
        column name, rather than using the `column` attribute on `field`.
        """
        sel_fmt = self.get_select_format(field)
        if field in self.query.custom_select:
            field_sel = sel_fmt % self.query.custom_select[field]
        else:
            field_sel = sel_fmt % self._field_column(field, alias, column)
        return field_sel
    def get_select_format(self, fld):
        """
        Returns the selection format string, depending on the requirements
        of the spatial backend. For example, Oracle and MySQL require custom
        selection formats in order to retrieve geometries in OGC WKT. For all
        other fields a simple '%s' format string is returned.
        """
        if self.connection.ops.select and hasattr(fld, 'geom_type'):
            # This allows operations to be done on fields in the SELECT,
            # overriding their values -- used by the Oracle and MySQL
            # spatial backends to get database values as WKT, and by the
            # `transform` method.
            sel_fmt = self.connection.ops.select
            # Because WKT doesn't contain spatial reference information,
            # the SRID is prefixed to the returned WKT to ensure that the
            # transformed geometries have an SRID different than that of the
            # field -- this is only used by `transform` for Oracle and
            # SpatiaLite backends.
            if self.query.transformed_srid and ( self.connection.ops.oracle or
                                                 self.connection.ops.spatialite ):
                sel_fmt = "'SRID=%d;'||%s" % (self.query.transformed_srid, sel_fmt)
        else:
            sel_fmt = '%s'
        return sel_fmt
    # Private API utilities, subject to change.
    def _field_column(self, field, table_alias=None, column=None):
        """
        Helper function that returns the database column for the given field.
        The table and column are returned (quoted) in the proper format, e.g.,
        `"geoapp_city"."point"`. If `table_alias` is not specified, the
        database table associated with the model of this `GeoQuery` will be
        used. If `column` is specified, it will be used instead of the value
        in `field.column`.
        """
        if table_alias is None: table_alias = self.query.get_meta().db_table
        return "%s.%s" % (self.quote_name_unless_alias(table_alias),
                          self.connection.ops.quote_name(column or field.column))
class SQLInsertCompiler(compiler.SQLInsertCompiler, GeoSQLCompiler):
    """INSERT compiler with GeoSQLCompiler column handling mixed in."""
    pass
class SQLDeleteCompiler(compiler.SQLDeleteCompiler, GeoSQLCompiler):
    """DELETE compiler with GeoSQLCompiler column handling mixed in."""
    pass
class SQLUpdateCompiler(compiler.SQLUpdateCompiler, GeoSQLCompiler):
    """UPDATE compiler with GeoSQLCompiler column handling mixed in."""
    pass
class SQLAggregateCompiler(compiler.SQLAggregateCompiler, GeoSQLCompiler):
    """Aggregate compiler with GeoSQLCompiler column handling mixed in."""
    pass
class SQLDateCompiler(compiler.SQLDateCompiler, GeoSQLCompiler):
    """
    This is overridden for GeoDjango to properly cast date columns, since
    `GeoQuery.resolve_columns` is used for spatial values.
    See #14648, #16757.
    """
    def results_iter(self):
        # Oracle returns DATE columns as full datetimes, so resolve them
        # through a DateTimeField and truncate below; other backends may
        # need string casting instead.
        if self.connection.ops.oracle:
            from django.db.models.fields import DateTimeField
            fields = [DateTimeField()]
        else:
            needs_string_cast = self.connection.features.needs_datetime_string_cast
        # Skip over any extra-select columns that precede the date value.
        offset = len(self.query.extra_select)
        for rows in self.execute_sql(MULTI):
            for row in rows:
                date = row[offset]
                if self.connection.ops.oracle:
                    date = self.resolve_columns(row, fields)[offset]
                elif needs_string_cast:
                    date = typecast_date(str(date))
                # Truncate datetimes down to plain dates.
                if isinstance(date, datetime.datetime):
                    date = date.date()
                yield date
class SQLDateTimeCompiler(compiler.SQLDateTimeCompiler, GeoSQLCompiler):
    """
    This is overridden for GeoDjango to properly cast datetime columns, since
    `GeoQuery.resolve_columns` is used for spatial values.
    See #14648, #16757.
    """
    def results_iter(self):
        # Fix: the local result variable was previously named ``datetime``,
        # shadowing the ``datetime`` module imported at the top of this
        # file; renamed to ``dt`` to avoid the shadowing hazard.
        if self.connection.ops.oracle:
            from django.db.models.fields import DateTimeField
            fields = [DateTimeField()]
        else:
            needs_string_cast = self.connection.features.needs_datetime_string_cast
        # Skip over any extra-select columns that precede the datetime value.
        offset = len(self.query.extra_select)
        for rows in self.execute_sql(MULTI):
            for row in rows:
                dt = row[offset]
                if self.connection.ops.oracle:
                    dt = self.resolve_columns(row, fields)[offset]
                elif needs_string_cast:
                    dt = typecast_timestamp(str(dt))
                # Datetimes are artificially returned in UTC on databases
                # that don't support time zone. Restore the zone used in
                # the query.
                if settings.USE_TZ:
                    dt = dt.replace(tzinfo=None)
                    dt = timezone.make_aware(dt, self.query.tzinfo)
                yield dt
| tastynoodle/django | django/contrib/gis/db/models/sql/compiler.py | Python | bsd-3-clause | 14,013 |
#!/usr/bin/env python
# -*- mode: python; coding: utf-8; -*-
# ---------------------------------------------------------------------------##
#
# Copyright (C) 1998-2003 Markus Franz Xaver Johannes Oberhumer
# Copyright (C) 2003 Mt. Hood Playing Card Co.
# Copyright (C) 2005-2009 Skomoroh
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# ---------------------------------------------------------------------------##
import pysollib.game
from pysollib.game import Game
from pysollib.gamedb import GI, GameInfo, registerGame
from pysollib.games.canfield import CanfieldRush_Talon
from pysollib.hint import CautiousDefaultHint
from pysollib.hint import FreeCellSolverWrapper
from pysollib.hint import KlondikeType_Hint
from pysollib.layout import Layout
from pysollib.mfxutil import Struct, kwdefault
from pysollib.mygettext import _
from pysollib.pysoltk import MfxCanvasText
from pysollib.stack import \
AC_RowStack, \
BO_RowStack, \
DealRowTalonStack, \
InitialDealTalonStack, \
KingAC_RowStack, \
KingSS_RowStack, \
OpenStack, \
OpenTalonStack, \
RK_FoundationStack, \
RK_RowStack, \
RedealTalonStack, \
ReserveStack, \
SC_RowStack, \
SS_FoundationStack, \
SS_RowStack, \
Stack, \
StackWrapper, \
SuperMoveAC_RowStack, \
UD_SS_RowStack, \
WasteStack, \
WasteTalonStack, \
isSameColorSequence
from pysollib.util import ACE, ANY_RANK, ANY_SUIT, KING, NO_RANK
# ************************************************************************
# * Klondike
# ************************************************************************
class Klondike(Game):
    """Base Klondike game; the many variants below subclass this and
    override the *_Class attributes, createGame(), and/or startGame().
    """
    Layout_Method = staticmethod(Layout.klondikeLayout)
    Talon_Class = WasteTalonStack
    Foundation_Class = SS_FoundationStack
    RowStack_Class = KingAC_RowStack
    Hint_Class = KlondikeType_Hint
    def createGame(self, max_rounds=-1, num_deal=1, **layout):
        """Build layout and stacks; returns the Layout object so
        subclasses can extend it."""
        # create layout
        lay, s = Layout(self), self.s
        kwdefault(layout, rows=7, waste=1, texts=1, playcards=16)
        # Bind the (possibly subclass-overridden) layout function to the
        # Layout instance before calling it.
        self.Layout_Method.__get__(lay, lay.__class__)(**layout)
        # self.__class__.Layout_Method(lay, **layout)
        self.setSize(lay.size[0], lay.size[1])
        # create stacks
        s.talon = self.Talon_Class(lay.s.talon.x, lay.s.talon.y, self,
                                   max_rounds=max_rounds, num_deal=num_deal)
        if lay.s.waste:
            s.waste = WasteStack(lay.s.waste.x, lay.s.waste.y, self)
        for r in lay.s.foundations:
            s.foundations.append(
                self.Foundation_Class(r.x, r.y, self, suit=r.suit))
        for r in lay.s.rows:
            s.rows.append(self.RowStack_Class(r.x, r.y, self))
        # default
        lay.defaultAll()
        return lay
    def startGame(self, flip=0, reverse=1):
        # Deal the triangular tableau: row stack i ends up with i+1 cards,
        # the final dealRow() dealing the top (face-up) card of each pile.
        for i in range(1, len(self.s.rows)):
            self.s.talon.dealRow(
                rows=self.s.rows[i:], flip=flip, frames=0, reverse=reverse)
        self.startDealSample()
        self.s.talon.dealRow(reverse=reverse)
        if self.s.waste:
            self.s.talon.dealCards()  # deal first card to WasteStack
    shallHighlightMatch = Game._shallHighlightMatch_AC
# ************************************************************************
# * Vegas Klondike
# ************************************************************************
class VegasKlondike(Klondike):
    """Klondike scored casino-style, with a running balance display."""
    getGameScore = Game.getGameScoreCasino
    getGameBalance = Game.getGameScoreCasino
    def createGame(self, max_rounds=1):
        lay = Klondike.createGame(self, max_rounds=max_rounds)
        # Balance text in the lower-left corner of the canvas.
        self.texts.score = MfxCanvasText(self.canvas,
                                         8, self.height - 8, anchor="sw",
                                         font=self.app.getFont("canvas_large"))
        return lay
    def updateText(self):
        if self.preview > 1:
            return
        # b1 = accumulated balance for this game id, b2 = current game.
        b1, b2 = self.app.stats.gameid_balance, 0
        if self.shallUpdateBalance():
            b2 = self.getGameBalance()
        t = _("Balance $%d") % (b1 + b2)
        self.texts.score.config(text=t)
    def getDemoInfoTextAttr(self, tinfo):
        return tinfo[1]  # "se" corner
# ************************************************************************
# * Casino Klondike
# ************************************************************************
class CasinoKlondike(VegasKlondike):
    """Vegas Klondike allowing up to three passes through the deck."""
    def createGame(self):
        lay = VegasKlondike.createGame(self, max_rounds=3)
        lay.createRoundText(self.s.talon, 'ne', dx=lay.XS)
        # Return the layout for consistency with Klondike.createGame and
        # VegasKlondike.createGame (both return it for subclass use).
        return lay
# ************************************************************************
# * Klondike by Threes
# ************************************************************************
class KlondikeByThrees(Klondike):
    """Klondike dealing three cards at a time from the talon."""
    def createGame(self):
        Klondike.createGame(self, num_deal=3)
# ************************************************************************
# * Trigon
# ************************************************************************
class Trigon(Klondike):
    """Klondike with same-suit, King-based row stacks."""
    RowStack_Class = KingSS_RowStack
# ************************************************************************
# * Thumb and Pouch
# * Chinaman
# ************************************************************************
class ThumbAndPouch(Klondike):
    """Single-deal Klondike variant using BO_RowStack tableau piles."""
    RowStack_Class = BO_RowStack

    def createGame(self):
        Klondike.createGame(self, max_rounds=1)

    def shallHighlightMatch(self, stack1, card1, stack2, card2):
        # Highlight pairs of adjacent rank whose suits differ.
        if card1.suit == card2.suit:
            return False
        return abs(card1.rank - card2.rank) == 1
class Chinaman(ThumbAndPouch):
    """Thumb and Pouch with King-based piles, three-card deal, two rounds."""
    RowStack_Class = StackWrapper(BO_RowStack, base_rank=KING)
    def createGame(self):
        lay = Klondike.createGame(self, num_deal=3,
                                  max_rounds=2, round_text=True)
        lay.createRoundText(self.s.talon, 'ne', dx=lay.XS)
# ************************************************************************
# * Whitehead
# ************************************************************************
class Whitehead_RowStack(SS_RowStack):
    """Same-suit row stack that accepts same-color (not just same-suit)
    sequences when moving piles."""
    def _isAcceptableSequence(self, cards):
        return isSameColorSequence(cards, self.cap.mod, self.cap.dir)
    def getHelp(self):
        return _('Tableau. Build down by color. Sequences of cards '
                 'in the same suit can be moved as a unit.')
class Whitehead(Klondike):
    """Single-deal Klondike with all cards dealt face up and
    color-based tableau building."""
    RowStack_Class = Whitehead_RowStack
    Hint_Class = CautiousDefaultHint
    def createGame(self):
        Klondike.createGame(self, max_rounds=1)
    def startGame(self):
        # flip=1: the whole tableau is dealt face up.
        Klondike.startGame(self, flip=1)
    shallHighlightMatch = Game._shallHighlightMatch_SS
    getQuickPlayScore = Game._getSpiderQuickPlayScore
# ************************************************************************
# * Small Harp (Klondike in a different layout)
# ************************************************************************
class SmallHarp(Klondike):
    """Klondike dealt in a gypsy-style layout with the triangle reversed."""
    Layout_Method = staticmethod(Layout.gypsyLayout)
    def startGame(self):
        # Row stack i gets i face-down cards before the final face-up row.
        for i in range(len(self.s.rows)):
            self.s.talon.dealRow(rows=self.s.rows[:i], flip=0, frames=0)
        self._startAndDealRowAndCards()
# ************************************************************************
# * Eastcliff
# * Easthaven
# ************************************************************************
class Eastcliff(Klondike):
    """Single-deal Klondike variant: each pile gets two face-down cards
    and one face-up card."""
    RowStack_Class = AC_RowStack

    def createGame(self):
        Klondike.createGame(self, max_rounds=1)

    def startGame(self):
        # Two hidden rows, then one visible row.
        for _deal in range(2):
            self.s.talon.dealRow(flip=0, frames=0)
        self.startDealSample()
        self.s.talon.dealRow()
        if self.s.waste:
            self.s.talon.dealCards()  # deal first card to WasteStack
class Easthaven(Eastcliff):
    """Eastcliff without a waste pile; the talon deals a row at a time."""
    Talon_Class = DealRowTalonStack
    def createGame(self):
        Klondike.createGame(self, max_rounds=1, waste=0)
class DoubleEasthaven(Easthaven):
    """Easthaven with eight row stacks (two-deck variant layout)."""
    def createGame(self):
        Klondike.createGame(self, rows=8, max_rounds=1, waste=0, playcards=20)
class TripleEasthaven(Easthaven):
    """Easthaven with twelve row stacks (three-deck variant layout)."""
    def createGame(self):
        Klondike.createGame(self, rows=12, max_rounds=1, waste=0, playcards=26)
# ************************************************************************
# * Westcliff
# * Westhaven
# ************************************************************************
class Westcliff(Eastcliff):
    """Eastcliff with ten piles and no moves off the foundations."""
    Foundation_Class = StackWrapper(SS_FoundationStack, max_move=0)
    def createGame(self):
        Klondike.createGame(self, max_rounds=1, rows=10)
class Westhaven(Westcliff):
    """Westcliff without a waste pile; the talon deals a row at a time."""
    Talon_Class = DealRowTalonStack
    def createGame(self):
        Klondike.createGame(self, max_rounds=1, rows=10, waste=0)
# ************************************************************************
# * Pas Seul
# ************************************************************************
class PasSeul(pysollib.game.StartDealRowAndCards, Eastcliff):
    """Six-pile Eastcliff using the standard row-and-cards opening deal."""
    def createGame(self):
        Klondike.createGame(self, max_rounds=1, rows=6)
# ************************************************************************
# * Blind Alleys
# ************************************************************************
class BlindAlleys(Eastcliff):
    """Eastcliff with six piles, two rounds, and Aces pre-dealt to the
    foundations."""
    def createGame(self):
        lay = Klondike.createGame(self, max_rounds=2, rows=6, round_text=True)
        lay.createRoundText(self.s.talon, 'ne', dx=lay.XS)
    def _shuffleHook(self, cards):
        # move Aces to top of the Talon (i.e. first cards to be dealt)
        return self._shuffleHookMoveToTop(
            cards, lambda c: (c.rank == 0, c.suit))
    def startGame(self):
        # Deal the Aces to the foundations before the normal deal.
        self.s.talon.dealRow(rows=self.s.foundations, frames=0)
        Eastcliff.startGame(self)
# ************************************************************************
# * Somerset
# * Morehead
# * Usk
# ************************************************************************
class Somerset(Klondike):
    """Open ten-pile game with all cards dealt at the start; solvable
    with the FreeCell solver."""
    Talon_Class = InitialDealTalonStack
    RowStack_Class = SuperMoveAC_RowStack
    Hint_Class = CautiousDefaultHint
    Solver_Class = FreeCellSolverWrapper()
    def createGame(self):
        Klondike.createGame(self, max_rounds=1, rows=10, waste=0, texts=0)
    def startGame(self):
        # Six full rows, then the two leftover cards on the last piles.
        for i in range(6):
            self.s.talon.dealRow(rows=self.s.rows[i:], frames=0)
        self.startDealSample()
        self.s.talon.dealRow(rows=self.s.rows[6:])
        self.s.talon.dealRow(rows=self.s.rows[7:])
class Morehead(Somerset):
    """Somerset with single-card moves on BO_RowStack piles (no solver)."""
    RowStack_Class = StackWrapper(BO_RowStack, max_move=1)
    Solver_Class = None
class Usk(Somerset):
    """Somerset with King-based piles and one redeal."""
    Talon_Class = RedealTalonStack
    RowStack_Class = StackWrapper(AC_RowStack, base_rank=KING)
    Solver_Class = None
    def createGame(self):
        lay = Klondike.createGame(self, max_rounds=2, rows=10,
                                  waste=False, texts=False, round_text=True)
        lay.createRoundText(self.s.talon, 'ne')
    def redealCards(self):
        # Redeal in a staircase: each successive row skips one more pile.
        n = 0
        while self.s.talon.cards:
            self.s.talon.dealRowAvail(rows=self.s.rows[n:], frames=4)
            n += 1
# ************************************************************************
# * Canister
# * American Canister
# * British Canister
# ************************************************************************
class AmericanCanister(Klondike):
    """Open eight-pile game; all 52 cards dealt at the start."""
    Talon_Class = InitialDealTalonStack
    RowStack_Class = AC_RowStack
    Solver_Class = FreeCellSolverWrapper(sm='unlimited')
    def createGame(self):
        Klondike.createGame(self, max_rounds=1, rows=8, waste=0, texts=0)
    def startGame(self):
        # 6 rows of 8, plus 4 extra cards on the middle piles.
        self._startDealNumRows(5)
        self.s.talon.dealRow()
        self.s.talon.dealRow(rows=self.s.rows[2:6])
class Canister(AmericanCanister):
    """American Canister built by rank instead of alternate color."""
    RowStack_Class = RK_RowStack
    Solver_Class = FreeCellSolverWrapper(sbb='rank', sm='unlimited')
    shallHighlightMatch = Game._shallHighlightMatch_RK
class BritishCanister(AmericanCanister):
    """American Canister with single-card moves and King-only empty piles."""
    RowStack_Class = StackWrapper(KingAC_RowStack, max_move=1)
    Solver_Class = FreeCellSolverWrapper(esf='kings')
# ************************************************************************
# * Agnes Sorel
# ************************************************************************
class AgnesSorel(Klondike):
    """Klondike variant: row-deal talon, wrapping (mod 13) building, and
    a dealt base card that sets the foundation rank."""
    Talon_Class = DealRowTalonStack
    Foundation_Class = StackWrapper(
        SS_FoundationStack, mod=13, base_rank=NO_RANK, max_move=0)
    RowStack_Class = StackWrapper(SC_RowStack, mod=13, base_rank=NO_RANK)
    def createGame(self):
        Klondike.createGame(self, max_rounds=1, waste=0)
    def startGame(self):
        Klondike.startGame(self, flip=1)
        # The single base card determines the foundations' starting rank.
        self.s.talon.dealSingleBaseCard()
    def shallHighlightMatch(self, stack1, card1, stack2, card2):
        # Same color, adjacent rank with wrap-around (King-Ace).
        return (card1.color == card2.color and
                ((card1.rank + 1) % 13 == card2.rank or
                 (card2.rank + 1) % 13 == card1.rank))
# ************************************************************************
# * 8 x 8
# * Achtmal Acht
# * Eight by Eight
# ************************************************************************
class EightTimesEight(Klondike):
    """Two-deck, eight-pile Klondike in a gypsy layout (8 x 8 deal)."""
    Layout_Method = staticmethod(Layout.gypsyLayout)
    RowStack_Class = AC_RowStack
    def createGame(self):
        Klondike.createGame(self, rows=8)
    def startGame(self):
        # Eight rows of eight cards.
        self._startDealNumRows(7)
        self.s.talon.dealRow()
        self.s.talon.dealCards()  # deal first card to WasteStack
class AchtmalAcht(EightTimesEight):
    """8 x 8 with up to three passes through the deck."""
    def createGame(self):
        lay = Klondike.createGame(self, rows=8, max_rounds=3, round_text=True)
        lay.createRoundText(self.s.talon, 'sw', dx=-lay.XS)
class EightByEight_RowStack(RK_RowStack):
    """Rank-building row stack; empty piles accept single cards only."""
    def acceptsCards(self, from_stack, cards):
        if not RK_RowStack.acceptsCards(self, from_stack, cards):
            return False
        # A non-empty pile accepts any acceptable sequence; an empty
        # pile may only be filled one card at a time.
        return bool(self.cards) or len(cards) == 1
class EightByEight(EightTimesEight):
    """8 x 8 variant with a Canfield-rush talon and rank building."""
    Layout_Method = staticmethod(Layout.klondikeLayout)  # gypsyLayout
    Talon_Class = CanfieldRush_Talon
    RowStack_Class = EightByEight_RowStack
    def createGame(self):
        lay = Klondike.createGame(self, rows=8, playcards=20,
                                  max_rounds=3, round_text=True)
        lay.createRoundText(self.s.talon, 'ne', dx=lay.XS)
    shallHighlightMatch = Game._shallHighlightMatch_RK
# ************************************************************************
# * Batsford
# * Batsford Again
# ************************************************************************
class Batsford_ReserveStack(ReserveStack):
    """Reserve pile that accepts Kings only."""
    def acceptsCards(self, from_stack, cards):
        # Normal reserve rules apply, plus the card must be a King.
        return (ReserveStack.acceptsCards(self, from_stack, cards)
                and cards[0].rank == KING)

    def getHelp(self):
        return _('Reserve. Only Kings are acceptable.')
class Batsford(Klondike):
    """Ten-pile Klondike with a three-card King-only reserve."""
    def createGame(self, **layout):
        kwdefault(layout, rows=10, max_rounds=1, playcards=22)
        # Only show round text when more than one pass is allowed.
        round_text = (layout['max_rounds'] > 1)
        layout['round_text'] = round_text
        lay = Klondike.createGame(self, **layout)
        s = self.s
        # King reserve in the lower-left corner.
        x, y = lay.XM, self.height - lay.YS
        s.reserves.append(Batsford_ReserveStack(x, y, self, max_cards=3))
        self.setRegion(
            s.reserves, (-999, y - lay.YM - lay.CH//2,
                         x + lay.XS - lay.CW//2, 999999),
            priority=1)
        lay.createText(s.reserves[0], "se")
        if round_text:
            lay.createRoundText(self.s.talon, 'ne', dx=lay.XS)
        lay.defaultStackGroups()
class BatsfordAgain(Batsford):
    """Batsford with a second pass through the deck."""
    def createGame(self):
        Batsford.createGame(self, max_rounds=2)
# ************************************************************************
# * Jumbo
# ************************************************************************
class Jumbo(Klondike):
    """Two-deck, nine-pile Klondike with two rounds."""
    def createGame(self):
        lay = Klondike.createGame(self, rows=9, max_rounds=2, round_text=True)
        lay.createRoundText(self.s.talon, 'ne', dx=lay.XS)
    def startGame(self, flip=0):
        # Pile i receives i face-down cards before the final face-up row.
        for i in range(9):
            self.s.talon.dealRow(rows=self.s.rows[:i], flip=flip, frames=0)
        self._startAndDealRowAndCards()
class OpenJumbo(Jumbo):
    """Jumbo with the entire tableau dealt face up."""
    def startGame(self):
        Jumbo.startGame(self, flip=1)
# ************************************************************************
# * Stonewall
# * Flower Garden
# ************************************************************************
class Stonewall(Klondike):
    """Open game: six piles with alternating face-up/face-down deals plus
    a 4x4 block of reserve cards."""
    Talon_Class = InitialDealTalonStack
    RowStack_Class = AC_RowStack
    # Flip pattern for the deal; -1 marks where the animated sample starts.
    DEAL = (0, 1, 0, 1, -1, 0, 1)
    def createGame(self):
        lay = Klondike.createGame(self, rows=6, waste=0, max_rounds=1, texts=0)
        s = self.s
        # Extend the window to the right for the 4x4 reserve block.
        h = max(self.height, lay.YM+4*lay.YS)
        self.setSize(self.width + lay.XM+4*lay.XS, h)
        for i in range(4):
            for j in range(4):
                x, y = self.width + (j-4)*lay.XS, lay.YM + i*lay.YS
                s.reserves.append(OpenStack(x, y, self, max_accept=0))
        lay.defaultStackGroups()
    def startGame(self):
        frames = 0
        for flip in self.DEAL:
            if flip < 0:
                # Switch to animated dealing for the remaining rows.
                frames = -1
                self.startDealSample()
            else:
                self.s.talon.dealRow(flip=flip, frames=frames)
        self.s.talon.dealRow(rows=self.s.reserves)
class FlowerGarden(Stonewall):
    """Stonewall dealt fully face up with rank building, one card at a time."""
    RowStack_Class = StackWrapper(RK_RowStack, max_move=1)
    Hint_Class = CautiousDefaultHint
    # All rows face up; -1 marks where the animated sample starts.
    DEAL = (1, 1, 1, 1, -1, 1, 1)
    shallHighlightMatch = Game._shallHighlightMatch_RK
# ************************************************************************
# * King Albert
# * Raglan
# * Brigade
# * Relaxed Raglan
# * Queen Victoria
# ************************************************************************
class KingAlbert(Klondike):
    """Open nine-pile game with a staggered block of reserve cards."""
    Talon_Class = InitialDealTalonStack
    RowStack_Class = StackWrapper(AC_RowStack, max_move=1)
    Hint_Class = CautiousDefaultHint
    ROWS = 9
    # Number of reserve stacks per reserve row.
    RESERVES = (2, 2, 2, 1)
    def createGame(self):
        lay = Klondike.createGame(
            self, max_rounds=1, rows=self.ROWS, waste=0, texts=0)
        s = self.s
        # Extend the window to the right to hold the reserve grid.
        rw, rh = max(self.RESERVES), len(self.RESERVES)
        h = max(self.height, lay.YM+rh*lay.YS)
        self.setSize(self.width + 2*lay.XM+rw*lay.XS, h)
        for i in range(rh):
            for j in range(self.RESERVES[i]):
                x, y = self.width + (j-rw)*lay.XS, lay.YM + i*lay.YS
                s.reserves.append(OpenStack(x, y, self, max_accept=0))
        lay.defaultStackGroups()
    def startGame(self):
        # Face-up triangular deal, then fill the reserves.
        Klondike.startGame(self, flip=1, reverse=0)
        self.s.talon.dealRow(rows=self.s.reserves)
class Raglan(KingAlbert):
    """King Albert with six reserves and Aces dealt to the foundations."""
    RESERVES = (2, 2, 2)
    def _shuffleHook(self, cards):
        # move Aces to bottom of the Talon (i.e. last cards to be dealt)
        return self._shuffleHookMoveToBottom(
            cards, lambda c: (c.rank == 0, c.suit))
    def startGame(self):
        # Staircase deal over the rows, then reserves, then the Aces.
        for i in range(6):
            self.s.talon.dealRow(rows=self.s.rows[i:], frames=0)
        self.startDealSample()
        self.s.talon.dealRow(rows=self.s.rows[6:])
        self.s.talon.dealRow(rows=self.s.reserves)
        self.s.talon.dealRow(rows=self.s.foundations)
class Brigade(Raglan):
    """Raglan with seven rank-built piles and thirteen reserves."""
    RowStack_Class = StackWrapper(RK_RowStack, max_move=1)
    ROWS = 7
    RESERVES = (4, 4, 4, 1)
    def startGame(self):
        # Five full rows, then reserves, then the Aces to the foundations.
        self._startDealNumRows(4)
        self.s.talon.dealRow()
        self.s.talon.dealRow(rows=self.s.reserves)
        self.s.talon.dealRow(rows=self.s.foundations)
    shallHighlightMatch = Game._shallHighlightMatch_RK
class RelaxedRaglan(Raglan):
    """Raglan allowing multi-card sequence moves."""
    RowStack_Class = AC_RowStack
class QueenVictoria(KingAlbert):
    """King Albert allowing multi-card sequence moves."""
    RowStack_Class = AC_RowStack
# ************************************************************************
# * Jane
# * Agnes Bernauer
# ************************************************************************
class Jane_Talon(OpenTalonStack):
    """Talon for Jane: deals to the reserves but keeps its last two cards,
    moving one of them face-down to the waste at the end."""
    rightclickHandler = OpenStack.rightclickHandler
    doubleclickHandler = OpenStack.doubleclickHandler
    def canFlipCard(self):
        # The talon's top card is never flipped directly.
        return False
    def canDealCards(self):
        return len(self.cards) >= 2
    def dealCards(self, sound=False):
        # Returns the number of cards dealt.
        c = 0
        if len(self.cards) > 2:
            c = self.dealRow(self.game.s.reserves, sound=sound)
        if len(self.cards) == 2:
            # Move the next-to-last card to the waste face down
            # (flip, move, flip back).
            self.game.flipMove(self)
            self.game.moveMove(1, self, self.game.s.waste, frames=4, shadow=0)
            self.game.flipMove(self)
            c = c + 1
        return c
class Jane(Klondike):
    """Jane: Klondike variant with wrapping (mod 13) building, a zig-zag
    column of reserves, and a dealt base card that fixes the foundation
    rank and the row stacks' base rank."""
    Talon_Class = Jane_Talon
    Foundation_Class = StackWrapper(
        SS_FoundationStack, mod=13, base_rank=NO_RANK, min_cards=1)
    RowStack_Class = StackWrapper(AC_RowStack, mod=13, base_rank=NO_RANK)
    def createGame(self, max_rounds=1, rows=7, reserves=7, playcards=16):
        """Build a custom layout: talon and waste on top, four foundations,
        `rows` row stacks, and a zig-zag reserve column on the right."""
        lay, s = Layout(self), self.s
        maxrows = max(rows, 7)
        w = lay.XM+maxrows*lay.XS+lay.XM+2*lay.XS
        h = max(lay.YM+2*lay.YS+playcards*lay.YOFFSET+lay.TEXT_HEIGHT,
                lay.YM+4*lay.YS)
        self.setSize(w, h)
        x, y = lay.XM, lay.YM
        s.talon = self.Talon_Class(x, y, self, max_rounds=max_rounds)
        lay.createText(s.talon, 's')
        x += lay.XS
        s.waste = WasteStack(x, y, self)
        x += 2*lay.XS
        for i in range(4):
            s.foundations.append(self.Foundation_Class(x, y, self, suit=i))
            x += lay.XS
        x, y = lay.XM, lay.YM+lay.YS+lay.TEXT_HEIGHT
        for i in range(rows):
            s.rows.append(self.RowStack_Class(x, y, self))
            x += lay.XS
        # Reserves zig-zag between two columns down the right edge.
        x0, y = self.width - 2*lay.XS, lay.YM
        for i in range(reserves):
            x = x0 + ((i+1) & 1) * lay.XS
            stack = OpenStack(x, y, self, max_accept=0)
            stack.CARD_YOFFSET = lay.YM // 3
            s.reserves.append(stack)
            y = y + lay.YS // 2
        # not needed, as no cards may be placed on the reserves
        # self.setRegion(s.reserves, (x0-lay.XM//2, -999, 999999, 999999),
        #                priority=1)
        lay.defaultStackGroups()
        self.sg.dropstacks.append(s.talon)
    def startGame(self, flip=0, reverse=1):
        # Triangular deal, then reserves, then the single base card.
        for i in range(1, len(self.s.rows)):
            self.s.talon.dealRow(
                rows=self.s.rows[i:], flip=flip, frames=0, reverse=reverse)
        self.startDealSample()
        self.s.talon.dealRow(reverse=reverse)
        self.s.talon.dealRow(rows=self.s.reserves)
        c = self.s.talon.dealSingleBaseCard()
        # update base rank of row stacks
        cap = Struct(base_rank=(c.rank - 1) % 13)
        for s in self.s.rows:
            s.cap.update(cap.__dict__)
            self.saveinfo.stack_caps.append((s.id, cap))
    shallHighlightMatch = Game._shallHighlightMatch_ACW
    def _autoDeal(self, sound=True):
        # Disable automatic dealing; the player controls the talon.
        return 0
class AgnesBernauer_Talon(DealRowTalonStack):
    """Talon that deals one card to each reserve pile per deal."""
    def dealCards(self, sound=False):
        reserves = self.game.s.reserves
        return self.dealRowAvail(reserves, sound=sound)
class AgnesBernauer(Jane):
    """Agnes Bernauer: Jane with an all-face-up deal, a row-dealing
    talon, and foundations whose cards may not be moved back."""
    Foundation_Class = StackWrapper(SS_FoundationStack, mod=13,
                                    base_rank=NO_RANK, max_move=0)
    Talon_Class = AgnesBernauer_Talon
    def startGame(self):
        # same opening deal as Jane, but every tableau card is face up
        Jane.startGame(self, flip=1)
# ************************************************************************
# * Senate
# ************************************************************************
class Senate(Jane):
    """Senate: a two-deck game with suit-building rows, eight open
    reserves in two banks, and eight foundations started from the aces.
    """
    def createGame(self, rows=4):
        # number of cards that fit vertically in a reserve pile
        playcards = 10
        lay, s = Layout(self), self.s
        self.setSize(lay.XM+(rows+7)*lay.XS,
                     lay.YM+2*(lay.YS+playcards*lay.YOFFSET))
        x, y = lay.XM, lay.YM
        for i in range(rows):
            s.rows.append(SS_RowStack(x, y, self))
            x += lay.XS
        # two horizontal banks of four reserves, upper and lower
        for y in lay.YM, lay.YM+lay.YS+playcards*lay.YOFFSET:
            x = lay.XM+rows*lay.XS+lay.XS//2
            for i in range(4):
                stack = OpenStack(x, y, self, max_accept=0)
                stack.CARD_XOFFSET, stack.CARD_YOFFSET = 0, lay.YOFFSET
                s.reserves.append(stack)
                x += lay.XS
        # two columns of four foundations each
        x = lay.XM+(rows+5)*lay.XS
        for i in range(2):
            y = lay.YM+lay.YS
            for j in range(4):
                s.foundations.append(SS_FoundationStack(x, y, self, suit=j))
                y += lay.YS
            x += lay.XS
        x, y = self.width-lay.XS, lay.YM
        s.talon = AgnesBernauer_Talon(x, y, self)
        lay.createText(s.talon, 'nw')
        lay.defaultStackGroups()
    def startGame(self):
        # aces (moved up by _shuffleHook) seed the foundations
        self.s.talon.dealRow(rows=self.s.foundations, frames=0)
        self.startDealSample()
        self.s.talon.dealRow(rows=self.s.reserves)
        self.s.talon.dealRow()
    def _shuffleHook(self, cards):
        # move Aces to top of the Talon (i.e. first cards to be dealt)
        return self._shuffleHookMoveToTop(
            cards,
            lambda c: (c.rank == ACE, (c.deck, c.suit)))
    shallHighlightMatch = Game._shallHighlightMatch_SS
class SenatePlus(Senate):
    """Senate played with a fifth row stack."""
    def createGame(self):
        Senate.createGame(self, rows=5)
# ************************************************************************
# * Phoenix
# * Arizona
# ************************************************************************
class Phoenix(Klondike):
    """Phoenix: open one-deck game with sixteen reserves flanking a
    six-column tableau built down in alternating colors."""
    Hint_Class = CautiousDefaultHint
    RowStack_Class = AC_RowStack
    def createGame(self):
        lay, s = Layout(self), self.s
        self.setSize(lay.XM + 10*lay.XS, lay.YM + 4*(lay.YS+lay.YM))
        # four reserve columns (two on each side), four piles per column
        for col in (0, 1, 8, 9):
            x = lay.XM + col*lay.XS
            for row in range(4):
                y = lay.YM + row*(lay.YS+lay.YM)
                s.reserves.append(OpenStack(x, y, self, max_accept=0))
        for suit in range(4):
            s.foundations.append(
                SS_FoundationStack(lay.XM+(3+suit)*lay.XS, lay.YM, self,
                                   suit))
        for col in range(6):
            s.rows.append(
                self.RowStack_Class(lay.XM+(2+col)*lay.XS, lay.YM+lay.YS,
                                    self))
        s.talon = InitialDealTalonStack(
            lay.XM+int(4.5*lay.XS), lay.YM+3*(lay.YS+lay.YM), self)
        lay.defaultStackGroups()
    def startGame(self):
        self._startDealNumRows(6)
        self.s.talon.dealRow(rows=self.s.reserves)
class Arizona(Phoenix):
    """Phoenix with rank-only (regardless of suit) tableau building."""
    shallHighlightMatch = Game._shallHighlightMatch_RK
    RowStack_Class = RK_RowStack
# ************************************************************************
# * Lanes
# ************************************************************************
class Lanes(Klondike):
    """Lanes: six-row Klondike with one redeal, single-card moves, and
    aces dealt to the foundations up front."""
    Hint_Class = CautiousDefaultHint
    Foundation_Class = StackWrapper(SS_FoundationStack, max_move=0)
    RowStack_Class = StackWrapper(AC_RowStack, base_rank=ANY_RANK, max_move=1)
    def createGame(self):
        layout = Klondike.createGame(self, rows=6, max_rounds=2,
                                     round_text=True)
        layout.createRoundText(self.s.talon, 'ne', dx=layout.XS)
    def _shuffleHook(self, cards):
        # aces rise to the top of the talon so they are dealt first
        def ace_key(card):
            return (card.rank == ACE, card.suit)
        return self._shuffleHookMoveToTop(cards, ace_key)
    def startGame(self):
        self.s.talon.dealRow(rows=self.s.foundations, frames=0)
        self._startDealNumRows(2)
        self.s.talon.dealRow()
        self.s.talon.dealCards()  # deal first card to WasteStack
# ************************************************************************
# * Thirty Six
# ************************************************************************
class ThirtySix(Klondike):
    """Thirty Six: six-row Klondike, no redeal, with playable cards
    auto-moved to the foundations during the opening deal."""
    Foundation_Class = StackWrapper(SS_FoundationStack, max_move=0)
    RowStack_Class = StackWrapper(RK_RowStack, base_rank=ANY_RANK)
    shallHighlightMatch = Game._shallHighlightMatch_RK
    def createGame(self):
        Klondike.createGame(self, rows=6, max_rounds=1)
    def _fillOne(self):
        # move one playable top card to a foundation; report success
        for row in self.s.rows:
            if not row.cards:
                continue
            card = row.cards[-1]
            for foundation in self.s.foundations:
                if foundation.acceptsCards(row, [card]):
                    self.moveMove(1, row, foundation, frames=4, shadow=0)
                    return 1
        return 0
    def startGame(self):
        self.startDealSample()
        for _ in range(6):
            self.s.talon.dealRow()
            # keep auto-playing to the foundations until nothing moves
            while self._fillOne():
                pass
        self.s.talon.dealCards()  # deal first card to WasteStack
# ************************************************************************
# * Q.C.
# ************************************************************************
class Q_C_(Klondike):
    """Q.C.: six-row Klondike variant where the waste card is played to
    the foundations automatically whenever possible, and empty rows are
    refilled from the waste.
    """
    Hint_Class = CautiousDefaultHint
    Foundation_Class = StackWrapper(SS_FoundationStack, max_move=0)
    RowStack_Class = StackWrapper(SS_RowStack, base_rank=ANY_RANK, max_move=1)
    def createGame(self):
        lay = Klondike.createGame(self, rows=6, max_rounds=2)
        lay.createRoundText(self.s.talon, 'sss')
    def startGame(self):
        self._startDealNumRows(3)
        self.s.talon.dealRow()
        # keep turning cards as long as each one auto-plays to a foundation
        while self.s.talon.cards:
            self.s.talon.dealCards()  # deal first card to WasteStack
            if not self.fillWaste():
                break
    def fillWaste(self):
        """Move the waste's top card to a foundation if one accepts it.

        Returns True if a card was moved, False otherwise.
        """
        waste = self.s.waste
        if waste.cards:
            c = waste.cards[-1]
            for f in self.s.foundations:
                if f.acceptsCards(self.s.waste, [c]):
                    waste.moveMove(1, f)
                    return True
        return False
    def fillStack(self, stack=None):
        """Auto-play the waste, then refill an emptied row stack from
        the waste (turning talon cards if the waste is empty).
        """
        waste = self.s.waste
        # first, drain everything that plays to the foundations
        while True:
            if not self.fillWaste():
                break
        if stack in self.s.rows and not stack.cards:
            if not waste.cards:
                # turn talon cards, auto-playing each until one sticks
                while self.s.talon.cards:
                    self.s.talon.dealCards()
                    if not self.fillWaste():
                        break
            if waste.cards:
                waste.moveMove(1, stack)
    shallHighlightMatch = Game._shallHighlightMatch_SS
# ************************************************************************
# * Northwest Territory
# * Artic Garden
# ************************************************************************
class NorthwestTerritory(KingAlbert):
    """King Albert layout with kings-only empty-row bases and four
    four-card reserves dealt from the talon."""
    RESERVES = (4, 4, 4, 4)
    ROWS = 8
    RowStack_Class = StackWrapper(AC_RowStack, base_rank=KING)
    def startGame(self):
        # tableau is dealt face down, then the reserves are filled
        Klondike.startGame(self, flip=0, reverse=0)
        self.s.talon.dealRow(rows=self.s.reserves)
class ArticGarden(NorthwestTerritory):
    """Northwest Territory with every tableau card dealt face up."""
    def startGame(self):
        Klondike.startGame(self, flip=1, reverse=0)
        self.s.talon.dealRow(rows=self.s.reserves)
# ************************************************************************
# * Aunt Mary
# ************************************************************************
class AuntMary(Klondike):
    """Aunt Mary: six-row Klondike, no redeal, where each pre-deal pass
    exposes one more face-up column than the previous one."""
    def createGame(self):
        Klondike.createGame(self, rows=6, max_rounds=1)
    def startGame(self):
        # pass k deals rows [0:k] face up and rows [k:] face down
        for split in range(1, 6):
            self.s.talon.dealRow(rows=self.s.rows[:split], frames=0, flip=1)
            self.s.talon.dealRow(rows=self.s.rows[split:], frames=0, flip=0)
        self.startDealSample()
        self.s.talon.dealRow()
        self.s.talon.dealCards()
# ************************************************************************
# * Double Dot
# ************************************************************************
class DoubleDot(Klondike):
    """Double Dot: build down by two (wrapping mod 13) on the rows and
    up by two on the foundations, so each foundation collects only the
    odd or only the even ranks of its suit.
    """
    Talon_Class = DealRowTalonStack
    RowStack_Class = StackWrapper(RK_RowStack, dir=-2, mod=13)
    Foundation_Class = StackWrapper(SS_FoundationStack, dir=2, mod=13)
    def createGame(self):
        Klondike.createGame(self, max_rounds=1, rows=8, waste=0)
    def _shuffleHook(self, cards):
        # base cards first: aces of suits 0/1, twos (rank == 1) of 2/3
        return self._shuffleHookMoveToTop(
            cards,
            lambda c: ((c.rank == ACE and c.suit in (0, 1)) or
                       (c.rank == 1 and c.suit in (2, 3)), c.suit))
    def startGame(self):
        self.s.talon.dealRow(rows=self.s.foundations, frames=0)
        self._startAndDealRow()
    # def shallHighlightMatch(self, stack1, card1, stack2, card2):
    #     return abs(card1.rank-card2.rank) == 2
    shallHighlightMatch = Game._shallHighlightMatch_RKW
# ************************************************************************
# * Seven Devils
# ************************************************************************
class SevenDevils_RowStack(AC_RowStack):
    """AC row stack that refuses cards coming from the reserves."""
    def acceptsCards(self, from_stack, cards):
        # reserve cards may never be played onto the tableau
        if from_stack in self.game.s.reserves:
            return False
        return AC_RowStack.acceptsCards(self, from_stack, cards)
class SevenDevils(Klondike):
    """Seven Devils: two decks, eight foundations, seven rows, and seven
    staggered reserves whose cards cannot return to the tableau.
    """
    Hint_Class = CautiousDefaultHint
    RowStack_Class = StackWrapper(SevenDevils_RowStack, max_move=1)
    def createGame(self):
        lay, s = Layout(self), self.s
        self.setSize(lay.XM + 10*lay.XS, lay.YM+3*lay.YS+12*lay.YOFFSET)
        x, y = lay.XM, lay.YM
        # two foundations per suit (two decks)
        for i in range(8):
            s.foundations.append(SS_FoundationStack(x, y, self, suit=i//2))
            x += lay.XS
        x, y = lay.XM+lay.XS//2, lay.YM+lay.YS
        for i in range(7):
            s.rows.append(self.RowStack_Class(x, y, self))
            x += lay.XS
        # reserves alternate between two right-hand columns, each shifted
        # half a stack-height down from the previous one
        x0, y = self.width - 2*lay.XS, lay.YM
        for i in range(7):
            x = x0 + ((i+1) & 1) * lay.XS
            s.reserves.append(OpenStack(x, y, self, max_accept=0))
            y += lay.YS // 2
        x, y = lay.XM, self.height-lay.YS
        s.talon = WasteTalonStack(x, y, self, max_rounds=1)
        lay.createText(s.talon, 'n')
        x += lay.XS
        s.waste = WasteStack(x, y, self)
        lay.createText(s.waste, 'n')
        lay.defaultStackGroups()
    def startGame(self, flip=0, reverse=1):
        Klondike.startGame(self)
        self.s.talon.dealRow(rows=self.s.reserves)
# ************************************************************************
# * Moving Left
# * Souter
# ************************************************************************
class MovingLeft(Klondike):
    """Moving Left: ten-row Klondike where an emptied row is refilled
    with the whole movable pile of the row to its right.
    """
    def createGame(self):
        Klondike.createGame(self, max_rounds=1, rows=10, playcards=24)
    def fillStack(self, stack):
        if not stack.cards:
            # wrap the auto-fill in the fill state so it is not recorded
            # as a player move
            old_state = self.enterState(self.S_FILL)
            if stack in self.s.rows:
                i = list(self.s.rows).index(stack)
                if i < len(self.s.rows)-1:
                    from_stack = self.s.rows[i+1]
                    pile = from_stack.getPile()
                    if pile:
                        from_stack.moveMove(len(pile), stack)
            self.leaveState(old_state)
class Souter(MovingLeft):
    """Moving Left with one redeal."""
    def createGame(self):
        layout = Klondike.createGame(self, max_rounds=2, rows=10,
                                     playcards=24, round_text=True)
        layout.createRoundText(self.s.talon, 'ne', dx=layout.XS)
# ************************************************************************
# * Big Forty
# * Ali Baba
# * Cassim
# ************************************************************************
class BigForty(Klondike):
    """Big Forty: ten-row Klondike built down by suit."""
    RowStack_Class = SS_RowStack
    shallHighlightMatch = Game._shallHighlightMatch_SS
    def createGame(self):
        Klondike.createGame(self, rows=10)
    def startGame(self):
        self._startDealNumRowsAndDealRowAndCards(3)
class AliBaba(BigForty):
    """Big Forty with the aces dealt to the foundations up front."""
    def _shuffleHook(self, cards):
        # aces rise to the top of the talon so they are dealt first
        def ace_key(card):
            return (card.rank == ACE, card.suit)
        return self._shuffleHookMoveToTop(cards, ace_key)
    def startGame(self):
        self.s.talon.dealRow(rows=self.s.foundations, frames=0)
        BigForty.startGame(self)
class Cassim(AliBaba):
    """Ali Baba with only seven row stacks."""
    def createGame(self):
        Klondike.createGame(self, rows=7)
# ************************************************************************
# * Saratoga
# ************************************************************************
class Saratoga(Klondike):
    """Klondike dealt three cards at a time with an all-face-up tableau."""
    def createGame(self):
        Klondike.createGame(self, num_deal=3)
    def startGame(self):
        # all tableau cards are dealt face up
        Klondike.startGame(self, flip=1)
# ************************************************************************
# * Whitehorse
# ************************************************************************
class Whitehorse(Klondike):
    """Whitehorse: three-at-a-time Klondike with a single opening row;
    empty rows are refilled automatically from the waste.
    """
    def createGame(self):
        Klondike.createGame(self, num_deal=3)
    def startGame(self):
        self.startDealSample()
        self.s.talon.dealRow()
        self.s.talon.dealCards()
    def fillStack(self, stack):
        if not stack.cards:
            # auto-fill runs inside the fill state so it is not recorded
            # as a player move
            old_state = self.enterState(self.S_FILL)
            if stack in self.s.rows:
                # replenish the waste from the talon if necessary
                if not self.s.waste.cards:
                    self.s.talon.dealCards()
                if self.s.waste.cards:
                    self.s.waste.moveMove(1, stack)
            self.leaveState(old_state)
# ************************************************************************
# * Boost
# ************************************************************************
class Boost(Klondike):
    """Four-row Klondike with two redeals."""
    def createGame(self):
        layout = Klondike.createGame(self, rows=4, max_rounds=3,
                                     round_text=True)
        layout.createRoundText(self.s.talon, 'ne', dx=layout.XS)
# ************************************************************************
# * Gold Rush
# ************************************************************************
class GoldRush(Klondike):
    """Klondike with a Canfield-style talon and two redeals."""
    Talon_Class = CanfieldRush_Talon
    def createGame(self):
        layout = Klondike.createGame(self, max_rounds=3, round_text=True)
        layout.createRoundText(self.s.talon, 'ne', dx=layout.XS)
# ************************************************************************
# * Gold Mine
# ************************************************************************
class GoldMine_RowStack(AC_RowStack):
    # empty rows show the reserve bottom image (no suggested base card)
    getBottomImage = Stack._getReserveBottomImage
class GoldMine(Klondike):
    """Gold Mine: Klondike dealt three at a time, no redeal, starting
    with an empty tableau."""
    RowStack_Class = GoldMine_RowStack
    def createGame(self):
        Klondike.createGame(self, max_rounds=1, num_deal=3)
    def startGame(self):
        # no opening tableau deal; just turn the first cards
        self.startDealSample()
        self.s.talon.dealCards()
# ************************************************************************
# * Lucky Thirteen
# * Lucky Piles
# ************************************************************************
class LuckyThirteen(Game):
    """Lucky Thirteen: thirteen single-move piles arranged in three
    lines (5 / 3 / 5) with four foundations above.

    The original createGame repeated the same row-creation loop three
    times; it is factored into _createRowLine (same stacks, same order).
    """
    Hint_Class = CautiousDefaultHint
    RowStack_Class = StackWrapper(RK_RowStack, base_rank=NO_RANK)
    def _createRowLine(self, x, y, count, xoffset, w0):
        # lay out `count` row stacks side by side starting at (x, y)
        for i in range(count):
            stack = self.RowStack_Class(x, y, self, max_move=1)
            self.s.rows.append(stack)
            stack.CARD_XOFFSET = xoffset
            stack.CARD_YOFFSET = 0
            x += w0
    def createGame(self, xoffset=0, playcards=0):
        """Build the layout.

        xoffset  -- non-zero fans each pile horizontally
        playcards -- horizontal play space reserved per pile
        """
        lay, s = Layout(self), self.s
        if xoffset:
            xoffset = lay.XOFFSET
        w0 = lay.XS+playcards*lay.XOFFSET
        self.setSize(lay.XM + 5*w0, lay.YM+4*lay.YS)
        # three lines of piles: 5, then 3 (indented one slot), then 5
        self._createRowLine(lay.XM, lay.YM+lay.YS, 5, xoffset, w0)
        self._createRowLine(lay.XM+w0, lay.YM+2*lay.YS, 3, xoffset, w0)
        self._createRowLine(lay.XM, lay.YM+3*lay.YS, 5, xoffset, w0)
        # four centered foundations on top
        x, y = (self.width-4*lay.XS)//2, lay.YM
        for i in range(4):
            s.foundations.append(SS_FoundationStack(x, y, self, suit=i))
            x += lay.XS
        x, y = lay.XM, self.height-lay.YS
        s.talon = InitialDealTalonStack(x, y, self, max_rounds=1)
        lay.defaultStackGroups()
    def startGame(self):
        self._startDealNumRowsAndDealSingleRow(3)
    shallHighlightMatch = Game._shallHighlightMatch_RK
class LuckyPiles(LuckyThirteen):
    """Lucky Thirteen with fanned piles built up or down in suit from
    the kings."""
    RowStack_Class = StackWrapper(UD_SS_RowStack, base_rank=KING)
    shallHighlightMatch = Game._shallHighlightMatch_SS
    def createGame(self):
        LuckyThirteen.createGame(self, xoffset=1, playcards=7)
# ************************************************************************
# * Legion
# ************************************************************************
class Legion(Klondike):
    """Legion: eight-row Klondike whose opening deal buries the middle
    rows progressively deeper."""
    def createGame(self):
        Klondike.createGame(self, max_rounds=1, rows=8)
    def startGame(self):
        self.startDealSample()
        self.s.talon.dealRow()
        # each pass deals a face-down and a face-up card to the inner rows
        for depth in range(1, 4):
            inner = self.s.rows[depth:-depth]
            self.s.talon.dealRow(rows=inner, flip=0)
            self.s.talon.dealRow(rows=inner)
        self.s.talon.dealCards()
# ************************************************************************
# * Big Bertha
# ************************************************************************
class BigBertha(Game):
    """Big Bertha: two decks, fifteen rows, fourteen open reserves, eight
    suit foundations (to the queens) plus one king-collecting foundation.
    """
    def createGame(self):
        lay, s = Layout(self), self.s
        self.setSize(lay.XM+15*lay.XS, lay.YM+3*lay.YS+15*lay.YOFFSET)
        x, y = lay.XM, lay.YM
        s.talon = InitialDealTalonStack(x, y, self)
        x, y = lay.XM+3.5*lay.XS, lay.YM
        # eight suit foundations, each holding 12 cards (kings go to the
        # separate king foundation below)
        for i in range(8):
            s.foundations.append(SS_FoundationStack(x, y, self,
                                                    suit=i % 4,
                                                    max_cards=12))
            x += lay.XS
        x, y = lay.XM, lay.YM+lay.YS
        for i in range(15):
            s.rows.append(AC_RowStack(x, y, self))
            x += lay.XS
        x, y = lay.XM, self.height-lay.YS
        for i in range(14):
            s.reserves.append(OpenStack(x, y, self, max_accept=0))
            x += lay.XS
        # the eight kings are collected on a single rank foundation
        s.foundations.append(RK_FoundationStack(x, y, self, suit=ANY_SUIT,
                                                base_rank=KING, dir=0,
                                                max_cards=8))
        lay.defaultStackGroups()
    def startGame(self):
        self._startDealNumRows(5)
        self.s.talon.dealRow()
        self.s.talon.dealRow(rows=self.s.reserves)
    shallHighlightMatch = Game._shallHighlightMatch_AC
# ************************************************************************
# * Athena
# ************************************************************************
class Athena(Klondike):
    """Klondike with a rectangular pre-deal alternating face-down and
    face-up rows."""
    def startGame(self):
        # three silent pre-deal rows: down, up (dealRow default), down
        for kwargs in ({'flip': 0}, {}, {'flip': 0}):
            self.s.talon.dealRow(frames=0, **kwargs)
        self.startDealSample()
        self.s.talon.dealRow()
        self.s.talon.dealCards()
# ************************************************************************
# * Kingsley
# ************************************************************************
class Kingsley(Klondike):
    """Klondike in reverse: foundations build down from the kings and
    rows build up from the aces; no redeal."""
    RowStack_Class = StackWrapper(KingAC_RowStack, base_rank=ACE, dir=1)
    Foundation_Class = StackWrapper(SS_FoundationStack, base_rank=KING, dir=-1)
    def createGame(self):
        Klondike.createGame(self, max_rounds=1)
# ************************************************************************
# * Scarp
# ************************************************************************
class Scarp(Klondike):
    """Scarp: three decks, thirteen rows, a row-dealing talon, and an
    all-face-up opening deal."""
    Talon_Class = DealRowTalonStack
    RowStack_Class = AC_RowStack
    def createGame(self):
        Klondike.createGame(self, max_rounds=1, rows=13, waste=0,
                            playcards=28)
    def startGame(self):
        Klondike.startGame(self, flip=1)
# ************************************************************************
# * Eight Sages
# ************************************************************************
class EightSages_Row(AC_RowStack):
    """AC row stack that only accepts cards played from the waste."""
    def acceptsCards(self, from_stack, cards):
        if from_stack is not self.game.s.waste:
            return False
        return AC_RowStack.acceptsCards(self, from_stack, cards)
class EightSages(Klondike):
    """Eight Sages: two decks, eight rows fed exclusively from the
    waste, one redeal."""
    RowStack_Class = EightSages_Row
    def createGame(self):
        layout = Klondike.createGame(self, max_rounds=2, rows=8,
                                     playcards=12, round_text=True)
        layout.createRoundText(self.s.talon, 'ne', dx=layout.XS)
    def startGame(self):
        self.startDealSample()
        self.s.talon.dealRow()
        self.s.talon.dealCards()
# ************************************************************************
# * Guardian
# ************************************************************************
class Guardian_RowStack(AC_RowStack):
    # Offset from a stack's id to the id of the leftmost stack covering
    # it on the next, wider line of the pyramid (lines of 3, 4 and 5
    # stacks — see Guardian.createGame).  Only ids < len(STEP) have
    # stacks below them.
    STEP = (3, 3, 3, 4, 4, 4, 4)
    def basicIsBlocked(self):
        """Return True while any overlapping stack below still has cards."""
        r, step = self.game.s.rows, self.STEP
        i, n, mylen = self.id, 1, len(step)
        # walk down to the bottom line; each line down covers one more
        # neighbouring stack, so the covered span grows by one per step
        while i < mylen:
            i = i + step[i]
            n = n + 1
        for j in range(i, i + n):
            if r[j].cards:
                return True
        return False
    def acceptsCards(self, from_stack, cards):
        # only the top line (ids 0..2) may be built on while empty
        if len(self.cards) == 0 and self.id > 2:
            return False
        return AC_RowStack.acceptsCards(self, from_stack, cards)
class Guardian(Game):
    """Guardian: a pyramid of twelve overlapping row stacks (lines of
    3, 4 and 5) over a three-at-a-time talon with unlimited redeals and
    four wrapping foundations.
    """
    def createGame(self):
        lay, s = Layout(self), self.s
        self.setSize((7 * lay.XS) + lay.XM,
                     (2.5 * lay.YS) + (13 * lay.YOFFSET) + lay.YM)
        # create stacks: three centered lines of 3, 4 and 5 stacks,
        # each line slightly lower and wider than the one above
        for i in range(3):
            x = lay.XM + (4 - i) * lay.XS // 2
            y = lay.YM + lay.TEXT_HEIGHT + lay.YS + i * lay.YS // 4
            for j in range(i + 3):
                s.rows.append(Guardian_RowStack(x, y, self))
                x = x + lay.XS
        x, y = lay.XM, lay.YM
        # max_rounds=-1: unlimited redeals
        s.talon = WasteTalonStack(x, y, self,
                                  max_rounds=-1, num_deal=3)
        lay.createText(s.talon, "s")
        x += lay.XS
        s.waste = WasteStack(x, y, self)
        lay.createText(s.waste, "s")
        x += lay.XS
        for i in range(4):
            x += lay.XS
            s.foundations.append(SS_FoundationStack(x, y, self, i,
                                 mod=13, max_move=0))
        lay.defaultStackGroups()
    def startGame(self):
        self.startDealSample()
        # top two lines face down, bottom line face up
        self.s.talon.dealRow(rows=self.s.rows[:7], flip=0)
        self.s.talon.dealRow(rows=self.s.rows[7:])
        self.s.talon.dealCards()  # deal first card to WasteStack
# register the games defined in this module (id, class, display name,
# game type flags, number of decks, number of redeals, skill level)
registerGame(GameInfo(2, Klondike, "Klondike",
                      GI.GT_KLONDIKE, 1, -1, GI.SL_BALANCED,
                      altnames=("Classic Solitaire", "American Patience")))
registerGame(GameInfo(61, CasinoKlondike, "Casino Klondike",
                      GI.GT_KLONDIKE | GI.GT_SCORE, 1, 2, GI.SL_BALANCED))
registerGame(GameInfo(129, VegasKlondike, "Vegas Klondike",
                      GI.GT_KLONDIKE | GI.GT_SCORE, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(18, KlondikeByThrees, "Klondike by Threes",
                      GI.GT_KLONDIKE, 1, -1, GI.SL_MOSTLY_LUCK))
registerGame(GameInfo(58, ThumbAndPouch, "Thumb and Pouch",
                      GI.GT_KLONDIKE, 1, 0, GI.SL_MOSTLY_LUCK))
registerGame(GameInfo(67, Whitehead, "Whitehead",
                      GI.GT_KLONDIKE, 1, 0, GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(39, SmallHarp, "Small Harp",
                      GI.GT_KLONDIKE, 1, -1, GI.SL_BALANCED,
                      altnames=("Die kleine Harfe",)))
registerGame(GameInfo(66, Eastcliff, "Eastcliff",
                      GI.GT_KLONDIKE, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(224, Easthaven, "Easthaven",
                      GI.GT_GYPSY, 1, 0, GI.SL_MOSTLY_LUCK))
registerGame(GameInfo(33, Westcliff, "Westcliff",
                      GI.GT_KLONDIKE, 1, 0, GI.SL_MOSTLY_LUCK))
registerGame(GameInfo(225, Westhaven, "Westhaven",
                      GI.GT_GYPSY, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(107, PasSeul, "Pas Seul",
                      GI.GT_KLONDIKE, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(81, BlindAlleys, "Blind Alleys",
                      GI.GT_KLONDIKE, 1, 1, GI.SL_MOSTLY_LUCK))
registerGame(GameInfo(215, Somerset, "Somerset",
                      GI.GT_BELEAGUERED_CASTLE | GI.GT_OPEN, 1, 0,
                      GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(231, Canister, "Canister",
                      GI.GT_BELEAGUERED_CASTLE | GI.GT_OPEN, 1, 0,
                      GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(229, AgnesSorel, "Agnes Sorel",
                      GI.GT_GYPSY, 1, 0, GI.SL_MOSTLY_LUCK))
registerGame(GameInfo(4, EightTimesEight, "8 x 8",
                      GI.GT_KLONDIKE, 2, -1, GI.SL_BALANCED))
registerGame(GameInfo(127, AchtmalAcht, "Eight Times Eight",
                      GI.GT_KLONDIKE, 2, 2, GI.SL_BALANCED,
                      altnames=("Achtmal Acht",)))
registerGame(GameInfo(133, Batsford, "Batsford",
                      GI.GT_KLONDIKE, 2, 0, GI.SL_BALANCED))
registerGame(GameInfo(221, Stonewall, "Stonewall",
                      GI.GT_RAGLAN, 1, 0, GI.SL_MOSTLY_LUCK))
registerGame(GameInfo(222, FlowerGarden, "Flower Garden",
                      GI.GT_RAGLAN | GI.GT_OPEN, 1, 0, GI.SL_MOSTLY_SKILL,
                      altnames=("The Bouquet", "The Garden",)))
registerGame(GameInfo(233, KingAlbert, "King Albert",
                      GI.GT_RAGLAN | GI.GT_OPEN, 1, 0, GI.SL_MOSTLY_SKILL,
                      altnames=("Idiot's Delight",)))
registerGame(GameInfo(232, Raglan, "Raglan",
                      GI.GT_RAGLAN | GI.GT_OPEN, 1, 0, GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(223, Brigade, "Brigade",
                      GI.GT_RAGLAN | GI.GT_OPEN, 1, 0, GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(230, Jane, "Jane",
                      GI.GT_RAGLAN, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(236, AgnesBernauer, "Agnes Bernauer",
                      GI.GT_RAGLAN, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(263, Phoenix, "Phoenix",
                      GI.GT_RAGLAN | GI.GT_OPEN, 1, 0, GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(283, Jumbo, "Jumbo",
                      GI.GT_KLONDIKE, 2, 1, GI.SL_BALANCED))
registerGame(GameInfo(333, OpenJumbo, "Open Jumbo",
                      GI.GT_KLONDIKE, 2, 1, GI.SL_BALANCED))
registerGame(GameInfo(326, Lanes, "Lanes",
                      GI.GT_KLONDIKE, 1, 1, GI.SL_BALANCED))
registerGame(GameInfo(327, ThirtySix, "Thirty Six",
                      GI.GT_KLONDIKE, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(350, Q_C_, "Q.C.",
                      GI.GT_KLONDIKE, 2, 1, GI.SL_BALANCED))
registerGame(GameInfo(361, NorthwestTerritory, "Northwest Territory",
                      GI.GT_RAGLAN, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(362, Morehead, "Morehead",
                      GI.GT_BELEAGUERED_CASTLE | GI.GT_OPEN, 1, 0,
                      GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(388, Senate, "Senate",
                      GI.GT_RAGLAN, 2, 0, GI.SL_BALANCED))
registerGame(GameInfo(389, SenatePlus, "Senate +",
                      GI.GT_RAGLAN, 2, 0, GI.SL_BALANCED))
registerGame(GameInfo(390, Arizona, "Arizona",
                      GI.GT_RAGLAN | GI.GT_OPEN, 1, 0, GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(407, AuntMary, "Aunt Mary",
                      GI.GT_KLONDIKE, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(420, DoubleDot, "Double Dot",
                      GI.GT_1DECK_TYPE, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(434, SevenDevils, "Seven Devils",
                      GI.GT_RAGLAN, 2, 0, GI.SL_MOSTLY_LUCK))
registerGame(GameInfo(452, DoubleEasthaven, "Double Easthaven",
                      GI.GT_GYPSY, 2, 0, GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(453, TripleEasthaven, "Triple Easthaven",
                      GI.GT_GYPSY, 3, 0, GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(470, MovingLeft, "Moving Left",
                      GI.GT_KLONDIKE, 2, 0, GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(471, Souter, "Souter",
                      GI.GT_KLONDIKE, 2, 1, GI.SL_BALANCED))
registerGame(GameInfo(473, BigForty, "Big Forty",
                      GI.GT_KLONDIKE, 1, -1, GI.SL_BALANCED))
registerGame(GameInfo(474, AliBaba, "Ali Baba",
                      GI.GT_KLONDIKE, 1, -1, GI.SL_BALANCED))
registerGame(GameInfo(475, Cassim, "Cassim",
                      GI.GT_KLONDIKE, 1, -1, GI.SL_BALANCED))
registerGame(GameInfo(479, Saratoga, "Saratoga",
                      GI.GT_KLONDIKE, 1, -1, GI.SL_BALANCED))
registerGame(GameInfo(491, Whitehorse, "Whitehorse",
                      GI.GT_KLONDIKE, 1, -1, GI.SL_BALANCED))
registerGame(GameInfo(518, Boost, "Boost",
                      GI.GT_KLONDIKE | GI.GT_ORIGINAL, 1, 2, GI.SL_BALANCED))
registerGame(GameInfo(522, ArticGarden, "Artic Garden",
                      GI.GT_RAGLAN | GI.GT_OPEN, 1, 0, GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(532, GoldRush, "Gold Rush",
                      GI.GT_KLONDIKE, 1, 2, GI.SL_BALANCED))
registerGame(GameInfo(539, Usk, "Usk",
                      GI.GT_KLONDIKE | GI.GT_OPEN, 1, 1, GI.SL_BALANCED))
registerGame(GameInfo(541, BatsfordAgain, "Batsford Again",
                      GI.GT_KLONDIKE, 2, 1, GI.SL_BALANCED))
registerGame(GameInfo(572, GoldMine, "Gold Mine",
                      GI.GT_NUMERICA, 1, 0, GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(585, LuckyThirteen, "Lucky Thirteen",
                      GI.GT_1DECK_TYPE, 1, 0, GI.SL_MOSTLY_LUCK))
registerGame(GameInfo(586, LuckyPiles, "Lucky Piles",
                      GI.GT_FAN_TYPE | GI.GT_OPEN, 1, 0, GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(601, AmericanCanister, "American Canister",
                      GI.GT_BELEAGUERED_CASTLE | GI.GT_OPEN, 1, 0,
                      GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(602, BritishCanister, "British Canister",
                      GI.GT_BELEAGUERED_CASTLE | GI.GT_OPEN, 1, 0,
                      GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(607, Legion, "Legion",
                      GI.GT_KLONDIKE, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(627, QueenVictoria, "Queen Victoria",
                      GI.GT_RAGLAN | GI.GT_OPEN, 1, 0, GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(630, BigBertha, "Big Bertha",
                      GI.GT_RAGLAN | GI.GT_OPEN, 2, 0, GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(633, Athena, "Athena",
                      GI.GT_KLONDIKE, 1, -1, GI.SL_BALANCED))
registerGame(GameInfo(634, Chinaman, "Chinaman",
                      GI.GT_KLONDIKE, 1, 1, GI.SL_BALANCED))
registerGame(GameInfo(651, EightByEight, "Eight by Eight",
                      GI.GT_KLONDIKE, 2, 2, GI.SL_BALANCED))
registerGame(GameInfo(667, Kingsley, "Kingsley",
                      GI.GT_KLONDIKE, 1, 0, GI.SL_MOSTLY_LUCK))
registerGame(GameInfo(669, Scarp, "Scarp",
                      GI.GT_GYPSY | GI.GT_ORIGINAL, 3, 0, GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(726, EightSages, "Eight Sages",
                      GI.GT_KLONDIKE, 2, 1, GI.SL_MOSTLY_LUCK))
registerGame(GameInfo(821, Trigon, "Trigon",
                      GI.GT_KLONDIKE, 1, -1, GI.SL_BALANCED))
registerGame(GameInfo(849, RelaxedRaglan, "Relaxed Raglan",
                      GI.GT_RAGLAN | GI.GT_RELAXED | GI.GT_OPEN, 1, 0,
                      GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(852, Guardian, "Guardian",
                      GI.GT_KLONDIKE, 1, -1, GI.SL_BALANCED))
| shlomif/PySolFC | pysollib/games/klondike.py | Python | gpl-3.0 | 55,884 |
#!/usr/bin/env python
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
import time
import unittest
from contextlib import contextmanager
import mock
import swift
from swift.common import utils, swob
from swift.proxy import server as proxy_server
from swift.common.storage_policy import StoragePolicy, POLICIES
from test.unit import FakeRing, FakeMemcache, fake_http_connect, \
debug_logger, patch_policies
@contextmanager
def set_http_connect(*args, **kwargs):
    """Temporarily replace http_connect in all proxy controller modules
    with a fake built from *args/**kwargs.

    On a clean exit, assert that every canned status code was consumed;
    always restore the real http_connect afterwards.
    """
    controllers = swift.proxy.controllers
    patched_modules = (controllers.base, controllers.obj,
                       controllers.account, controllers.container)
    old_connect = controllers.base.http_connect
    new_connect = fake_http_connect(*args, **kwargs)
    try:
        for module in patched_modules:
            module.http_connect = new_connect
        yield new_connect
        left_over_status = list(new_connect.code_iter)
        if left_over_status:
            raise AssertionError('left over status %r' % left_over_status)
    finally:
        for module in patched_modules:
            module.http_connect = old_connect
class PatchedObjControllerApp(proxy_server.Application):
    """Proxy application that routes object requests through a
    substitute controller class (set via the ``object_controller``
    attribute).
    """
    object_controller = proxy_server.ObjectController
    def handle_request(self, req):
        # swap in the test's controller class for the duration of the call
        with mock.patch('swift.proxy.server.ObjectController',
                        new=self.object_controller):
            return super(PatchedObjControllerApp, self).handle_request(req)
@patch_policies([StoragePolicy(0, 'zero', True,
                               object_ring=FakeRing(max_more_nodes=9))])
class TestObjControllerWriteAffinity(unittest.TestCase):
    """Tests for write-affinity node ordering in the object controller."""
    def setUp(self):
        self.app = proxy_server.Application(
            None, FakeMemcache(), account_ring=FakeRing(),
            container_ring=FakeRing(), logger=debug_logger())
        self.app.request_node_count = lambda ring: 10000000
        self.app.sort_nodes = lambda l: l  # stop shuffling the primary nodes
    def test_iter_nodes_local_first_noops_when_no_affinity(self):
        """Without an affinity function the node order is unchanged."""
        controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
        self.app.write_affinity_is_local_fn = None
        object_ring = self.app.get_object_ring(None)
        all_nodes = object_ring.get_part_nodes(1)
        all_nodes.extend(object_ring.get_more_nodes(1))
        local_first_nodes = list(controller.iter_nodes_local_first(
            object_ring, 1))
        self.maxDiff = None
        self.assertEqual(all_nodes, local_first_nodes)
    def test_iter_nodes_local_first_moves_locals_first(self):
        """Affinity moves local-region nodes ahead without dropping any."""
        controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
        self.app.write_affinity_is_local_fn = (
            lambda node: node['region'] == 1)
        self.app.write_affinity_node_count = lambda ring: 4
        object_ring = self.app.get_object_ring(None)
        all_nodes = object_ring.get_part_nodes(1)
        all_nodes.extend(object_ring.get_more_nodes(1))
        local_first_nodes = list(controller.iter_nodes_local_first(
            object_ring, 1))
        # the local nodes move up in the ordering
        self.assertEqual([1, 1, 1, 1],
                         [node['region'] for node in local_first_nodes[:4]])
        # we don't skip any nodes
        self.assertEqual(len(all_nodes), len(local_first_nodes))
        self.assertEqual(sorted(all_nodes), sorted(local_first_nodes))
    def test_connect_put_node_timeout(self):
        """A connect timeout yields no connection (None)."""
        controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
        self.app.conn_timeout = 0.05
        with set_http_connect(slow_connect=True):
            nodes = [dict(ip='', port='', device='')]
            res = controller._connect_put_node(nodes, '', '', {}, ('', ''))
        # assertIsNone gives a clearer failure message than
        # assertTrue(res is None)
        self.assertIsNone(res)
@patch_policies([
    StoragePolicy(0, 'zero', True),
    StoragePolicy(1, 'one'),
    StoragePolicy(2, 'two'),
])
class TestObjController(unittest.TestCase):
    """Request-level tests for the proxy object controller, driven
    through a patched application with canned backend responses.
    """
    # canned container info served to every request (see setUp, which
    # patches get_info to return a copy of this dict)
    container_info = {
        'partition': 1,
        'nodes': [
            {'ip': '127.0.0.1', 'port': '1', 'device': 'sda'},
            {'ip': '127.0.0.1', 'port': '2', 'device': 'sda'},
            {'ip': '127.0.0.1', 'port': '3', 'device': 'sda'},
        ],
        'write_acl': None,
        'read_acl': None,
        'storage_policy': None,
        'sync_key': None,
        'versions': None,
    }
    def setUp(self):
        # setup fake rings with handoffs
        self.obj_ring = FakeRing(max_more_nodes=3)
        for policy in POLICIES:
            policy.object_ring = self.obj_ring
        logger = debug_logger('proxy-server')
        logger.thread_locals = ('txn1', '127.0.0.2')
        self.app = PatchedObjControllerApp(
            None, FakeMemcache(), account_ring=FakeRing(),
            container_ring=FakeRing(), logger=logger)
        # controller subclass whose container_info is patched to return
        # the canned class-level dict; note the first parameter is named
        # `controller` (not `self`) because the method body closes over
        # the outer test case's `self`
        class FakeContainerInfoObjController(proxy_server.ObjectController):
            def container_info(controller, *args, **kwargs):
                patch_path = 'swift.proxy.controllers.base.get_info'
                with mock.patch(patch_path) as mock_get_info:
                    mock_get_info.return_value = dict(self.container_info)
                    return super(FakeContainerInfoObjController,
                                 controller).container_info(*args, **kwargs)
        self.app.object_controller = FakeContainerInfoObjController
    # --- basic PUT / GET / DELETE behavior ---------------------------------
    def test_PUT_simple(self):
        # all three replicas accept the PUT -> 201 to the client
        req = swift.common.swob.Request.blank('/v1/a/c/o', method='PUT')
        req.headers['content-length'] = '0'
        with set_http_connect(201, 201, 201):
            resp = req.get_response(self.app)
        self.assertEquals(resp.status_int, 201)
    def test_PUT_if_none_match(self):
        # If-None-Match: * succeeds when no backend reports a conflict
        req = swift.common.swob.Request.blank('/v1/a/c/o', method='PUT')
        req.headers['if-none-match'] = '*'
        req.headers['content-length'] = '0'
        with set_http_connect(201, 201, 201):
            resp = req.get_response(self.app)
        self.assertEquals(resp.status_int, 201)
    def test_PUT_if_none_match_denied(self):
        # a single 412 from a backend fails the conditional PUT
        req = swift.common.swob.Request.blank('/v1/a/c/o', method='PUT')
        req.headers['if-none-match'] = '*'
        req.headers['content-length'] = '0'
        with set_http_connect(201, 412, 201):
            resp = req.get_response(self.app)
        self.assertEquals(resp.status_int, 412)
    def test_PUT_if_none_match_not_star(self):
        # only '*' is supported; anything else is rejected before any
        # backend request is made (note set_http_connect() gets no codes)
        req = swift.common.swob.Request.blank('/v1/a/c/o', method='PUT')
        req.headers['if-none-match'] = 'somethingelse'
        req.headers['content-length'] = '0'
        with set_http_connect():
            resp = req.get_response(self.app)
        self.assertEquals(resp.status_int, 400)
    def test_GET_simple(self):
        req = swift.common.swob.Request.blank('/v1/a/c/o')
        with set_http_connect(200):
            resp = req.get_response(self.app)
        self.assertEquals(resp.status_int, 200)
    def test_GET_error(self):
        # a 503 causes the proxy to try the next node
        req = swift.common.swob.Request.blank('/v1/a/c/o')
        with set_http_connect(503, 200):
            resp = req.get_response(self.app)
        self.assertEquals(resp.status_int, 200)
    def test_GET_handoff(self):
        # all primaries fail -> the handoff node serves the object
        req = swift.common.swob.Request.blank('/v1/a/c/o')
        codes = [503] * self.obj_ring.replicas + [200]
        with set_http_connect(*codes):
            resp = req.get_response(self.app)
        self.assertEquals(resp.status_int, 200)
    def test_GET_not_found(self):
        # 404 from every primary and every handoff -> 404 to the client
        req = swift.common.swob.Request.blank('/v1/a/c/o')
        codes = [404] * (self.obj_ring.replicas +
                         self.obj_ring.max_more_nodes)
        with set_http_connect(*codes):
            resp = req.get_response(self.app)
        self.assertEquals(resp.status_int, 404)
    def test_DELETE_simple(self):
        req = swift.common.swob.Request.blank('/v1/a/c/o', method='DELETE')
        with set_http_connect(204, 204, 204):
            resp = req.get_response(self.app)
        self.assertEquals(resp.status_int, 204)
    def test_DELETE_missing_one(self):
        # one missing replica does not fail the DELETE quorum
        req = swift.common.swob.Request.blank('/v1/a/c/o', method='DELETE')
        with set_http_connect(404, 204, 204):
            resp = req.get_response(self.app)
        self.assertEquals(resp.status_int, 204)
    def test_DELETE_half_not_found_statuses(self):
        # with 4 replicas, a 2/2 split of 404/204 still resolves to success
        self.obj_ring.set_replicas(4)
        req = swift.common.swob.Request.blank('/v1/a/c/o', method='DELETE')
        with set_http_connect(404, 204, 404, 204):
            resp = req.get_response(self.app)
        self.assertEquals(resp.status_int, 204)
    def test_DELETE_half_not_found_headers_and_body(self):
        # Transformed responses have bogus bodies and headers, so make sure we
        # send the client headers and body from a real node's response.
        self.obj_ring.set_replicas(4)
        status_codes = (404, 404, 204, 204)
        bodies = ('not found', 'not found', '', '')
        headers = [{}, {}, {'Pick-Me': 'yes'}, {'Pick-Me': 'yes'}]
        req = swift.common.swob.Request.blank('/v1/a/c/o', method='DELETE')
        with set_http_connect(*status_codes, body_iter=bodies,
                              headers=headers):
            resp = req.get_response(self.app)
        self.assertEquals(resp.status_int, 204)
        self.assertEquals(resp.headers.get('Pick-Me'), 'yes')
        self.assertEquals(resp.body, '')
    def test_DELETE_not_found(self):
        # majority 404 -> 404 to the client
        req = swift.common.swob.Request.blank('/v1/a/c/o', method='DELETE')
        with set_http_connect(404, 404, 204):
            resp = req.get_response(self.app)
        self.assertEquals(resp.status_int, 404)
    def test_DELETE_handoff(self):
        # 507 (insufficient storage) pushes the request to a handoff node
        req = swift.common.swob.Request.blank('/v1/a/c/o', method='DELETE')
        codes = [204] * self.obj_ring.replicas
        with set_http_connect(507, *codes):
            resp = req.get_response(self.app)
        self.assertEquals(resp.status_int, 204)
    # --- POST (as COPY) and X-Delete-At / X-Delete-After handling ----------
    def test_POST_as_COPY_simple(self):
        # POST-as-COPY: HEAD every primary (plus handoff 404s), then PUT
        req = swift.common.swob.Request.blank('/v1/a/c/o', method='POST')
        head_resp = [200] * self.obj_ring.replicas + \
            [404] * self.obj_ring.max_more_nodes
        put_resp = [201] * self.obj_ring.replicas
        codes = head_resp + put_resp
        with set_http_connect(*codes):
            resp = req.get_response(self.app)
        self.assertEquals(resp.status_int, 202)
    def test_POST_delete_at(self):
        # a valid X-Delete-At must be forwarded to every object server
        # together with the expirer bookkeeping headers
        t = str(int(time.time() + 100))
        req = swob.Request.blank('/v1/a/c/o', method='POST',
                                 headers={'Content-Type': 'foo/bar',
                                          'X-Delete-At': t})
        post_headers = []
        def capture_headers(ip, port, device, part, method, path, headers,
                            **kwargs):
            if method == 'POST':
                post_headers.append(headers)
        x_newest_responses = [200] * self.obj_ring.replicas + \
            [404] * self.obj_ring.max_more_nodes
        post_resp = [200] * self.obj_ring.replicas
        codes = x_newest_responses + post_resp
        with set_http_connect(*codes, give_connect=capture_headers):
            resp = req.get_response(self.app)
        self.assertEquals(resp.status_int, 200)
        for given_headers in post_headers:
            self.assertEquals(given_headers.get('X-Delete-At'), t)
            self.assertTrue('X-Delete-At-Host' in given_headers)
            self.assertTrue('X-Delete-At-Device' in given_headers)
            self.assertTrue('X-Delete-At-Partition' in given_headers)
            self.assertTrue('X-Delete-At-Container' in given_headers)
    def test_POST_non_int_delete_after(self):
        # fractional X-Delete-After is rejected with a specific message
        t = str(int(time.time() + 100)) + '.1'
        req = swob.Request.blank('/v1/a/c/o', method='POST',
                                 headers={'Content-Type': 'foo/bar',
                                          'X-Delete-After': t})
        resp = req.get_response(self.app)
        self.assertEqual(resp.status_int, 400)
        self.assertEqual('Non-integer X-Delete-After', resp.body)
    def test_POST_negative_delete_after(self):
        req = swob.Request.blank('/v1/a/c/o', method='POST',
                                 headers={'Content-Type': 'foo/bar',
                                          'X-Delete-After': '-60'})
        resp = req.get_response(self.app)
        self.assertEqual(resp.status_int, 400)
        self.assertEqual('X-Delete-After in past', resp.body)
    def test_POST_delete_at_non_integer(self):
        t = str(int(time.time() + 100)) + '.1'
        req = swob.Request.blank('/v1/a/c/o', method='POST',
                                 headers={'Content-Type': 'foo/bar',
                                          'X-Delete-At': t})
        resp = req.get_response(self.app)
        self.assertEqual(resp.status_int, 400)
        self.assertEqual('Non-integer X-Delete-At', resp.body)
    def test_POST_delete_at_in_past(self):
        t = str(int(time.time() - 100))
        req = swob.Request.blank('/v1/a/c/o', method='POST',
                                 headers={'Content-Type': 'foo/bar',
                                          'X-Delete-At': t})
        resp = req.get_response(self.app)
        self.assertEqual(resp.status_int, 400)
        self.assertEqual('X-Delete-At in past', resp.body)
    def test_PUT_converts_delete_after_to_delete_at(self):
        # X-Delete-After is translated to an absolute X-Delete-At
        # (now + 60); time.time is pinned so the expected value is exact
        req = swob.Request.blank('/v1/a/c/o', method='PUT', body='',
                                 headers={'Content-Type': 'foo/bar',
                                          'X-Delete-After': '60'})
        put_headers = []
        def capture_headers(ip, port, device, part, method, path, headers,
                            **kwargs):
            if method == 'PUT':
                put_headers.append(headers)
        codes = [201] * self.obj_ring.replicas
        t = time.time()
        with set_http_connect(*codes, give_connect=capture_headers):
            with mock.patch('time.time', lambda: t):
                resp = req.get_response(self.app)
        self.assertEquals(resp.status_int, 201)
        expected_delete_at = str(int(t) + 60)
        for given_headers in put_headers:
            self.assertEquals(given_headers.get('X-Delete-At'),
                              expected_delete_at)
            self.assertTrue('X-Delete-At-Host' in given_headers)
            self.assertTrue('X-Delete-At-Device' in given_headers)
            self.assertTrue('X-Delete-At-Partition' in given_headers)
            self.assertTrue('X-Delete-At-Container' in given_headers)
    def test_PUT_non_int_delete_after(self):
        t = str(int(time.time() + 100)) + '.1'
        req = swob.Request.blank('/v1/a/c/o', method='PUT', body='',
                                 headers={'Content-Type': 'foo/bar',
                                          'X-Delete-After': t})
        with set_http_connect():
            resp = req.get_response(self.app)
        self.assertEqual(resp.status_int, 400)
        self.assertEqual('Non-integer X-Delete-After', resp.body)
    def test_PUT_negative_delete_after(self):
        req = swob.Request.blank('/v1/a/c/o', method='PUT', body='',
                                 headers={'Content-Type': 'foo/bar',
                                          'X-Delete-After': '-60'})
        with set_http_connect():
            resp = req.get_response(self.app)
        self.assertEqual(resp.status_int, 400)
        self.assertEqual('X-Delete-After in past', resp.body)
    def test_PUT_delete_at(self):
        # valid absolute X-Delete-At is forwarded with expirer headers
        t = str(int(time.time() + 100))
        req = swob.Request.blank('/v1/a/c/o', method='PUT', body='',
                                 headers={'Content-Type': 'foo/bar',
                                          'X-Delete-At': t})
        put_headers = []
        def capture_headers(ip, port, device, part, method, path, headers,
                            **kwargs):
            if method == 'PUT':
                put_headers.append(headers)
        codes = [201] * self.obj_ring.replicas
        with set_http_connect(*codes, give_connect=capture_headers):
            resp = req.get_response(self.app)
        self.assertEquals(resp.status_int, 201)
        for given_headers in put_headers:
            self.assertEquals(given_headers.get('X-Delete-At'), t)
            self.assertTrue('X-Delete-At-Host' in given_headers)
            self.assertTrue('X-Delete-At-Device' in given_headers)
            self.assertTrue('X-Delete-At-Partition' in given_headers)
            self.assertTrue('X-Delete-At-Container' in given_headers)
    def test_PUT_delete_at_non_integer(self):
        t = str(int(time.time() - 100)) + '.1'
        req = swob.Request.blank('/v1/a/c/o', method='PUT', body='',
                                 headers={'Content-Type': 'foo/bar',
                                          'X-Delete-At': t})
        with set_http_connect():
            resp = req.get_response(self.app)
        self.assertEqual(resp.status_int, 400)
        self.assertEqual('Non-integer X-Delete-At', resp.body)
    def test_PUT_delete_at_in_past(self):
        t = str(int(time.time() - 100))
        req = swob.Request.blank('/v1/a/c/o', method='PUT', body='',
                                 headers={'Content-Type': 'foo/bar',
                                          'X-Delete-At': t})
        with set_http_connect():
            resp = req.get_response(self.app)
        self.assertEqual(resp.status_int, 400)
        self.assertEqual('X-Delete-At in past', resp.body)
    # --- container-sync X-Timestamp semantics, COPY, HEAD, log_info --------
    # NOTE(review): these tests use the Python-2-only `ts.next()` generator
    # method (would be `next(ts)` in 2/3-compatible code) and mutate the
    # class-level container_info dict in place.
    def test_container_sync_put_x_timestamp_not_found(self):
        # object absent everywhere: the sync PUT is accepted as a new write
        test_indexes = [None] + [int(p) for p in POLICIES]
        for policy_index in test_indexes:
            self.container_info['storage_policy'] = policy_index
            put_timestamp = utils.Timestamp(time.time()).normal
            req = swob.Request.blank(
                '/v1/a/c/o', method='PUT', headers={
                    'Content-Length': 0,
                    'X-Timestamp': put_timestamp})
            ts_iter = itertools.repeat(put_timestamp)
            head_resp = [404] * self.obj_ring.replicas + \
                [404] * self.obj_ring.max_more_nodes
            put_resp = [201] * self.obj_ring.replicas
            codes = head_resp + put_resp
            with set_http_connect(*codes, timestamps=ts_iter):
                resp = req.get_response(self.app)
            self.assertEqual(resp.status_int, 201)
    def test_container_sync_put_x_timestamp_match(self):
        # stored timestamp equals the incoming one: 202, no backend PUT
        test_indexes = [None] + [int(p) for p in POLICIES]
        for policy_index in test_indexes:
            self.container_info['storage_policy'] = policy_index
            put_timestamp = utils.Timestamp(time.time()).normal
            req = swob.Request.blank(
                '/v1/a/c/o', method='PUT', headers={
                    'Content-Length': 0,
                    'X-Timestamp': put_timestamp})
            ts_iter = itertools.repeat(put_timestamp)
            head_resp = [200] * self.obj_ring.replicas + \
                [404] * self.obj_ring.max_more_nodes
            codes = head_resp
            with set_http_connect(*codes, timestamps=ts_iter):
                resp = req.get_response(self.app)
            self.assertEqual(resp.status_int, 202)
    def test_container_sync_put_x_timestamp_older(self):
        # incoming timestamp older than what's stored: 202, no backend PUT
        ts = (utils.Timestamp(t) for t in itertools.count(int(time.time())))
        test_indexes = [None] + [int(p) for p in POLICIES]
        for policy_index in test_indexes:
            self.container_info['storage_policy'] = policy_index
            req = swob.Request.blank(
                '/v1/a/c/o', method='PUT', headers={
                    'Content-Length': 0,
                    'X-Timestamp': ts.next().internal})
            ts_iter = itertools.repeat(ts.next().internal)
            head_resp = [200] * self.obj_ring.replicas + \
                [404] * self.obj_ring.max_more_nodes
            codes = head_resp
            with set_http_connect(*codes, timestamps=ts_iter):
                resp = req.get_response(self.app)
            self.assertEqual(resp.status_int, 202)
    def test_container_sync_put_x_timestamp_newer(self):
        # incoming timestamp newer than stored: the PUT goes through
        ts = (utils.Timestamp(t) for t in itertools.count(int(time.time())))
        test_indexes = [None] + [int(p) for p in POLICIES]
        for policy_index in test_indexes:
            orig_timestamp = ts.next().internal
            req = swob.Request.blank(
                '/v1/a/c/o', method='PUT', headers={
                    'Content-Length': 0,
                    'X-Timestamp': ts.next().internal})
            ts_iter = itertools.repeat(orig_timestamp)
            head_resp = [200] * self.obj_ring.replicas + \
                [404] * self.obj_ring.max_more_nodes
            put_resp = [201] * self.obj_ring.replicas
            codes = head_resp + put_resp
            with set_http_connect(*codes, timestamps=ts_iter):
                resp = req.get_response(self.app)
            self.assertEqual(resp.status_int, 201)
    def test_container_sync_delete(self):
        # backends report 409 (conflict) for the sync DELETE -> 409
        ts = (utils.Timestamp(t) for t in itertools.count(int(time.time())))
        test_indexes = [None] + [int(p) for p in POLICIES]
        for policy_index in test_indexes:
            req = swob.Request.blank(
                '/v1/a/c/o', method='DELETE', headers={
                    'X-Timestamp': ts.next().internal})
            codes = [409] * self.obj_ring.replicas
            ts_iter = itertools.repeat(ts.next().internal)
            with set_http_connect(*codes, timestamps=ts_iter):
                resp = req.get_response(self.app)
            self.assertEqual(resp.status_int, 409)
    def test_put_x_timestamp_conflict(self):
        # one 409 among 201s does not break the PUT quorum
        ts = (utils.Timestamp(t) for t in itertools.count(int(time.time())))
        req = swob.Request.blank(
            '/v1/a/c/o', method='PUT', headers={
                'Content-Length': 0,
                'X-Timestamp': ts.next().internal})
        head_resp = [404] * self.obj_ring.replicas + \
            [404] * self.obj_ring.max_more_nodes
        put_resp = [409] + [201] * self.obj_ring.replicas
        codes = head_resp + put_resp
        with set_http_connect(*codes):
            resp = req.get_response(self.app)
        self.assertEqual(resp.status_int, 201)
    def test_COPY_simple(self):
        # COPY = GET/HEAD of the source followed by PUT of the destination
        req = swift.common.swob.Request.blank(
            '/v1/a/c/o', method='COPY',
            headers={'Content-Length': 0,
                     'Destination': 'c/o-copy'})
        head_resp = [200] * self.obj_ring.replicas + \
            [404] * self.obj_ring.max_more_nodes
        put_resp = [201] * self.obj_ring.replicas
        codes = head_resp + put_resp
        with set_http_connect(*codes):
            resp = req.get_response(self.app)
        self.assertEquals(resp.status_int, 201)
    def test_HEAD_simple(self):
        req = swift.common.swob.Request.blank('/v1/a/c/o', method='HEAD')
        with set_http_connect(200):
            resp = req.get_response(self.app)
        self.assertEquals(resp.status_int, 200)
    def test_HEAD_x_newest(self):
        # X-Newest makes the proxy query every replica, not just the first
        req = swift.common.swob.Request.blank('/v1/a/c/o', method='HEAD',
                                              headers={'X-Newest': 'true'})
        with set_http_connect(200, 200, 200):
            resp = req.get_response(self.app)
        self.assertEquals(resp.status_int, 200)
    def test_PUT_log_info(self):
        # x-copy-from on a PUT is recorded in swift.log_info ...
        req = swift.common.swob.Request.blank('/v1/a/c/o', method='PUT')
        req.headers['x-copy-from'] = 'some/where'
        req.headers['Content-Length'] = 0
        # override FakeConn default resp headers to keep log_info clean
        resp_headers = {'x-delete-at': None}
        head_resp = [200] * self.obj_ring.replicas + \
            [404] * self.obj_ring.max_more_nodes
        put_resp = [201] * self.obj_ring.replicas
        codes = head_resp + put_resp
        with set_http_connect(*codes, headers=resp_headers):
            resp = req.get_response(self.app)
        self.assertEqual(resp.status_int, 201)
        self.assertEquals(
            req.environ.get('swift.log_info'), ['x-copy-from:some/where'])
        # and then check that we don't do that for originating POSTs
        req = swift.common.swob.Request.blank('/v1/a/c/o')
        req.method = 'POST'
        req.headers['x-copy-from'] = 'else/where'
        with set_http_connect(*codes, headers=resp_headers):
            resp = req.get_response(self.app)
        self.assertEqual(resp.status_int, 202)
        self.assertEquals(req.environ.get('swift.log_info'), None)
@patch_policies([
    StoragePolicy(0, 'zero', True),
    StoragePolicy(1, 'one'),
    StoragePolicy(2, 'two'),
])
class TestObjControllerLegacyCache(TestObjController):
    """
    This test pretends like memcache returned a stored value that should
    resemble whatever "old" format. It catches KeyErrors you'd get if your
    code was expecting some new format during a rolling upgrade.
    """
    # Deliberately omits keys present in the parent's container_info
    # (partition, nodes, storage_policy, ...) to mimic the legacy cache
    # format; all inherited tests re-run against this reduced dict.
    container_info = {
        'read_acl': None,
        'write_acl': None,
        'sync_key': None,
        'versions': None,
    }
if __name__ == '__main__':
    # allow running this test module directly
    unittest.main()
| heemanshu/swift_juno | test/unit/proxy/controllers/test_obj.py | Python | apache-2.0 | 25,863 |
#!/usr/bin/env python
from pandas_ml.core.accessor import _AccessorMethods, _attach_methods, _wrap_data_func
class NeighborsMethods(_AccessorMethods):
    """
    Accessor to ``sklearn.neighbors``.
    """
    # module whose callables the accessor machinery resolves at runtime
    _module_name = 'sklearn.neighbors'
# sklearn.neighbors functions exposed as data-wrapping accessor methods
_neighbor_methods = ['kneighbors_graph', 'radius_neighbors_graph']
_attach_methods(NeighborsMethods, _wrap_data_func, _neighbor_methods)
| sinhrks/pandas-ml | pandas_ml/skaccessors/neighbors.py | Python | bsd-3-clause | 403 |
#!/usr/bin/env python3
'''
5.5-score_sequences.py
This script scores the rarity for each position in a set of antibody sequences
and identify those qualifying as "extremely rare."
Usage: 5.5-score_sequences.py QVQLVQ... -v IGHV1-2 [ --gssp GSSP.txt ] [options]
5.5-score_sequences.py -f input.fasta [ -v IGHV1-2 --gssp GSSP.txt -a ] [options]
5.5-score_sequences.py -r rearrangements.tsv [ --gssp GSSP.txt ] [options]
Options:
QVQLVQ Amino acid sequence of one or more antibodies to score.
-f input.fasta Fasta file containing sequences to score.
-r rearrangements.tsv AIRR-formatted rearrangments with sequences to score. Assumes -n.
-v IGHV1-2 V gene of the input sequences. If not provided, will be extracted
from the 'V_gene=' or 'v_call=' tag in the fasta def line or
the 'v_call' field of a rearrangements file.
--gssp GSSP.txt File with GSSPs to use for scoring the sequences.
[default: <SONAR>/sample_data/GSSPs/Sheng2017_VH_GSSPs.txt]
-a Flag to indicate that input sequences are already aligned to
germline v, which must be included in the file as the first
sequence. Require '-v' to be specified, as well, because
I'm too lazy to code sanity checking. [default: False]
-n Flag to indicate that input sequences are nucleotide and must
be translated. [default: False]
--germ germline.fa File with germline sequences, used for aligning input before
scoring. [default: <SONAR>/germDB/IgHKLV_cysTruncated.AA.fa]
--rare .995 Threshold for what counts as 'extremely rare'. [default: 0.995]
--lineage Instead of reporting rare substitutions in each sequence, report
those that appear in at least `--threshold` percent of all
input sequences. [default: False]
--threshold 50 If `--lineage` is specified, the percent of all input sequences
that must contain a rare substitution for it to be reported.
[default: 50]
Created by Chaim A Schramm on 2019-02-22.
Copyright (c) 2019, Vaccine Research Center, National Institutes of Health, USA.
All rights reserved.
'''
import sys, csv, re
from docopt import docopt
from Bio import Seq, SeqIO
import airr
from collections import defaultdict
try:
from SONAR.mGSSP import *
except ImportError:
find_SONAR = sys.argv[0].split("SONAR/mGSSP")
sys.path.append(find_SONAR[0])
from SONAR.mGSSP import *
def checkGermSeq( gene, lib ):
	"""Return True when the *01 allele of `gene` is present in germline dict `lib`;
	otherwise report the problem on stderr and return False."""
	allele = gene + "*01"
	if allele in lib:
		return True
	print( "Can't find sequence of %s*01 in database %s" % (gene,arguments['--germ']), file=sys.stderr )
	return False
def checkGSSP( gene, lib ):
	"""Return True when a GSSP exists for `gene` in `lib`; otherwise warn on
	stderr and return False."""
	if gene in lib:
		return True
	print( "Can't find a GSSP for %s in database %s" % (gene,arguments['--gssp']), file=sys.stderr )
	return False
def score( sequence, germline, v_rarity ):
	# Align `sequence` against `germline` and return the list of substitutions
	# (formatted like "S31R", 1-based germline position) whose GSSP rarity
	# meets the --rare threshold.  Relies on the module-global `arguments`
	# dict produced by docopt in the __main__ block.
	if arguments['-n']:
		# nucleotide input: strip gaps (unless prealigned) and translate
		if not arguments['-a']:
			sequence = re.sub("-","",sequence)
		sequence = str(Seq.Seq(sequence).translate(table=GAPPED_CODON_TABLE))
	if arguments['-a']:
		# caller guarantees sequence is already aligned to the germline
		align = dict( ref=germline, test=sequence )
	else:
		align = quickAlign( germline, sequence )
	rare = []
	ind=0
	for r,t in zip(align['ref'],align['test']):
		# gaps in the reference do not advance the germline position counter
		if r=="-":
			continue
		if ind in v_rarity and t in v_rarity[ind]['mutants'] and v_rarity[ind]['mutants'][t]['average'] >= arguments['--rare']:
			rare.append( f"{r}{ind+1}{t}" )
		ind += 1
	return rare
def main():
	# Score sequences from one of three input modes (-r rearrangements,
	# -f fasta, or positional amino-acid strings) and report rare
	# substitutions, either per-sequence or aggregated per-lineage.
	#start by reading in the GSSP
	gssp = GSSP( arguments['--gssp'] )
	gssp.computeRarity()
	#now get germline genes
	germDB = load_fastas( arguments['--germ'] )
	# sequence id -> list of rare substitutions found in that sequence
	rareSubs = dict()
	if arguments['-r'] is not None:
		# AIRR rearrangement mode: V gene comes from the v_call column
		for seq in airr.read_rearrangement( arguments['-r'] ):
			gl = re.sub("\*.*","",seq['v_call'])
			if checkGermSeq(gl, germDB) and checkGSSP(gl, gssp.rarity):
				rareSubs[ seq['sequence_id'] ] = score( seq['sequence_alignment'], germDB[gl+"*01"], gssp.rarity[gl] )
	elif arguments['-f'] is not None:
		#if there's a global V gene, check it
		if arguments['-v'] is not None:
			if not checkGermSeq(arguments['-v'], germDB) or not checkGSSP(arguments['-v'], gssp.rarity):
				sys.exit(1)
		#set up incase it's a prealigned file
		alignedV = None
		for seq in generate_read_fasta( arguments['-f'] ):
			#if aligned, then first seq is germline
			if arguments['-a'] and alignedV is None:
				alignedV = seq.seq
				if arguments['-n']:
					alignedV = alignedV.translate(table=GAPPED_CODON_TABLE)
				alignedV = str(alignedV)
				continue
			#score all other sequences
			if arguments['-v'] is not None:
				if arguments['-a']:
					rareSubs[ seq.id ] = score( str(seq.seq), alignedV, gssp.rarity[arguments['-v']] )
				else:
					rareSubs[ seq.id ] = score( str(seq.seq), germDB[arguments['-v']+"*01"], gssp.rarity[arguments['-v']] )
			else:
				# no global V gene: look for a per-sequence annotation tag
				gl = re.search("(v_call|V_gene)=([^\*\s]+)", seq.description)
				if gl:
					if checkGermSeq(gl.group(2), germDB) and checkGSSP(gl.group(2), gssp.rarity):
						rareSubs[ seq.id ] = score( str(seq.seq), germDB[gl.group(2)+"*01"], gssp.rarity[gl.group(2)] )
				else:
					print("Could not find V gene annotation for %s, skipping..." % seq.id, file=sys.stderr)
					continue
	else:
		# command-line sequence mode: requires a valid -v gene
		if checkGermSeq(arguments['-v'], germDB) and checkGSSP(arguments['-v'], gssp.rarity):
			for sequence in arguments['QVQLVQ']:
				rareSubs[ sequence ] = score( sequence, germDB[arguments['-v']+"*01"], gssp.rarity[arguments['-v']] )
		else:
			sys.exit(1)
	#now do output
	count = 0
	if arguments['--lineage']:
		# invert the mapping: substitution -> sequences carrying it, and
		# report those present in at least --threshold percent of inputs
		reverse_dict = defaultdict(list)
		for seq in rareSubs:
			for sub in rareSubs[seq]:
				reverse_dict[ sub ].append( seq )
		for sub in sorted(reverse_dict.keys(), key=lambda x: int(re.search("(\d+)",x).group(1))):
			if 100*len(reverse_dict[sub])/len(rareSubs) >= arguments['--threshold']:
				print(sub)
				count +=1
	else:
		for seq in rareSubs:
			if len(rareSubs[seq]) > 0:
				print( seq + ": " + ",".join(rareSubs[seq]) )
				count +=1
	if count == 0:
		print( "No rare substitutions were found")
if __name__ == "__main__":
	arguments = docopt(__doc__)
	# expand the "<SONAR>" placeholder in default paths to the install folder
	arguments['--gssp'] = re.sub( "<SONAR>", SCRIPT_FOLDER, arguments['--gssp'] )
	arguments['--germ'] = re.sub( "<SONAR>", SCRIPT_FOLDER, arguments['--germ'] )
	arguments['--rare'] = float( arguments['--rare'] )
	arguments['--threshold'] = float( arguments['--threshold'] )
	# rearrangement input implies nucleotide sequences (see usage notes)
	if arguments['-r'] is not None:
		arguments['-n'] = True
	if arguments['-a'] and arguments['-v'] is None:
		sys.exit( "Use of the `-a` flag requires the `-v` option to be specified." )
	#log command line
	logCmdLine(sys.argv)
	main()
| scharch/zap | mGSSP/5.5-score_sequences.py | Python | gpl-3.0 | 7,004 |
# -*- coding: utf-8 -*-
"""systemd_cleanup command.
@author: Tobias Hunger <[email protected]>
"""
from cleanroom.command import Command
from cleanroom.exceptions import GenerateError
from cleanroom.location import Location
from cleanroom.printer import trace
from cleanroom.systemcontext import SystemContext
import os
import shutil
import typing
def _map_base(old_base: str, new_base: str, input_path: str) -> typing.Tuple[str, str]:
assert old_base.endswith("/")
input_path = os.path.normpath(input_path)
if not input_path.startswith(old_base):
return input_path, input_path
input_relative_to_oldbase = input_path[len(old_base) :]
output = os.path.join(new_base, input_relative_to_oldbase)
return output, input_path
def _map_target_link(
old_base: str, new_base: str, link: str, link_target: str
) -> typing.Tuple[str, str]:
assert old_base.endswith("/")
assert new_base.endswith("/")
assert link.startswith(old_base)
link_directory = os.path.dirname(link)
(link, _) = _map_base(old_base, new_base, link)
(link_target, _) = _map_base(
old_base, new_base, os.path.join(link_directory, link_target)
)
if link_target.startswith(new_base):
relative_link_target = os.path.relpath(link_target, os.path.dirname(link))
return link, relative_link_target
return link, link_target
def _map_host_link(
root_directory: str, old_base: str, new_base: str, link: str, link_target: str
):
assert root_directory.endswith("/")
assert old_base.startswith(root_directory)
assert new_base.startswith(root_directory)
assert old_base.endswith("/")
assert new_base.endswith("/")
assert link.startswith(old_base)
assert not link_target.startswith(root_directory)
root_directory_length = len(root_directory) - 1 # minus last '/'
host_old_base = old_base[root_directory_length:]
host_new_base = new_base[root_directory_length:]
host_link = link[root_directory_length:]
(host_link, link_target) = _map_target_link(
host_old_base, host_new_base, host_link, link_target
)
assert os.path.isabs(host_link)
return os.path.join(root_directory, host_link[1:]), link_target
def _move_symlink(
location: Location,
system_context: SystemContext,
old_base: str,
new_base: str,
link: str,
):
"""Move a symlink."""
root_directory = system_context.fs_directory + "/"
link_target = os.readlink(link)
# normalize to /usr/lib...
if link_target.startswith("/lib/"):
link_target = f"/usr{link_target}"
(output_link, output_link_target) = _map_host_link(
root_directory, old_base, new_base, link, link_target
)
trace(f"Moving link {link}->{link_target}: {output_link} to {output_link_target}")
os.makedirs(os.path.dirname(output_link), mode=0o755, exist_ok=True)
if not os.path.isdir(os.path.dirname(output_link)):
raise GenerateError(
f'"{output_link}" is no directory when trying to move "{link}" into /usr.',
location=location,
)
if os.path.exists(output_link):
if not os.path.islink(output_link):
raise GenerateError(
f'"{output_link}" exists and is not a link when trying to move "{link}" into /usr.',
location=location,
)
else:
old_link_target = os.readlink(output_link)
if old_link_target != output_link_target:
raise GenerateError(
f'"{link}" exists but points to "{old_link_target}" when "{output_link_target}" was expected.',
location=location,
)
else:
os.unlink(link)
return # Already correct
else:
os.symlink(output_link_target, output_link)
os.unlink(link)
def _move_file(location: Location, old_base: str, new_base: str, path: str):
"""Move a file."""
path_dir = os.path.dirname(path)
path_name = os.path.basename(path)
new_dir = _map_base(old_base, new_base, path_dir)[0]
if os.path.exists(new_dir) and not os.path.isdir(new_dir):
raise GenerateError(
f'"{new_dir}" is not a directory when moving "{path}".', location=location,
)
if not os.path.exists(new_dir):
os.makedirs(new_dir, 0o755)
new_path = os.path.join(new_dir, path_name)
if os.path.exists(new_path):
raise GenerateError(
f'"{new_path}" already exists when moving "{path}".', location=location,
)
shutil.copyfile(path, new_path)
class SystemdCleanupCommand(Command):
"""The systemd_cleanup command."""
def __init__(self, **services: typing.Any) -> None:
"""Constructor."""
super().__init__(
"systemd_cleanup",
help_string="Make sure /etc/systemd/system is empty by "
"moving files and links to the appropriate /usr "
"directory.",
file=__file__,
**services,
)
def validate(
self, location: Location, *args: typing.Any, **kwargs: typing.Any
) -> None:
"""Validate the arguments."""
self._validate_no_arguments(location, *args, **kwargs)
def __call__(
self,
location: Location,
system_context: SystemContext,
*args: typing.Any,
**kwargs: typing.Any,
) -> None:
"""Execute command."""
old_base = system_context.file_name("/etc/systemd/system") + "/"
new_base = system_context.file_name("/usr/lib/systemd/system") + "/"
trace("walking:", old_base)
for root, _, files in os.walk(old_base):
for f in files:
full_path = os.path.join(root, f)
trace("Checking", full_path)
if os.path.islink(full_path):
trace("Moving link", full_path)
_move_symlink(
location, system_context, old_base, new_base, full_path
)
else:
trace("Moving file", full_path)
_move_file(location, old_base, new_base, full_path)
self._execute(
location.next_line(),
system_context,
"remove",
"/etc/systemd/system/*",
recursive=True,
force=True,
)
| hunger/cleanroom | cleanroom/commands/systemd_cleanup.py | Python | gpl-3.0 | 6,420 |
from hashlib import md5
from base64 import (b64encode, b64decode)
from Crypto.Cipher import DES, DES3
class PBECipher(object):
    """Base class for password-based-encryption ciphers.

    Provides PKCS#5 padding/unpadding and encrypt/decrypt helpers; a
    subclass must set ``self.cipher`` to a block cipher with 8-byte blocks
    before ``encrypt``/``decrypt`` are used.
    """

    def pad(self, data):
        """Append PKCS#5 padding: n bytes of value n, n = 8 - len(data) % 8.

        When the input is already block-aligned a full block of chr(8) is
        added, as PKCS#5 requires.  (The original code had an unreachable
        ``if n == 0`` branch -- n is always in 1..8 -- which has been
        removed; behavior is unchanged.)
        """
        n = 8 - (len(data) % 8)
        return data + chr(n) * n

    def unpad(self, data):
        """Strip PKCS#5 padding: the last byte encodes the pad length."""
        n = ord(data[-1])
        return data[:-n]

    def encrypt(self, data):
        """Pad *data* and encrypt it with the subclass-provided cipher."""
        return self.cipher.encrypt(self.pad(data))

    def decrypt(self, data):
        """Decrypt *data* with the subclass-provided cipher and unpad it."""
        return self.unpad(self.cipher.decrypt(data))
class PBEWithMD5AndDES(PBECipher):
    """Password-based cipher: iterated-MD5 key derivation feeding DES-CBC."""

    def __init__(self, salt, pw, iterations):
        self.pkcs5(salt, pw, iterations)
        self.cipher = DES.new(self.key, DES.MODE_CBC, IV=self.iv)

    def pkcs5(self, salt, pw, iterations):
        # Repeatedly hash password||salt; the 16-byte digest is split into
        # an 8-byte DES key and an 8-byte IV.
        state = pw + salt
        for _ in range(iterations):
            state = md5(state).digest()
        self.key, self.iv = state[:8], state[8:]
class PBEWithMD5AndTripleDES(PBECipher):
    """Password-based cipher: two-track iterated-MD5 derivation feeding 3DES-CBC."""

    def __init__(self, salt, pw, iterations):
        self.pkcs5(salt, pw, iterations)
        self.cipher = DES3.new(self.key, DES3.MODE_CBC, IV=self.iv)

    def pkcs5(self, salt, pw, iterations):
        # Split the salt into two 4-byte halves; if they are identical the
        # first half is reversed, then each half is iteratively hashed with
        # the password.  Key = digest_a + first 8 bytes of digest_b; the
        # remaining 8 bytes of digest_b become the IV.
        first, second = salt[:4], salt[4:]
        if first == second:
            first = first[::-1]
        for _ in range(iterations):
            first = md5(first + pw).digest()
            second = md5(second + pw).digest()
        self.key = first + second[:8]
        self.iv = second[8:]
| danieleandreatta/PyPBE | PyPBE/__init__.py | Python | bsd-2-clause | 1,453 |
# Copyright 2015-present The Scikit Flow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import shutil
from sklearn import datasets, metrics, cross_validation
from tensorflow.contrib import learn
iris = datasets.load_iris()
# 80/20 train/test split, seeded for reproducibility
X_train, X_test, y_train, y_test = cross_validation.train_test_split(iris.data, iris.target,
    test_size=0.2, random_state=42)
# train a linear classifier over the 3 iris classes and report its accuracy
classifier = learn.TensorFlowLinearClassifier(n_classes=3)
classifier.fit(X_train, y_train)
score = metrics.accuracy_score(y_test, classifier.predict(X_test))
print('Accuracy: {0:f}'.format(score))
# Clean checkpoint folder if exists
try:
    shutil.rmtree('/tmp/skflow_examples/iris_custom_model')
except OSError:
    pass
# Save model, parameters and learned variables.
classifier.save('/tmp/skflow_examples/iris_custom_model')
classifier = None
## Restore everything
new_classifier = learn.TensorFlowEstimator.restore('/tmp/skflow_examples/iris_custom_model')
# accuracy should match the pre-save score if save/restore round-tripped
score = metrics.accuracy_score(y_test, new_classifier.predict(X_test))
print('Accuracy: {0:f}'.format(score))
| sachinpro/sachinpro.github.io | tensorflow/examples/skflow/iris_save_restore.py | Python | apache-2.0 | 1,677 |
"""Simple W demo -- shows how to make a window, and bind a function to a "key" event."""
import W
# key callback function
def tester(char, event):
text = "%r\r%d\r%s\r%s" % (char, ord(char), hex(ord(chart)), oct(ord(char)))
window.keys.set(text)
# close callback
def close():
    # dismiss the dialog; bound to the Done button below
    window.close()
# new window (a modal-style W Dialog, 180x100)
window = W.Dialog((180, 100), "Type a character")
# make a frame (a simple rectangle)
window.frame = W.Frame((5, 5, -5, -33))
# some labels, static text (\r-separated lines)
window.captions = W.TextBox((10, 9, 43, -36), "char:\rdecimal:\rhex:\roctal:")
# another static text box; tester() writes the character info here
window.keys = W.TextBox((60, 9, 40, -36))
# a button
window.button = W.Button((-69, -24, 60, 16), "Done", close)
# bind the callbacks: any key goes to tester, cmd-W pushes the Done button
window.bind("<key>", tester)
window.bind("cmdw", window.button.push)
# open the window
window.open()
| xbmc/atv2 | xbmc/lib/libPython/Python/Mac/IDE scripts/Widget demos/KeyTester.py | Python | gpl-2.0 | 819 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Adds the PlayListItem.playlist foreign key (related_name='items').
    # default=1 back-fills existing rows during the migration;
    # preserve_default=False drops that default from the model afterwards.
    dependencies = [
        ('player', '0002_auto_20150221_2333'),
    ]
    operations = [
        migrations.AddField(
            model_name='playlistitem',
            name='playlist',
            field=models.ForeignKey(related_name='items', default=1, to='player.PlayList'),
            preserve_default=False,
        ),
    ]
| crash843/share-fm | backend/share_fm/player/migrations/0003_playlistitem_playlist.py | Python | gpl-2.0 | 484 |
# -*- coding: utf-8 -*-
'''
Set of tools to manage images in base64
@author: Laurent GAY
@organization: sd-libre.fr
@contact: [email protected]
@copyright: 2015 sd-libre.fr
@license: This file is part of Lucterios.
Lucterios is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Lucterios is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Lucterios. If not, see <http://www.gnu.org/licenses/>.
'''
from __future__ import unicode_literals
from lxml import etree, objectify
from lxml.etree import XMLSyntaxError
from base64 import b64encode, b64decode
from os.path import join, exists, dirname, isfile
from os import makedirs, environ
from hashlib import md5
from _io import BytesIO
import io
from django.utils import six
from lucterios.framework.tools import get_binay
BASE64_PREFIX = 'data:image/*;base64,'
def read_file(filepath):
    """Return the raw bytes of the file at *filepath*."""
    handle = io.open(filepath, mode='rb')
    try:
        return handle.read()
    finally:
        handle.close()
def save_file(file_path, data):
    """Write *data* to *file_path* as UTF-8 encoded text.

    *data* may be a text string or UTF-8 encoded bytes.

    Bug fixed: the old code tried ``savefile.write(data.encode('utf-8'))``
    on a text-mode file — under Python 3 that always raises (bytes into a
    text stream), so the try branch was dead, and actual ``bytes`` input
    crashed (``bytes.encode`` AttributeError, then TypeError on the
    fallback write). Decoding up-front handles both input types.
    """
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    with io.open(file_path, mode="w", encoding='utf-8') as savefile:
        savefile.write(data)
def get_tmp_dir():
    """Return the path of the application's temporary directory, creating it
    if needed.

    Falls back to '/tmp' when no Django settings module is configured;
    otherwise uses a 'tmp' sub-directory next to the settings package.
    """
    from django.conf import settings
    if not environ.get("DJANGO_SETTINGS_MODULE"):
        tmp_path = '/tmp'
    else:
        # Directory of the settings package: <BASE_DIR>/<project>/tmp
        setting_path = join(
            settings.BASE_DIR, settings.SETTINGS_MODULE.split('.')[0])
        tmp_path = join(setting_path, 'tmp')
    if not exists(tmp_path):
        makedirs(tmp_path)
    return tmp_path
def get_user_dir():
    """Return Django's MEDIA_ROOT, creating the directory on first use."""
    from django.conf import settings
    media_root = settings.MEDIA_ROOT
    if not exists(media_root):
        makedirs(media_root)
    return media_root
def md5sum(filename):
    """Return the hex MD5 digest of *filename*, resolved inside the user
    directory (see get_user_dir)."""
    digest = md5()
    with open(join(get_user_dir(), filename), 'rb') as readfile:
        digest.update(readfile.read())
    return digest.hexdigest()
def get_user_path(rootpath, filename):
    """Return the path of *filename* under the *rootpath* sub-directory of
    the user directory, creating the sub-directory when missing."""
    base_dir = join(get_user_dir(), rootpath)
    if not exists(base_dir):
        makedirs(base_dir)
    return join(base_dir, filename)
def readimage_to_base64(file_path, with_prefix=True):
    """Return the content of *file_path* encoded in base 64.

    When *with_prefix* is True the result carries the data-URL marker
    BASE64_PREFIX in front of the payload.
    """
    with open(file_path, "rb") as image_file:
        encoded = b64encode(image_file.read())
    if with_prefix:
        return get_binay(BASE64_PREFIX) + encoded
    return encoded
def save_from_base64(base64stream):
    """Decode *base64stream* and write it to a file in the temp directory.

    Two input layouts are supported:
    * '<BASE64_PREFIX><payload>': the file name is the MD5 of the payload
      with a '.jpg' extension;
    * '<filename>;<payload>': the file name is given before the payload.

    Returns the path of the written file.
    """
    if base64stream[:len(BASE64_PREFIX)] == BASE64_PREFIX:
        stream = base64stream[len(BASE64_PREFIX):]
        md5res = md5()
        # NOTE(review): md5.update() requires bytes on Python 3 — this
        # branch assumes *base64stream* is a byte string; confirm callers.
        md5res.update(stream)
        file_name = md5res.hexdigest() + ".jpg"
    else:
        file_name, stream = base64stream.split(";")
    file_path = join(get_tmp_dir(), file_name)
    with open(file_path, "wb") as image_tmp:
        image_tmp.write(b64decode(stream))
    return file_path
def open_from_base64(base64stream):
    """Return a BytesIO wrapping the decoded payload of *base64stream*.

    Accepts either '<BASE64_PREFIX><payload>' or '<name>;<payload>'.
    """
    prefix_len = len(BASE64_PREFIX)
    if base64stream[:prefix_len] == BASE64_PREFIX:
        payload = base64stream[prefix_len:]
    else:
        _, payload = base64stream.split(";")
    return BytesIO(b64decode(payload))
def open_image_resize(filep, max_width, max_height):
    """Open the image in *filep* and shrink it to fit max_width x max_height.

    Aspect ratio is preserved; an image already small enough is returned
    unchanged. Returns a PIL Image.
    """
    from PIL import Image
    image = Image.open(filep)
    width, height = image.size
    x_ratio = (max_width * 1.0) / width
    y_ratio = (max_height * 1.0) / height
    if (width > max_width) or (height > max_height):
        if (x_ratio * height) < max_height:
            # Width is the limiting dimension: scale by x_ratio.
            tn_height = int(x_ratio * height)
            tn_width = int(max_width)
        else:
            # Height is the limiting dimension: scale by y_ratio.
            tn_width = int(y_ratio * width)
            tn_height = int(max_height)
        image = image.resize((tn_width, tn_height))
    return image
def get_image_absolutepath(icon_path):
    """Resolve *icon_path* to a path on disk.

    Order of attempts: the path as-is; then, for paths under STATIC_URL,
    the matching file in STATIC_ROOT; finally the file inside the Python
    package named by the first component of the static-relative path.
    The input is returned unchanged when nothing resolves.
    """
    if isfile(icon_path):
        return icon_path
    from django.conf import settings
    if icon_path.startswith(settings.STATIC_URL):
        # Path relative to the static prefix, e.g. 'lucterios/images/x.png'
        tmp_icon_path = icon_path[len(settings.STATIC_URL):]
        if isfile(join(settings.STATIC_ROOT, tmp_icon_path)):
            icon_path = join(settings.STATIC_ROOT, tmp_icon_path)
        else:
            if icon_path[0] == '/':
                icon_path = icon_path[1:]
            sub_path = tmp_icon_path.split('/')
            root_dir = sub_path[0]
            try:
                from importlib import import_module
                # Locate the package on disk and join the original path to
                # its parent directory.
                module = import_module(root_dir)
                icon_path = join(dirname(module.__file__), icon_path)
            except Exception:
                # Package not importable: fall through with the path as-is.
                pass
    return icon_path
def get_image_size(image_path):
    """Return (width, height) of an image given as a path or base64 stream."""
    from PIL import Image
    filep = None
    try:
        if image_path[:len(BASE64_PREFIX)] == BASE64_PREFIX:
            # Base64 data-URL input: decode it into an in-memory stream.
            filep = open_from_base64(image_path)
        else:
            filep = open(get_image_absolutepath(image_path), "rb")
        image = Image.open(filep)
        width, height = image.size
    finally:
        # Always release the underlying stream, even when PIL fails.
        if filep is not None:
            filep.close()
    return width, height
def xml_validator(some_xml_string, xsd_file):
    """Validate *some_xml_string* against the XSD schema in *xsd_file*.

    Returns None when the document validates, otherwise the parser error
    message as text.
    """
    try:
        schema = etree.XMLSchema(file=xsd_file)
        parser = objectify.makeparser(schema=schema)
        objectify.fromstring(some_xml_string, parser)
        return None
    except XMLSyntaxError as xml_error:
        return six.text_type(xml_error)
def remove_accent(text, replace_space=False):
    """Return *text* with combining accent marks stripped.

    When *replace_space* is True, spaces become underscores, dashes are
    removed and slashes become dashes (useful to build safe file names).
    If Unicode decomposition fails the (possibly substituted) text is
    returned unchanged.
    """
    if replace_space:
        text = text.replace(' ', '_').replace('-', '')
        text = text.replace('/', '-')
    try:
        import unicodedata
        # NFD splits each accented letter into base letter + combining
        # mark (category 'Mn'); dropping the marks removes the accents.
        return ''.join(letter for letter in unicodedata.normalize('NFD', text)
                       if unicodedata.category(letter) != 'Mn')
    except Exception:
        # Was `except BaseException`, which also swallowed KeyboardInterrupt
        # and SystemExit; Exception is wide enough for the intended fallback.
        return text
| Lucterios2/core | lucterios/framework/filetools.py | Python | gpl-3.0 | 5,952 |
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from datetime import datetime
import boto3
import cfnresponse
import json
client = boto3.client("cloudwatch")
def log(stack, metric, value):
    """Publish *metric*=*value* into the "CloudFormation" CloudWatch namespace.

    The data point is emitted twice: once dimensioned by stack name and once
    without dimensions (account-wide aggregate). Both calls now share a
    single timestamp — the previous version duplicated the whole payload and
    called datetime.now() twice, stamping the same event with two slightly
    different times.
    """
    timestamp = datetime.now()

    def _datum(dimensions):
        # Build one metric-data entry; `dimensions` is None for the
        # aggregate series.
        point = {
            "MetricName": metric,
            "Unit": "Count",
            "Value": value,
            "Timestamp": timestamp,
        }
        if dimensions is not None:
            point["Dimensions"] = dimensions
        return point

    # Per-stack series.
    client.put_metric_data(
        Namespace="CloudFormation",
        MetricData=[_datum([{"Name": "By Stack Name", "Value": stack}])],
    )
    # Aggregate series (no dimensions).
    client.put_metric_data(
        Namespace="CloudFormation",
        MetricData=[_datum(None)],
    )
def handler(event, context):
    """CloudFormation custom-resource entry point.

    Records one data point per stack operation (Create/Update/Delete) and,
    on Create only, the stack's resource count. Always answers
    CloudFormation through cfnresponse so the stack operation never hangs
    waiting for this resource.
    """
    print("Received request:", json.dumps(event, indent=4))

    action = event["RequestType"]
    stack = event["ResourceProperties"]["StackName"]
    resources = int(event["ResourceProperties"]["ResourceCount"])

    try:
        log(stack, action, 1)
        if action == "Create":
            log(stack, "ResourceCount", resources)
        cfnresponse.send(event, context, cfnresponse.SUCCESS, {}, "{} metrics".format(stack))
    except Exception as e:
        # Report failure back to CloudFormation instead of timing out.
        cfnresponse.send(event, context, cfnresponse.FAILED, {
            "Data": str(e),
        }, "{} metrics".format(stack))
| awslabs/aws-cfn-templates | aws/services/CloudFormation/MacrosExamples/StackMetrics/lambda/resource.py | Python | apache-2.0 | 2,099 |
# ==============================================================================
# Copyright (C) 2011 Diego Duclos
# Copyright (C) 2011-2018 Anton Vorobyov
#
# This file is part of Eos.
#
# Eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Eos. If not, see <http://www.gnu.org/licenses/>.
# ==============================================================================
from eos.const.eve import EffectId
from eos.eve_obj.effect import EffectFactory
from .base import WarfareBuffEffect
class ModuleBonusWarfareLinkInfo(WarfareBuffEffect):
    """Information warfare link effect variant of the warfare buff effect."""

    # Restrict this buff to friendly (fleet) targets only.
    friendly_only = True


# Make the effect factory instantiate this class for the matching effect ID.
EffectFactory.register_class_by_id(
    ModuleBonusWarfareLinkInfo,
    EffectId.module_bonus_warfare_link_info)
| pyfa-org/eos | eos/eve_obj/effect/warfare_buff/command_info.py | Python | lgpl-3.0 | 1,214 |
# -*- coding: utf-8 -*-
# Akvo RSR is covered by the GNU Affero General Public License.
# See more details in the license.txt file located at the root folder of the Akvo RSR module.
# For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.
from lxml import etree
def sustainability(project):
    """
    Generate the sustainability element, a description element with type "1" and akvo type "10".
    :param project: Project object
    :return: A list of Etree elements
    """
    if not project.sustainability:
        return []
    description = etree.Element("description")
    description.attrib['type'] = '1'
    description.attrib['{http://akvo.org/iati-activities}type'] = '10'
    narrative = etree.SubElement(description, "narrative")
    narrative.text = project.sustainability
    return [description]
| akvo/akvo-rsr | akvo/iati/exports/elements/sustainability.py | Python | agpl-3.0 | 873 |
#-*- encoding:utf-8 -*-
"""
@author: letian
@homepage: http://www.letiantian.me
@github: https://github.com/someus/
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import networkx as nx
import numpy as np
from . import util
from .Segmentation import Segmentation
class TextRank4Keyword(object):
    """TextRank-based keyword and keyphrase extraction for Chinese text."""

    def __init__(self, stop_words_file = None,
                 allow_speech_tags = util.allow_speech_tags,
                 delimiters = util.sentence_delimiters):
        """
        Keyword arguments:
        stop_words_file -- str, path of the stop-words file (one word per
                           line); for any other type the default stop-words
                           file is used
        delimiters -- defaults to `?!;?!。;…\n`, the characters used to
                      split the text into sentences
        Object Var:
        self.words_no_filter -- two-level list obtained by segmenting each
                                sentence of `sentences` into words.
        self.words_no_stop_words -- words_no_filter with stop words removed.
        self.words_all_filters -- words_no_stop_words keeping only words
                                  with the allowed parts of speech.
        """
        self.text = ''
        self.keywords = None
        self.seg = Segmentation(stop_words_file=stop_words_file,
                                allow_speech_tags=allow_speech_tags,
                                delimiters=delimiters)
        self.sentences = None
        self.words_no_filter = None  # two-level list (per sentence, per word)
        self.words_no_stop_words = None
        self.words_all_filters = None

    def analyze(self, text,
                window = 2,
                lower = False,
                vertex_source = 'all_filters',
                edge_source = 'no_stop_words',
                pagerank_config = {'alpha': 0.85,}):
        """Analyze the text.
        Keyword arguments:
        text -- the text to analyze, a string.
        window -- window size, int, used to build edges between words.
                  Default 2.
        lower -- whether to lower-case the text first. Default False.
        vertex_source -- which of words_no_filter, words_no_stop_words,
                  words_all_filters supplies the vertices of the pagerank
                  graph. Default `'all_filters'`; choices are `'no_filter',
                  'no_stop_words', 'all_filters'`. Keywords also come from
                  `vertex_source`.
        edge_source -- which of words_no_filter, words_no_stop_words,
                  words_all_filters supplies the edges of the pagerank
                  graph. Default `'no_stop_words'`; choices are
                  `'no_filter', 'no_stop_words', 'all_filters'`. Edges are
                  built together with the `window` parameter.
        """
        # self.text = util.as_text(text)
        self.text = text
        self.word_index = {}
        self.index_word = {}
        self.keywords = []
        self.graph = None

        result = self.seg.segment(text=text, lower=lower)
        self.sentences = result.sentences
        self.words_no_filter = result.words_no_filter
        self.words_no_stop_words = result.words_no_stop_words
        self.words_all_filters = result.words_all_filters

        util.debug(20*'*')
        util.debug('self.sentences in TextRank4Keyword:\n', ' || '.join(self.sentences))
        util.debug('self.words_no_filter in TextRank4Keyword:\n', self.words_no_filter)
        util.debug('self.words_no_stop_words in TextRank4Keyword:\n', self.words_no_stop_words)
        util.debug('self.words_all_filters in TextRank4Keyword:\n', self.words_all_filters)

        # Fall back to the defaults when an unknown source name is given.
        options = ['no_filter', 'no_stop_words', 'all_filters']
        if vertex_source in options:
            _vertex_source = result['words_'+vertex_source]
        else:
            _vertex_source = result['words_all_filters']
        if edge_source in options:
            _edge_source = result['words_'+edge_source]
        else:
            _edge_source = result['words_no_stop_words']

        self.keywords = util.sort_words(_vertex_source, _edge_source, window = window, pagerank_config = pagerank_config)

    def get_keywords(self, num = 6, word_min_len = 1):
        """Return the *num* most important keywords whose length is at least
        *word_min_len*.
        Return:
        list of keywords.
        """
        result = []
        count = 0
        for item in self.keywords:
            if count >= num:
                break
            if len(item.word) >= word_min_len:
                result.append(item)
                count += 1
        return result

    def get_keyphrases(self, keywords_num = 12, min_occur_num = 2):
        """Return key phrases.
        Candidate phrases are built from runs of adjacent top-`keywords_num`
        keywords; a phrase is kept only when it occurs at least
        `min_occur_num` times in the original text.
        Return:
        list of key phrases.
        """
        keywords_set = set([ item.word for item in self.get_keywords(num=keywords_num, word_min_len = 1)])

        keyphrases = set()
        for sentence in self.words_no_filter:
            one = []
            for word in sentence:
                if word in keywords_set:
                    one.append(word)
                else:
                    # A non-keyword ends the current run; keep runs of 2+.
                    if len(one) > 1:
                        keyphrases.add(''.join(one))
                    if len(one) == 0:
                        continue
                    else:
                        one = []
            # Flush a run that reaches the end of the sentence.
            if len(one) > 1:
                keyphrases.add(''.join(one))

        return [phrase for phrase in keyphrases
                if self.text.count(phrase) >= min_occur_num]
if __name__ == '__main__':
pass | someus/TextRank4ZH | textrank4zh/TextRank4Keyword.py | Python | mit | 5,710 |
import re
import collections
from enum import Enum
from ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk._core._dm_meta_info import ATTRIBUTE, REFERENCE_CLASS, REFERENCE_LIST, REFERENCE_LEAFLIST, REFERENCE_IDENTITY_CLASS, REFERENCE_ENUM_CLASS, REFERENCE_BITS, REFERENCE_UNION, ANYXML_CLASS
from ydk.errors import YPYError, YPYModelError
from ydk.providers._importer import _yang_ns
_meta_table = {
'IpArpBagFlagsEnum' : _MetaInfoEnum('IpArpBagFlagsEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper',
{
'flag-none':'flag_none',
'flag-dynamic':'flag_dynamic',
'flag-evpn-sync':'flag_evpn_sync',
'flag-max':'flag_max',
}, 'Cisco-IOS-XR-ipv4-arp-oper', _yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper']),
'IpArpBagMediaEnum' : _MetaInfoEnum('IpArpBagMediaEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper',
{
'media-arpa':'media_arpa',
'media-srp':'media_srp',
'media-unknown':'media_unknown',
}, 'Cisco-IOS-XR-ipv4-arp-oper', _yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper']),
'ArpGmpBagEncapEnum' : _MetaInfoEnum('ArpGmpBagEncapEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper',
{
'none':'none',
'arpa':'arpa',
'snap':'snap',
'ieee802-1q':'ieee802_1q',
'srp':'srp',
'srpa':'srpa',
'srpb':'srpb',
}, 'Cisco-IOS-XR-ipv4-arp-oper', _yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper']),
'ArpResolutionHistoryStatusEnum' : _MetaInfoEnum('ArpResolutionHistoryStatusEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper',
{
'status-none':'status_none',
'status-resolution-request':'status_resolution_request',
'status-resolved-reply':'status_resolved_reply',
'status-resolved-grat-arp':'status_resolved_grat_arp',
'status-resolved-request':'status_resolved_request',
'status-resolved-lc-sync':'status_resolved_lc_sync',
'status-resolved-lc-sync-purge-delay':'status_resolved_lc_sync_purge_delay',
'status-resolved-client':'status_resolved_client',
'status-removed-client':'status_removed_client',
'status-already-resolved':'status_already_resolved',
'status-failed':'status_failed',
'status-dropped-interface-down':'status_dropped_interface_down',
'status-dropped-broadcast-disabled':'status_dropped_broadcast_disabled',
'status-dropped-interface-unavailable':'status_dropped_interface_unavailable',
'status-dropped-bad-subnet':'status_dropped_bad_subnet',
'status-dropped-dynamic-learning-disabled':'status_dropped_dynamic_learning_disabled',
'status-dropped-out-of-subnet-disabled':'status_dropped_out_of_subnet_disabled',
'status-removed-client-sweep':'status_removed_client_sweep',
'status-added-client':'status_added_client',
'status-added-v1':'status_added_v1',
'status-removed-v1':'status_removed_v1',
'status-resolved-peer-sync':'status_resolved_peer_sync',
}, 'Cisco-IOS-XR-ipv4-arp-oper', _yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper']),
'IpArpBagEncapEnum' : _MetaInfoEnum('IpArpBagEncapEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper',
{
'none':'none',
'arpa':'arpa',
'snap':'snap',
'ieee802-1q':'ieee802_1q',
'srp':'srp',
'srpa':'srpa',
'srpb':'srpb',
}, 'Cisco-IOS-XR-ipv4-arp-oper', _yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper']),
'IpArpBagStateEnum' : _MetaInfoEnum('IpArpBagStateEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper',
{
'state-none':'state_none',
'state-interface':'state_interface',
'state-standby':'state_standby',
'state-static':'state_static',
'state-alias':'state_alias',
'state-mobile':'state_mobile',
'state-incomplete':'state_incomplete',
'state-deleted':'state_deleted',
'state-dynamic':'state_dynamic',
'state-probe':'state_probe',
'state-purge-delayed':'state_purge_delayed',
'state-dhcp':'state_dhcp',
'state-vxlan':'state_vxlan',
'state-evpn-sync':'state_evpn_sync',
'state-sat':'state_sat',
'state-r-sync':'state_r_sync',
'state-max':'state_max',
}, 'Cisco-IOS-XR-ipv4-arp-oper', _yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper']),
'ArpGmpBagEntryEnum' : _MetaInfoEnum('ArpGmpBagEntryEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper',
{
'null':'null',
'static':'static',
'alias':'alias',
}, 'Cisco-IOS-XR-ipv4-arp-oper', _yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper']),
'ArpGmp.VrfInfos.VrfInfo' : {
'meta_info' : _MetaInfoClass('ArpGmp.VrfInfos.VrfInfo',
False,
[
_MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' VRF name for the default VRF use 'default'
''',
'vrf_name',
'Cisco-IOS-XR-ipv4-arp-oper', True),
_MetaInfoClassMember('rsi-handle', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' RSI registration handle
''',
'rsi_handle',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('rsi-handle-high', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' RSI registration handle (top 32-bits)
''',
'rsi_handle_high',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('table-id', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' IPv4 unicast table ID
''',
'table_id',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('vrf-id-number', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' VRF ID
''',
'vrf_id_number',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('vrf-name-xr', ATTRIBUTE, 'str' , None, None,
[], [],
''' VRF Name
''',
'vrf_name_xr',
'Cisco-IOS-XR-ipv4-arp-oper', False),
],
'Cisco-IOS-XR-ipv4-arp-oper',
'vrf-info',
_yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper'
),
},
'ArpGmp.VrfInfos' : {
'meta_info' : _MetaInfoClass('ArpGmp.VrfInfos',
False,
[
_MetaInfoClassMember('vrf-info', REFERENCE_LIST, 'VrfInfo' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'ArpGmp.VrfInfos.VrfInfo',
[], [],
''' VRF related ARP-GMP operational data
''',
'vrf_info',
'Cisco-IOS-XR-ipv4-arp-oper', False),
],
'Cisco-IOS-XR-ipv4-arp-oper',
'vrf-infos',
_yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper'
),
},
'ArpGmp.Vrfs.Vrf.ConfiguredIpAddresses.ConfiguredIpAddress' : {
'meta_info' : _MetaInfoClass('ArpGmp.Vrfs.Vrf.ConfiguredIpAddresses.ConfiguredIpAddress',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Configured ARP-GMP IP
''',
'address',
'Cisco-IOS-XR-ipv4-arp-oper', True),
_MetaInfoClassMember('encapsulation-type', REFERENCE_ENUM_CLASS, 'ArpGmpBagEncapEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'ArpGmpBagEncapEnum',
[], [],
''' Encap type
''',
'encapsulation_type',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('entry-type', REFERENCE_ENUM_CLASS, 'ArpGmpBagEntryEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'ArpGmpBagEntryEnum',
[], [],
''' Entry type static/alias
''',
'entry_type',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('hardware-address', ATTRIBUTE, 'str' , None, None,
[], [b'[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Hardware address
''',
'hardware_address',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('ip-address', ATTRIBUTE, 'str' , None, None,
[], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IP address
''',
'ip_address',
'Cisco-IOS-XR-ipv4-arp-oper', False),
],
'Cisco-IOS-XR-ipv4-arp-oper',
'configured-ip-address',
_yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper'
),
},
'ArpGmp.Vrfs.Vrf.ConfiguredIpAddresses' : {
'meta_info' : _MetaInfoClass('ArpGmp.Vrfs.Vrf.ConfiguredIpAddresses',
False,
[
_MetaInfoClassMember('configured-ip-address', REFERENCE_LIST, 'ConfiguredIpAddress' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'ArpGmp.Vrfs.Vrf.ConfiguredIpAddresses.ConfiguredIpAddress',
[], [],
''' ARP-GMP configured IP address information
''',
'configured_ip_address',
'Cisco-IOS-XR-ipv4-arp-oper', False),
],
'Cisco-IOS-XR-ipv4-arp-oper',
'configured-ip-addresses',
_yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper'
),
},
'ArpGmp.Vrfs.Vrf.Routes.Route' : {
'meta_info' : _MetaInfoClass('ArpGmp.Vrfs.Vrf.Routes.Route',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IP address
''',
'address',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('interface-name', REFERENCE_LEAFLIST, 'str' , None, None,
[], [b'(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Interface names
''',
'interface_name',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('interface-name-xr', ATTRIBUTE, 'str' , None, None,
[], [b'(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Interface name (first element of InterfaceNames
array)
''',
'interface_name_xr',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('ip-address', ATTRIBUTE, 'str' , None, None,
[], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IP address
''',
'ip_address',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('prefix-length', ATTRIBUTE, 'int' , None, None,
[('0', '32')], [],
''' Prefix length
''',
'prefix_length',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('prefix-length-xr', ATTRIBUTE, 'int' , None, None,
[('0', '255')], [],
''' IP address length
''',
'prefix_length_xr',
'Cisco-IOS-XR-ipv4-arp-oper', False),
],
'Cisco-IOS-XR-ipv4-arp-oper',
'route',
_yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper'
),
},
'ArpGmp.Vrfs.Vrf.Routes' : {
'meta_info' : _MetaInfoClass('ArpGmp.Vrfs.Vrf.Routes',
False,
[
_MetaInfoClassMember('route', REFERENCE_LIST, 'Route' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'ArpGmp.Vrfs.Vrf.Routes.Route',
[], [],
''' ARP GMP route information
''',
'route',
'Cisco-IOS-XR-ipv4-arp-oper', False),
],
'Cisco-IOS-XR-ipv4-arp-oper',
'routes',
_yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper'
),
},
'ArpGmp.Vrfs.Vrf.InterfaceConfiguredIps.InterfaceConfiguredIp.AssociatedConfigurationEntry' : {
'meta_info' : _MetaInfoClass('ArpGmp.Vrfs.Vrf.InterfaceConfiguredIps.InterfaceConfiguredIp.AssociatedConfigurationEntry',
False,
[
_MetaInfoClassMember('encapsulation-type', REFERENCE_ENUM_CLASS, 'ArpGmpBagEncapEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'ArpGmpBagEncapEnum',
[], [],
''' Encap type
''',
'encapsulation_type',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('entry-type', REFERENCE_ENUM_CLASS, 'ArpGmpBagEntryEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'ArpGmpBagEntryEnum',
[], [],
''' Entry type static/alias
''',
'entry_type',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('hardware-address', ATTRIBUTE, 'str' , None, None,
[], [b'[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Hardware address
''',
'hardware_address',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('ip-address', ATTRIBUTE, 'str' , None, None,
[], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IP address
''',
'ip_address',
'Cisco-IOS-XR-ipv4-arp-oper', False),
],
'Cisco-IOS-XR-ipv4-arp-oper',
'associated-configuration-entry',
_yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper'
),
},
'ArpGmp.Vrfs.Vrf.InterfaceConfiguredIps.InterfaceConfiguredIp' : {
'meta_info' : _MetaInfoClass('ArpGmp.Vrfs.Vrf.InterfaceConfiguredIps.InterfaceConfiguredIp',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Configured ARP-GMP IP
''',
'address',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('associated-configuration-entry', REFERENCE_CLASS, 'AssociatedConfigurationEntry' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'ArpGmp.Vrfs.Vrf.InterfaceConfiguredIps.InterfaceConfiguredIp.AssociatedConfigurationEntry',
[], [],
''' Associated configuration entry
''',
'associated_configuration_entry',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], [b'(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Interface name
''',
'interface_name',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('interface-name-xr', ATTRIBUTE, 'str' , None, None,
[], [b'(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Interface name
''',
'interface_name_xr',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('reference-count', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Route reference count
''',
'reference_count',
'Cisco-IOS-XR-ipv4-arp-oper', False),
],
'Cisco-IOS-XR-ipv4-arp-oper',
'interface-configured-ip',
_yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper'
),
},
'ArpGmp.Vrfs.Vrf.InterfaceConfiguredIps' : {
'meta_info' : _MetaInfoClass('ArpGmp.Vrfs.Vrf.InterfaceConfiguredIps',
False,
[
_MetaInfoClassMember('interface-configured-ip', REFERENCE_LIST, 'InterfaceConfiguredIp' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'ArpGmp.Vrfs.Vrf.InterfaceConfiguredIps.InterfaceConfiguredIp',
[], [],
''' ARP GMP interface and associated configured
IP data
''',
'interface_configured_ip',
'Cisco-IOS-XR-ipv4-arp-oper', False),
],
'Cisco-IOS-XR-ipv4-arp-oper',
'interface-configured-ips',
_yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper'
),
},
'ArpGmp.Vrfs.Vrf' : {
'meta_info' : _MetaInfoClass('ArpGmp.Vrfs.Vrf',
False,
[
_MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' VRF name for the default VRF use 'default'
''',
'vrf_name',
'Cisco-IOS-XR-ipv4-arp-oper', True),
_MetaInfoClassMember('configured-ip-addresses', REFERENCE_CLASS, 'ConfiguredIpAddresses' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'ArpGmp.Vrfs.Vrf.ConfiguredIpAddresses',
[], [],
''' Table of ARP-GMP configured IP addresses
information
''',
'configured_ip_addresses',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('interface-configured-ips', REFERENCE_CLASS, 'InterfaceConfiguredIps' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'ArpGmp.Vrfs.Vrf.InterfaceConfiguredIps',
[], [],
''' Table of ARP GMP interface and associated
configured IP data
''',
'interface_configured_ips',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('routes', REFERENCE_CLASS, 'Routes' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'ArpGmp.Vrfs.Vrf.Routes',
[], [],
''' Table of ARP GMP route information
''',
'routes',
'Cisco-IOS-XR-ipv4-arp-oper', False),
],
'Cisco-IOS-XR-ipv4-arp-oper',
'vrf',
_yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper'
),
},
'ArpGmp.Vrfs' : {
'meta_info' : _MetaInfoClass('ArpGmp.Vrfs',
False,
[
_MetaInfoClassMember('vrf', REFERENCE_LIST, 'Vrf' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'ArpGmp.Vrfs.Vrf',
[], [],
''' Per VRF ARP-GMP operational data
''',
'vrf',
'Cisco-IOS-XR-ipv4-arp-oper', False),
],
'Cisco-IOS-XR-ipv4-arp-oper',
'vrfs',
_yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper'
),
},
'ArpGmp' : {
'meta_info' : _MetaInfoClass('ArpGmp',
False,
[
_MetaInfoClassMember('vrf-infos', REFERENCE_CLASS, 'VrfInfos' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'ArpGmp.VrfInfos',
[], [],
''' Table of VRF related ARP-GMP operational data
''',
'vrf_infos',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('vrfs', REFERENCE_CLASS, 'Vrfs' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'ArpGmp.Vrfs',
[], [],
''' Table of per VRF ARP-GMP operational data
''',
'vrfs',
'Cisco-IOS-XR-ipv4-arp-oper', False),
],
'Cisco-IOS-XR-ipv4-arp-oper',
'arp-gmp',
_yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper'
),
},
'Arp.Nodes.Node.ResolutionHistoryDynamic.ArpEntry' : {
'meta_info' : _MetaInfoClass('Arp.Nodes.Node.ResolutionHistoryDynamic.ArpEntry',
False,
[
_MetaInfoClassMember('client-id', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Resolving Client ID
''',
'client_id',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('entry-state', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ARP entry state
''',
'entry_state',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('idb-interface-name', ATTRIBUTE, 'str' , None, None,
[], [b'(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Interface
''',
'idb_interface_name',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('ipv4-address', ATTRIBUTE, 'str' , None, None,
[], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPv4 address
''',
'ipv4_address',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('mac-address', ATTRIBUTE, 'str' , None, None,
[], [b'[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' MAC address
''',
'mac_address',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('nsec-timestamp', ATTRIBUTE, 'int' , None, None,
[('0', '18446744073709551615')], [],
''' Timestamp for entry in nanoseconds since Epoch,
i.e. since 00:00:00 UTC, January 1, 1970
''',
'nsec_timestamp',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('resolution-request-count', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Resolution Request count
''',
'resolution_request_count',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('status', REFERENCE_ENUM_CLASS, 'ArpResolutionHistoryStatusEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'ArpResolutionHistoryStatusEnum',
[], [],
''' Resolution status
''',
'status',
'Cisco-IOS-XR-ipv4-arp-oper', False),
],
'Cisco-IOS-XR-ipv4-arp-oper',
'arp-entry',
_yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper'
),
},
'Arp.Nodes.Node.ResolutionHistoryDynamic' : {
'meta_info' : _MetaInfoClass('Arp.Nodes.Node.ResolutionHistoryDynamic',
False,
[
_MetaInfoClassMember('arp-entry', REFERENCE_LIST, 'ArpEntry' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'Arp.Nodes.Node.ResolutionHistoryDynamic.ArpEntry',
[], [],
''' Resolution history array
''',
'arp_entry',
'Cisco-IOS-XR-ipv4-arp-oper', False),
],
'Cisco-IOS-XR-ipv4-arp-oper',
'resolution-history-dynamic',
_yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper'
),
},
'Arp.Nodes.Node.TrafficVrfs.TrafficVrf' : {
'meta_info' : _MetaInfoClass('Arp.Nodes.Node.TrafficVrfs.TrafficVrf',
False,
[
_MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' VRF name
''',
'vrf_name',
'Cisco-IOS-XR-ipv4-arp-oper', True),
_MetaInfoClassMember('alias-entries', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total alias entries in the cache
''',
'alias_entries',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('arp-packet-interface-out-of-subnet', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total arp packets on interface due to out of
subnet
''',
'arp_packet_interface_out_of_subnet',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('arp-packet-node-out-of-subnet', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP packets on node due to out of subnet
''',
'arp_packet_node_out_of_subnet',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('dhcp-entries', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total DHCP entries in the cache
''',
'dhcp_entries',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('dynamic-entries', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total dynamic entries in the cache
''',
'dynamic_entries',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('gratuitous-replies-sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total Gratuituous ARP replies sent
''',
'gratuitous_replies_sent',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('idb-structures', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total idb structures on this node
''',
'idb_structures',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('interface-entries', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total interface entries in the cache
''',
'interface_entries',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('ip-packets-dropped-interface', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ip packets droped on this interface
''',
'ip_packets_dropped_interface',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('ip-packets-dropped-node', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ip packets droped on this node
''',
'ip_packets_dropped_node',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('local-proxy-replies-sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total Local Proxy ARP replies sent
''',
'local_proxy_replies_sent',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('no-buffer-errors', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total errors for no buffer
''',
'no_buffer_errors',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('out-of-memory-errors', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total errors for out of memory
''',
'out_of_memory_errors',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('proxy-replies-sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total Proxy ARP replies sent
''',
'proxy_replies_sent',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('replies-received', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP replies received
''',
'replies_received',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('replies-sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP replies sent
''',
'replies_sent',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('requests-received', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP requests received
''',
'requests_received',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('requests-sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP requests sent
''',
'requests_sent',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('resolution-replies-received', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP resolution replies received
''',
'resolution_replies_received',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('resolution-requests-dropped', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' total ARP resolution requests dropped
''',
'resolution_requests_dropped',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('resolution-requests-received', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP resolution requests received
''',
'resolution_requests_received',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('standby-entries', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total standby entries in the cache
''',
'standby_entries',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('static-entries', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total static entries in the cache
''',
'static_entries',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('subscr-replies-gratg-sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP grat replies sent over subscriber
interface
''',
'subscr_replies_gratg_sent',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('subscr-replies-sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP replies sent over subscriber interface
''',
'subscr_replies_sent',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('subscr-requests-received', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP requests received over subscriber
interface
''',
'subscr_requests_received',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('total-entries', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP entries in the cache
''',
'total_entries',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('vxlan-entries', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total VXLAN entries in the cache
''',
'vxlan_entries',
'Cisco-IOS-XR-ipv4-arp-oper', False),
],
'Cisco-IOS-XR-ipv4-arp-oper',
'traffic-vrf',
_yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper'
),
},
'Arp.Nodes.Node.TrafficVrfs' : {
'meta_info' : _MetaInfoClass('Arp.Nodes.Node.TrafficVrfs',
False,
[
_MetaInfoClassMember('traffic-vrf', REFERENCE_LIST, 'TrafficVrf' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'Arp.Nodes.Node.TrafficVrfs.TrafficVrf',
[], [],
''' Per VRF traffic data
''',
'traffic_vrf',
'Cisco-IOS-XR-ipv4-arp-oper', False),
],
'Cisco-IOS-XR-ipv4-arp-oper',
'traffic-vrfs',
_yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper'
),
},
'Arp.Nodes.Node.TrafficNode' : {
'meta_info' : _MetaInfoClass('Arp.Nodes.Node.TrafficNode',
False,
[
_MetaInfoClassMember('alias-entries', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total alias entries in the cache
''',
'alias_entries',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('arp-packet-interface-out-of-subnet', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total arp packets on interface due to out of
subnet
''',
'arp_packet_interface_out_of_subnet',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('arp-packet-node-out-of-subnet', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP packets on node due to out of subnet
''',
'arp_packet_node_out_of_subnet',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('dhcp-entries', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total DHCP entries in the cache
''',
'dhcp_entries',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('dynamic-entries', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total dynamic entries in the cache
''',
'dynamic_entries',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('gratuitous-replies-sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total Gratuituous ARP replies sent
''',
'gratuitous_replies_sent',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('idb-structures', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total idb structures on this node
''',
'idb_structures',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('interface-entries', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total interface entries in the cache
''',
'interface_entries',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('ip-packets-dropped-interface', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ip packets droped on this interface
''',
'ip_packets_dropped_interface',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('ip-packets-dropped-node', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ip packets droped on this node
''',
'ip_packets_dropped_node',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('local-proxy-replies-sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total Local Proxy ARP replies sent
''',
'local_proxy_replies_sent',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('no-buffer-errors', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total errors for no buffer
''',
'no_buffer_errors',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('out-of-memory-errors', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total errors for out of memory
''',
'out_of_memory_errors',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('proxy-replies-sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total Proxy ARP replies sent
''',
'proxy_replies_sent',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('replies-received', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP replies received
''',
'replies_received',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('replies-sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP replies sent
''',
'replies_sent',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('requests-received', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP requests received
''',
'requests_received',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('requests-sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP requests sent
''',
'requests_sent',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('resolution-replies-received', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP resolution replies received
''',
'resolution_replies_received',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('resolution-requests-dropped', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' total ARP resolution requests dropped
''',
'resolution_requests_dropped',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('resolution-requests-received', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP resolution requests received
''',
'resolution_requests_received',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('standby-entries', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total standby entries in the cache
''',
'standby_entries',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('static-entries', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total static entries in the cache
''',
'static_entries',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('subscr-replies-gratg-sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP grat replies sent over subscriber
interface
''',
'subscr_replies_gratg_sent',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('subscr-replies-sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP replies sent over subscriber interface
''',
'subscr_replies_sent',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('subscr-requests-received', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP requests received over subscriber
interface
''',
'subscr_requests_received',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('total-entries', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP entries in the cache
''',
'total_entries',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('vxlan-entries', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total VXLAN entries in the cache
''',
'vxlan_entries',
'Cisco-IOS-XR-ipv4-arp-oper', False),
],
'Cisco-IOS-XR-ipv4-arp-oper',
'traffic-node',
_yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper'
),
},
'Arp.Nodes.Node.ResolutionHistoryClient.ArpEntry' : {
'meta_info' : _MetaInfoClass('Arp.Nodes.Node.ResolutionHistoryClient.ArpEntry',
False,
[
_MetaInfoClassMember('client-id', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Resolving Client ID
''',
'client_id',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('entry-state', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ARP entry state
''',
'entry_state',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('idb-interface-name', ATTRIBUTE, 'str' , None, None,
[], [b'(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Interface
''',
'idb_interface_name',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('ipv4-address', ATTRIBUTE, 'str' , None, None,
[], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPv4 address
''',
'ipv4_address',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('mac-address', ATTRIBUTE, 'str' , None, None,
[], [b'[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' MAC address
''',
'mac_address',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('nsec-timestamp', ATTRIBUTE, 'int' , None, None,
[('0', '18446744073709551615')], [],
''' Timestamp for entry in nanoseconds since Epoch,
i.e. since 00:00:00 UTC, January 1, 1970
''',
'nsec_timestamp',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('resolution-request-count', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Resolution Request count
''',
'resolution_request_count',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('status', REFERENCE_ENUM_CLASS, 'ArpResolutionHistoryStatusEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'ArpResolutionHistoryStatusEnum',
[], [],
''' Resolution status
''',
'status',
'Cisco-IOS-XR-ipv4-arp-oper', False),
],
'Cisco-IOS-XR-ipv4-arp-oper',
'arp-entry',
_yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper'
),
},
'Arp.Nodes.Node.ResolutionHistoryClient' : {
'meta_info' : _MetaInfoClass('Arp.Nodes.Node.ResolutionHistoryClient',
False,
[
_MetaInfoClassMember('arp-entry', REFERENCE_LIST, 'ArpEntry' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'Arp.Nodes.Node.ResolutionHistoryClient.ArpEntry',
[], [],
''' Resolution history array
''',
'arp_entry',
'Cisco-IOS-XR-ipv4-arp-oper', False),
],
'Cisco-IOS-XR-ipv4-arp-oper',
'resolution-history-client',
_yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper'
),
},
'Arp.Nodes.Node.Entries.Entry' : {
'meta_info' : _MetaInfoClass('Arp.Nodes.Node.Entries.Entry',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IP Address of ARP entry
''',
'address',
'Cisco-IOS-XR-ipv4-arp-oper', True),
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], [b'(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Interface name
''',
'interface_name',
'Cisco-IOS-XR-ipv4-arp-oper', True),
_MetaInfoClassMember('age', ATTRIBUTE, 'int' , None, None,
[('0', '18446744073709551615')], [],
''' Age of this entry
''',
'age',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('encapsulation-type', REFERENCE_ENUM_CLASS, 'IpArpBagEncapEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'IpArpBagEncapEnum',
[], [],
''' Source encapsulation type
''',
'encapsulation_type',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('flag', REFERENCE_ENUM_CLASS, 'IpArpBagFlagsEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'IpArpBagFlagsEnum',
[], [],
''' Flags of this entry
''',
'flag',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('hardware-address', ATTRIBUTE, 'str' , None, None,
[], [b'[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Hardware address
''',
'hardware_address',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('hardware-length', ATTRIBUTE, 'int' , None, None,
[('0', '255')], [],
''' Source hardware length
''',
'hardware_length',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('media-type', REFERENCE_ENUM_CLASS, 'IpArpBagMediaEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'IpArpBagMediaEnum',
[], [],
''' Media type for this entry
''',
'media_type',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('state', REFERENCE_ENUM_CLASS, 'IpArpBagStateEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'IpArpBagStateEnum',
[], [],
''' State of this entry
''',
'state',
'Cisco-IOS-XR-ipv4-arp-oper', False),
],
'Cisco-IOS-XR-ipv4-arp-oper',
'entry',
_yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper'
),
},
'Arp.Nodes.Node.Entries' : {
'meta_info' : _MetaInfoClass('Arp.Nodes.Node.Entries',
False,
[
_MetaInfoClassMember('entry', REFERENCE_LIST, 'Entry' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'Arp.Nodes.Node.Entries.Entry',
[], [],
''' ARP entry
''',
'entry',
'Cisco-IOS-XR-ipv4-arp-oper', False),
],
'Cisco-IOS-XR-ipv4-arp-oper',
'entries',
_yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper'
),
},
'Arp.Nodes.Node.TrafficInterfaces.TrafficInterface' : {
'meta_info' : _MetaInfoClass('Arp.Nodes.Node.TrafficInterfaces.TrafficInterface',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], [b'(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Interface name
''',
'interface_name',
'Cisco-IOS-XR-ipv4-arp-oper', True),
_MetaInfoClassMember('alias-entries', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total alias entries in the cache
''',
'alias_entries',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('arp-packet-interface-out-of-subnet', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total arp packets on interface due to out of
subnet
''',
'arp_packet_interface_out_of_subnet',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('arp-packet-node-out-of-subnet', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP packets on node due to out of subnet
''',
'arp_packet_node_out_of_subnet',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('dhcp-entries', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total DHCP entries in the cache
''',
'dhcp_entries',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('dynamic-entries', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total dynamic entries in the cache
''',
'dynamic_entries',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('gratuitous-replies-sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total Gratuituous ARP replies sent
''',
'gratuitous_replies_sent',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('idb-structures', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total idb structures on this node
''',
'idb_structures',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('interface-entries', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total interface entries in the cache
''',
'interface_entries',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('ip-packets-dropped-interface', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ip packets droped on this interface
''',
'ip_packets_dropped_interface',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('ip-packets-dropped-node', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ip packets droped on this node
''',
'ip_packets_dropped_node',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('local-proxy-replies-sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total Local Proxy ARP replies sent
''',
'local_proxy_replies_sent',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('no-buffer-errors', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total errors for no buffer
''',
'no_buffer_errors',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('out-of-memory-errors', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total errors for out of memory
''',
'out_of_memory_errors',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('proxy-replies-sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total Proxy ARP replies sent
''',
'proxy_replies_sent',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('replies-received', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP replies received
''',
'replies_received',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('replies-sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP replies sent
''',
'replies_sent',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('requests-received', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP requests received
''',
'requests_received',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('requests-sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP requests sent
''',
'requests_sent',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('resolution-replies-received', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP resolution replies received
''',
'resolution_replies_received',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('resolution-requests-dropped', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' total ARP resolution requests dropped
''',
'resolution_requests_dropped',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('resolution-requests-received', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP resolution requests received
''',
'resolution_requests_received',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('standby-entries', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total standby entries in the cache
''',
'standby_entries',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('static-entries', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total static entries in the cache
''',
'static_entries',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('subscr-replies-gratg-sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP grat replies sent over subscriber
interface
''',
'subscr_replies_gratg_sent',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('subscr-replies-sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP replies sent over subscriber interface
''',
'subscr_replies_sent',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('subscr-requests-received', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP requests received over subscriber
interface
''',
'subscr_requests_received',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('total-entries', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total ARP entries in the cache
''',
'total_entries',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('vxlan-entries', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Total VXLAN entries in the cache
''',
'vxlan_entries',
'Cisco-IOS-XR-ipv4-arp-oper', False),
],
'Cisco-IOS-XR-ipv4-arp-oper',
'traffic-interface',
_yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper'
),
},
'Arp.Nodes.Node.TrafficInterfaces' : {
'meta_info' : _MetaInfoClass('Arp.Nodes.Node.TrafficInterfaces',
False,
[
_MetaInfoClassMember('traffic-interface', REFERENCE_LIST, 'TrafficInterface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'Arp.Nodes.Node.TrafficInterfaces.TrafficInterface',
[], [],
''' Per interface traffic data
''',
'traffic_interface',
'Cisco-IOS-XR-ipv4-arp-oper', False),
],
'Cisco-IOS-XR-ipv4-arp-oper',
'traffic-interfaces',
_yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper'
),
},
'Arp.Nodes.Node' : {
'meta_info' : _MetaInfoClass('Arp.Nodes.Node',
False,
[
_MetaInfoClassMember('node-name', ATTRIBUTE, 'str' , None, None,
[], [b'([a-zA-Z0-9_]*\\d+/){1,2}([a-zA-Z0-9_]*\\d+)'],
''' Node name
''',
'node_name',
'Cisco-IOS-XR-ipv4-arp-oper', True),
_MetaInfoClassMember('entries', REFERENCE_CLASS, 'Entries' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'Arp.Nodes.Node.Entries',
[], [],
''' Table of ARP entries
''',
'entries',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('resolution-history-client', REFERENCE_CLASS, 'ResolutionHistoryClient' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'Arp.Nodes.Node.ResolutionHistoryClient',
[], [],
''' Per node client-installed ARP resolution
history data
''',
'resolution_history_client',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('resolution-history-dynamic', REFERENCE_CLASS, 'ResolutionHistoryDynamic' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'Arp.Nodes.Node.ResolutionHistoryDynamic',
[], [],
''' Per node dynamically-resolved ARP resolution
history data
''',
'resolution_history_dynamic',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('traffic-interfaces', REFERENCE_CLASS, 'TrafficInterfaces' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'Arp.Nodes.Node.TrafficInterfaces',
[], [],
''' ARP Traffic information per interface
''',
'traffic_interfaces',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('traffic-node', REFERENCE_CLASS, 'TrafficNode' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'Arp.Nodes.Node.TrafficNode',
[], [],
''' Per node ARP Traffic data
''',
'traffic_node',
'Cisco-IOS-XR-ipv4-arp-oper', False),
_MetaInfoClassMember('traffic-vrfs', REFERENCE_CLASS, 'TrafficVrfs' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'Arp.Nodes.Node.TrafficVrfs',
[], [],
''' ARP Traffic information per VRF
''',
'traffic_vrfs',
'Cisco-IOS-XR-ipv4-arp-oper', False),
],
'Cisco-IOS-XR-ipv4-arp-oper',
'node',
_yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper'
),
},
'Arp.Nodes' : {
'meta_info' : _MetaInfoClass('Arp.Nodes',
False,
[
_MetaInfoClassMember('node', REFERENCE_LIST, 'Node' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'Arp.Nodes.Node',
[], [],
''' Per-node ARP operational data
''',
'node',
'Cisco-IOS-XR-ipv4-arp-oper', False),
],
'Cisco-IOS-XR-ipv4-arp-oper',
'nodes',
_yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper'
),
},
'Arp' : {
'meta_info' : _MetaInfoClass('Arp',
False,
[
_MetaInfoClassMember('nodes', REFERENCE_CLASS, 'Nodes' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper', 'Arp.Nodes',
[], [],
''' Table of per-node ARP operational data
''',
'nodes',
'Cisco-IOS-XR-ipv4-arp-oper', False),
],
'Cisco-IOS-XR-ipv4-arp-oper',
'arp',
_yang_ns._namespaces['Cisco-IOS-XR-ipv4-arp-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_oper'
),
},
}
# Wire up the containment hierarchy: each (child, parent) pair below links a
# child node's meta_info to its enclosing container's meta_info, mirroring the
# YANG schema tree for ArpGmp and Arp.
_parent_links = [
    ('ArpGmp.VrfInfos.VrfInfo', 'ArpGmp.VrfInfos'),
    ('ArpGmp.Vrfs.Vrf.ConfiguredIpAddresses.ConfiguredIpAddress', 'ArpGmp.Vrfs.Vrf.ConfiguredIpAddresses'),
    ('ArpGmp.Vrfs.Vrf.Routes.Route', 'ArpGmp.Vrfs.Vrf.Routes'),
    ('ArpGmp.Vrfs.Vrf.InterfaceConfiguredIps.InterfaceConfiguredIp.AssociatedConfigurationEntry', 'ArpGmp.Vrfs.Vrf.InterfaceConfiguredIps.InterfaceConfiguredIp'),
    ('ArpGmp.Vrfs.Vrf.InterfaceConfiguredIps.InterfaceConfiguredIp', 'ArpGmp.Vrfs.Vrf.InterfaceConfiguredIps'),
    ('ArpGmp.Vrfs.Vrf.ConfiguredIpAddresses', 'ArpGmp.Vrfs.Vrf'),
    ('ArpGmp.Vrfs.Vrf.Routes', 'ArpGmp.Vrfs.Vrf'),
    ('ArpGmp.Vrfs.Vrf.InterfaceConfiguredIps', 'ArpGmp.Vrfs.Vrf'),
    ('ArpGmp.Vrfs.Vrf', 'ArpGmp.Vrfs'),
    ('ArpGmp.VrfInfos', 'ArpGmp'),
    ('ArpGmp.Vrfs', 'ArpGmp'),
    ('Arp.Nodes.Node.ResolutionHistoryDynamic.ArpEntry', 'Arp.Nodes.Node.ResolutionHistoryDynamic'),
    ('Arp.Nodes.Node.TrafficVrfs.TrafficVrf', 'Arp.Nodes.Node.TrafficVrfs'),
    ('Arp.Nodes.Node.ResolutionHistoryClient.ArpEntry', 'Arp.Nodes.Node.ResolutionHistoryClient'),
    ('Arp.Nodes.Node.Entries.Entry', 'Arp.Nodes.Node.Entries'),
    ('Arp.Nodes.Node.TrafficInterfaces.TrafficInterface', 'Arp.Nodes.Node.TrafficInterfaces'),
    ('Arp.Nodes.Node.ResolutionHistoryDynamic', 'Arp.Nodes.Node'),
    ('Arp.Nodes.Node.TrafficVrfs', 'Arp.Nodes.Node'),
    ('Arp.Nodes.Node.TrafficNode', 'Arp.Nodes.Node'),
    ('Arp.Nodes.Node.ResolutionHistoryClient', 'Arp.Nodes.Node'),
    ('Arp.Nodes.Node.Entries', 'Arp.Nodes.Node'),
    ('Arp.Nodes.Node.TrafficInterfaces', 'Arp.Nodes.Node'),
    ('Arp.Nodes.Node', 'Arp.Nodes'),
    ('Arp.Nodes', 'Arp'),
]
for _child, _parent in _parent_links:
    _meta_table[_child]['meta_info'].parent = _meta_table[_parent]['meta_info']
| 111pontes/ydk-py | cisco-ios-xr/ydk/models/cisco_ios_xr/_meta/_Cisco_IOS_XR_ipv4_arp_oper.py | Python | apache-2.0 | 73,087 |
# Name: nansat_mapper_merisL1
# Purpose: Mapping for Meris-L1 data
# Authors: Anton Korosov
# Licence: This file is part of NANSAT. You can redistribute it or modify
# under the terms of GNU General Public License, v.3
# http://www.gnu.org/licenses/gpl-3.0.html
from vrt import VRT
from envisat import Envisat
class Mapper(VRT, Envisat):
    ''' VRT with mapping of WKV for MERIS Level 1 (FR or RR) '''

    def __init__(self, fileName, gdalDataset, gdalMetadata,
                 geolocation=False, zoomSize=500, step=1, **kwargs):
        ''' Create MER1 VRT

        Parameters
        -----------
        fileName : string
        gdalDataset : gdal dataset
        gdalMetadata : gdal metadata
        geolocation : bool (default is False)
            if True, add gdal geolocation
        zoomSize: int (used in envisat.py)
            size, to which the ADS array will be zoomed using scipy
            array of this size will be stored in memory
        step: int (used in envisat.py)
            step of pixel and line in GeolocationArrays. lat/lon grids are
            generated at that step
        '''
        # Accept only MERIS L1 products (full or reduced resolution).
        product = gdalMetadata.get("MPH_PRODUCT")
        if product[0:9] != "MER_FRS_1" and product[0:9] != "MER_RR__1":
            raise AttributeError("MERIS_L1 BAD MAPPER")
        # init ADS parameters
        Envisat.__init__(self, fileName, product[0:4])
        # Central wavelengths [nm] of MERIS radiance bands 1..15, in band order.
        wavelengths = ['413', '443', '490', '510', '560', '620', '665',
                       '681', '709', '753', '761', '778', '864', '849',
                       '900']
        # One TOA radiance band per wavelength.
        metaDict = [{'src': {'SourceFilename': fileName,
                             'SourceBand': bandNumber},
                     'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                             'wavelength': wavelength}}
                    for bandNumber, wavelength in enumerate(wavelengths, 1)]
        # Band 16 carries the L1 quality flags (byte values).
        metaDict.append({'src': {'SourceFilename': fileName,
                                 'SourceBand': 16,
                                 'DataType': 1},
                         'dst': {'wkv': 'quality_flags', 'suffix': 'l1'}})
        # Add default DataType into 'src' and wavelength suffix into 'dst'.
        for bandDict in metaDict:
            bandDict['src'].setdefault('DataType', 2)  # MERIS L1 default: UInt16
            if 'wavelength' in bandDict['dst']:
                bandDict['dst']['suffix'] = bandDict['dst']['wavelength']
        # Attach scaling factors from the header GADS to the radiance bands
        # (the last entry, quality flags, gets no scaling).
        scales = self.read_scaling_gads(range(7, 22))
        for bandDict, scale in zip(metaDict[:-1], scales):
            bandDict['src']['ScaleRatio'] = str(scale)
        # Get list with resized VRTs from ADS.
        self.adsVRTs = self.get_ads_vrts(gdalDataset,
                                         ['sun zenith angles',
                                          'sun azimuth angles',
                                          'zonal winds',
                                          'meridional winds'],
                                         zoomSize=zoomSize,
                                         step=step)
        # Add one band per ADS VRT, copying its name/units metadata.
        for adsVRT in self.adsVRTs:
            adsBand = adsVRT.dataset.GetRasterBand(1)
            metaDict.append({'src': {'SourceFilename': adsVRT.fileName,
                                     'SourceBand': 1},
                             'dst': {'name': adsBand.GetMetadataItem('name'),
                                     'units': adsBand.GetMetadataItem('units')}})
        # Create empty VRT dataset with geolocation only.
        VRT.__init__(self, gdalDataset)
        # Add bands with metadata and corresponding values to the empty VRT.
        self._create_bands(metaDict)
        # Set time from the ENVISAT metadata.
        self._set_envisat_time(gdalMetadata)
        # Optionally add geolocation arrays derived from the ADS.
        if geolocation:
            self.add_geolocation_from_ads(gdalDataset,
                                          zoomSize=zoomSize, step=step)
| yuxiaobu/nansat | mappers/mapper_meris_l1.py | Python | gpl-3.0 | 6,729 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2009, 2010, 2015, 2017-2018 Rocky Bernstein
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import inspect
import pyficache
from trepan import misc as Mmisc
from trepan.processor.parse.semantics import build_bp_expr
from trepan.processor.parse.parser import LocationError
from trepan.processor.parse.scanner import ScannerError
from trepan.processor.location import resolve_location
def set_break(cmd_obj, func, filename, lineno, condition, temporary,
              args, force=False):
    """Set a breakpoint at the given function or file/line location.

    Errors are reported through cmd_obj.errmsg(). Returns False when the
    breakpoint could not be set, True after a breakpoint has been added.
    """
    width = cmd_obj.settings['width']
    if lineno is None:
        head = ("I don't understand '%s' as a line number, function name,"
                % ' '.join(args[1:]))
        cmd_obj.errmsg(Mmisc.wrapped_lines(
            head, "or file/module plus line number.", width))
        return False
    if filename is None:
        # Default to the file of the current frame.
        filename = cmd_obj.core.canonic(
            cmd_obj.proc.curframe.f_code.co_filename)
    if func is None:
        # A line breakpoint must land on a traceable line of the file.
        ok_linenos = pyficache.trace_line_numbers(filename)
        if not ok_linenos or lineno not in ok_linenos:
            head = 'File %s' % cmd_obj.core.filename(filename)
            cmd_obj.errmsg(Mmisc.wrapped_lines(
                head, "is not stoppable at line %d." % lineno, width))
            if not force:
                return False
            cmd_obj.msg("Breakpoint set although it may never be reached")
    bp = cmd_obj.core.bpmgr.add_breakpoint(filename, lineno, temporary,
                                           condition, func)
    if func and inspect.isfunction(func):
        cmd_obj.msg('Breakpoint %d set on calling function %s()'
                    % (bp.number, func.func_name))
        head = 'Currently this is line %d of file' % lineno
    else:
        head = ('Breakpoint %d set at line %d of file'
                % (bp.number, lineno))
    cmd_obj.msg(Mmisc.wrapped_lines(head, cmd_obj.core.filename(filename),
                                    width))
    return True
# Sentinel returned when a break command cannot be parsed or resolved.
INVALID_PARSE_BREAK = (None, None, None, None)


def parse_break_cmd(proc, args):
    """Parse a 'break' command line.

    Returns a tuple (function, filename, line_number, condition), or
    INVALID_PARSE_BREAK when parsing or location resolution fails.
    """
    if proc.current_command is None:
        proc.errmsg("Internal error")
        return INVALID_PARSE_BREAK
    # Text after the command word itself.
    text = proc.current_command[len(args[0]) + 1:]
    if len(args) > 1 and args[1] == 'if':
        # "break if <cond>": break at the current location.
        location = '.'
        condition = text[text.find('if ') + 3:]
    elif text == '':
        location = '.'
        condition = None
    else:
        try:
            bp_expr = build_bp_expr(text)
        except LocationError as e:
            proc.errmsg("Error in parsing breakpoint expression at or around:")
            proc.errmsg(e.text)
            proc.errmsg(e.text_cursor)
            return INVALID_PARSE_BREAK
        except ScannerError as e:
            proc.errmsg("Lexical error in parsing breakpoint expression at or around:")
            proc.errmsg(e.text)
            proc.errmsg(e.text_cursor)
            return INVALID_PARSE_BREAK
        location = bp_expr.location
        condition = bp_expr.condition
    location = resolve_location(proc, location)
    if not location:
        return INVALID_PARSE_BREAK
    return location.method, location.path, location.line_number, condition
# Demo it
if __name__=='__main__':
    from trepan.processor.command import mock as Mmock
    from trepan.processor.cmdproc import CommandProcessor
    import sys
    # Build a minimal mocked debugger/processor pair to exercise parsing.
    d = Mmock.MockDebugger()
    cmdproc = CommandProcessor(d.core)
    # print '-' * 10
    # print_source_line(sys.stdout.write, 100, 'source_line_test.py')
    # print '-' * 10
    cmdproc.frame = sys._getframe()
    cmdproc.setup()
    # Feed sample break commands through parse_break_cmd and show results.
    for cmd in (
        # "break '''c:\\tmp\\foo.bat''':1",
        # 'break """/Users/My Documents/foo.py""":2',
        # "break",
        # "break 10",
        # "break if True",
        # "break cmdproc.py:5",
        # "break set_break()",
        "break 4 if i == 5",
        # "break cmdproc.setup()",
        ):
        args = cmd.split(' ')
        cmdproc.current_command = cmd
        print(parse_break_cmd(cmdproc, args))
        pass
| rocky/python2-trepan | trepan/processor/cmdbreak.py | Python | gpl-3.0 | 5,057 |
#
# Copyright 2012 Zuza Software Foundation
#
# This file is part of the Translate Toolkit.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""Sphinx extension with custom stuff for Translate Toolkit docs."""
import docutils
def setup(app):
    """Sphinx extension entry point.

    Registers the ``:opt:`` role (rendered as inline literal text) and
    declares the extension safe for parallel source reading.
    """
    # :opt: to mark options -P --pot and options values --progress=dots
    app.add_generic_role(name="opt", nodeclass=docutils.nodes.literal)
    return {"parallel_read_safe": True}
| translate/translate | docs/_ext/translate_docs.py | Python | gpl-2.0 | 1,016 |
import shutil
import tempfile
from contextlib import contextmanager
import gnupg
from django.apps import AppConfig
from django.conf import settings
from darkknight.signals import key_created
from .models import EncryptedPrivateKey
class DarkKnightGpgConfig(AppConfig):
    """Django app config that GPG-encrypts private keys as they are created."""
    name = 'darkknight_gpg'

    def ready(self):
        # Subscribe to darkknight's key_created signal so every newly
        # generated private key is encrypted and stored.
        key_created.connect(gpg_encrypt)
@contextmanager
def tmp_gpg_context():
    """Yield a gnupg.GPG instance backed by a throwaway home directory.

    The temporary directory (and any key material written into it) is
    removed when the context exits, even on error.
    """
    # Create the directory *before* entering the try block: if mkdtemp()
    # itself fails there is nothing to clean up, and the previous placement
    # made the finally clause raise NameError on the unbound 'tmpdir',
    # masking the original exception.
    tmpdir = tempfile.mkdtemp()
    try:
        yield gnupg.GPG(homedir=tmpdir)
    finally:
        shutil.rmtree(tmpdir)
def gpg_encrypt(sender, instance, private_key, **kwargs):
    """Signal handler: encrypt *private_key* with the configured public key.

    Runs in an ephemeral GPG home (see tmp_gpg_context) so the recipient
    public key never touches a persistent keyring, then stores the armored
    ciphertext linked to the originating key record.
    """
    with open(settings.GPG_PUBLIC_KEY_PATH) as f:
        public_key = f.read()
    with tmp_gpg_context() as gpg:
        import_result = gpg.import_keys(public_key)
        # The configured public key must import cleanly before encrypting.
        assert import_result.counts['count'] >= 1, import_result.stderr
        encryption_result = gpg.encrypt(private_key, *import_result.fingerprints)
        assert encryption_result.ok, encryption_result.stderr
        EncryptedPrivateKey.objects.create(
            key=instance,
            encrypted_private_key=str(encryption_result),
        )
| fusionbox/django-darkknight | darkknight_gpg/apps.py | Python | bsd-2-clause | 1,101 |
# Copyright 2021 Alfredo de la Fuente - AvanzOSC
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from odoo import models, fields
class EventTrack(models.Model):
    _inherit = 'event.track'

    # Mirrors the parent event's flag so tracks can be searched/grouped on it.
    shared_price_event = fields.Boolean(
        string='Shared price event', related='event_id.shared_price_event',
        store=True)

    def _catch_values_for_create_analytic_line(self, partner):
        """Create analytic lines for shared-price events.

        For a non-shared event, the values computed by super() are returned
        unchanged. For a shared-price event the track duration is split
        evenly over all registrations active on the track date and one
        analytic line is created per task, weighted by how many of that
        task's registrations are active; an empty dict is then returned so
        the caller creates no additional line itself.
        """
        values = super(
            EventTrack, self)._catch_values_for_create_analytic_line(
                partner)
        if not self.event_id.shared_price_event:
            return values
        date = self.date.date()
        # Registrations of students whose enrollment covers the track date.
        registrations = self.event_id.registration_ids.filtered(
            lambda x: x.student_id and x.real_date_start and
            date >= x.real_date_start and
            (not x.real_date_end or
             (x.real_date_end and date <= x.real_date_end)))
        tasks = self.env['project.task']
        for registration in registrations.filtered(lambda x: x.task_id):
            if registration.task_id not in tasks:
                tasks += registration.task_id
        for task in tasks:
            task_registrations = registrations.filtered(
                lambda x: x.task_id == task)
            values['task_id'] = task.id
            if task.sale_line_id:
                values.update(
                    {'product_id': task.sale_line_id.product_id.id,
                     'product_uom_id':
                        task.sale_line_id.product_id.uom_id.id})
            if not task.sale_line_id and task.project_id.timesheet_product_id:
                # BUG FIX: these lookups previously read
                # 'timesheet_product_i' (missing the trailing 'd') and
                # crashed whenever a task had no sale line but its project
                # had a timesheet product; the guard above already used the
                # correct field name.
                values.update(
                    {'product_id': task.project_id.timesheet_product_id.id,
                     'product_uom_id':
                        task.project_id.timesheet_product_id.uom_id.id})
            # Each registration gets an equal share of the track duration.
            amount = ((self.duration / len(registrations)) *
                      len(task_registrations))
            values['unit_amount'] = amount
            self.env['account.analytic.line'].create(values)
        return {}
| avanzosc/odoo-addons | event_price_shared/models/event_track.py | Python | agpl-3.0 | 2,043 |
import copy
import json
import subprocess
from os import system
import pyrebase
import requests
import collections
from Firefly import aliases, logging, scheduler
from Firefly.const import API_ALEXA_VIEW, API_FIREBASE_VIEW, FIREFLY_SECURITY_MONITORING, SOURCE_LOCATION, SOURCE_TIME, TYPE_DEVICE, TYPE_ROUTINE
from Firefly.core.service_handler import ServiceConfig, ServicePackage
from Firefly.helpers.metadata import EXPORT_UI, FF_ID, HIDDEN_BY_USER, PRIMARY_ACTION
from Firefly.helpers.service import Command, Request, Service
from Firefly.services.alexa.alexa import process_alexa_request
from Firefly.services.api_ai import apiai_command_reply
from Firefly.services.firebase.event_logging import EventLogger
# Firebase realtime-database child paths used under homeStatus/{homeId}/.
FIREBASE_LOCATION_STATUS_PATH = 'locationStatus'
FIREBASE_DEVICE_VIEWS = 'deviceViews'
FIREBASE_DEVICE_STATUS = 'deviceStatus'
FIREBASE_DEVICE_SETTINGS_PATH = 'deviceSettings'
FIREBASE_HOME_STATUS = 'homeStatus'
FIREBASE_COMMAND_REPLY = 'commandReply'
FIREBASE_ALIASES = 'aliases'
# Client id used by the Alexa custom skill when posting to commandReply.
ALEXA_CUSTOM_SKILL_ID = 'firefly-alexa'
# This is the action when status messages are updated
STATUS_MESSAGE_UPDATED = {
  'status_message': 'updated'
}
def internet_up():
  """Return True when the internet appears reachable (one ping to 8.8.8.8).

  Uses subprocess with an argument list instead of os.system so no shell is
  spawned and ping's output is not dumped onto Firefly's stdout/stderr.
  """
  try:
    return subprocess.call(
        ['ping', '-c', '1', '8.8.8.8'],
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL) == 0
  except OSError:
    # ping binary unavailable: report "down" (os.system also returned
    # non-zero in that case) instead of crashing.
    return False
# Service metadata registered with the Firefly core.
TITLE = 'Firebase Service for Firefly'
AUTHOR = 'Zachary Priddy [email protected]'
SERVICE_ID = 'service_firebase'
COMMANDS = ['push', 'refresh', 'get_api_id']
REQUESTS = []
SECTION = 'FIREBASE'
# Maximum number of events kept in the in-memory event history buffer.
MAX_EVENTS = 1000
# TODO: Setup function should get the config from the service config file. If the
# required params are not in the config file then it should log and error message
# and abort install
# TODO: push this data to location weather info.. this could be useful
def Setup(firefly, package, alias, ff_id, service_package: ServicePackage, config: ServiceConfig, **kwargs):
  """Install the Firebase service component into Firefly.

  Args:
    firefly: the running Firefly core.
    package: unused here; the Firebase component reads service_package.package.
    alias: alias of this service instance.
    ff_id: firefly id of this service instance.
    service_package: package descriptor for the service.
    config: parsed service configuration (credentials, home id, ...).

  Returns:
    True once the component has been installed.
  """
  logging.info('Installing Firebase Service')
  firebase = Firebase(firefly, alias, ff_id, service_package, config, **kwargs)
  firefly.install_component(firebase)
  return True
class Firebase(Service):
  def __init__(self, firefly, alias, ff_id, service_package: ServicePackage, config: ServiceConfig, **kwargs):
    """Authenticate against Firebase, register the home if needed and open
    the realtime command streams.

    Args:
      firefly: the running Firefly core.
      alias: alias of this service instance.
      ff_id: firefly id of this service instance.
      service_package: package descriptor (only .package is used).
      config: service configuration carrying the Firebase credentials.
    """
    # TODO: Fix this
    package = service_package.package
    super().__init__(firefly, SERVICE_ID, package, TITLE, AUTHOR, COMMANDS, REQUESTS)
    logging.info('[FIREBASE] setting up firebase')
    self.service_config = config
    self.api_key = config.api_key
    self.auth_domain = config.auth_domain
    self.database_url = config.database_url
    self.storage_bucket = config.storage_bucket
    self.email = config.email
    self.password = config.password
    self.facebook = config.facebook
    self.home_id = config.home_id
    # Create the event logger
    self.event_logger = EventLogger(self)
    # Event history will hold the last 1000 events and overwrite existing events when buffer is full
    self.event_history = collections.deque(maxlen=MAX_EVENTS)
    self.events_since_clear = 0
    self.add_command('push', self.push)
    self.add_command('refresh', self.refresh_all)
    self.add_command('get_api_id', self.get_api_id)
    self.config = {
      "apiKey": self.api_key,
      "authDomain": self.auth_domain,
      "databaseURL": self.database_url,
      "storageBucket": self.storage_bucket
    }
    logging.info('[FIREBASE] logging into firebase')
    self.firebase = pyrebase.initialize_app(self.config)
    # Get a reference to the auth service
    self.auth = self.firebase.auth()
    # Log the user in
    self.user = self.auth.sign_in_with_email_and_password(self.email, self.password)
    self.uid = self.user['localId']
    self.id_token = self.user['idToken']
    # Get a reference to the database service
    self.db = self.firebase.database()
    # First run on this install: obtain a home id from the cloud function.
    if self.home_id is None:
      self.register_home()
    # Periodic token refresh and full re-sync of views/status.
    scheduler.runEveryM(30, self.refresh_user)
    scheduler.runEveryM(20, self.refresh_all)
    scheduler.runInS(30, self.refresh_all)
    logging.info('[FIREBASE] starting stream')
    self.stream = self.db.child('homeStatus').child(self.home_id).child('commands').stream(self.command_stream_handler, self.id_token)
    self.commandReplyStream = self.db.child('homeStatus').child(self.home_id).child('commandReply').stream(self.command_reply, self.id_token)
    logging.info('[FIREBASE] stream started')
  def register_home(self):
    """Register this installation with the Firefly cloud function.

    On success the returned home_id is stored on the instance and persisted
    into the service config; on failure a notification is logged and the
    home id stays unset.
    """
    logging.info('Registering Home On Firebase!!!!')
    register_url = 'https://us-central1-firefly-beta-cdb9d.cloudfunctions.net/registerHome'
    return_data = requests.post(register_url, data={
      'uid': self.uid
    }).json()
    self.home_id = return_data.get('home_id')
    if self.home_id is None:
      logging.notify('error registering home')
      return
    # Persist the new home id so future restarts skip registration.
    self.service_config.home_id = self.home_id
    self.service_config.save()
    logging.info('Config file for firebase has been updated.')
def process_settings(self, message, **kwargs):
logging.info('[FIREBASE] PROCESSING SETTINGS: %s' % str(message))
if message.get('notification', {}).get('facebook') is not None:
enable = bool(message.get('notification', {}).get('facebook'))
self.set_facebook_settings(enable)
def set_facebook_settings(self, enable, **kwargs):
self.facebook = enable
logging.info('[FIREBASE] Enabling/Disabling Facebook. %s' % str(enable))
self.service_config.facebook = enable
self.service_config.save()
if enable:
self.send_facebook_notification("Facebook notifications for firefly are now enabled.")
else:
self.send_facebook_notification("Facebook notifications for firefly are now disabled.")
logging.info('Config file for hue has been updated.')
  def refresh_stream(self):
    """Tear down and re-open both realtime streams.

    If the internet is down, retries itself in one minute instead.
    """
    if not internet_up():
      logging.error('[FIREBASE REFRESH STREAM] Internet is down')
      scheduler.runInM(1, self.refresh_stream, 'firebase_internet_down_refresh_stream')
      return
    self.stream.close()
    self.stream = self.db.child('homeStatus').child(self.home_id).child('commands').stream(self.command_stream_handler, self.id_token)
    self.commandReplyStream.close()
    self.commandReplyStream = self.db.child('homeStatus').child(self.home_id).child('commandReply').stream(self.command_reply, self.id_token)
  def command_reply(self, message):
    """Handle a commandReply stream event and post a reply for the client.

    Only top-level (depth-1) paths with data are processed: the path is the
    requesting client id. Alexa custom-skill requests get a dedicated
    handler; everything else falls through to the legacy api.ai reply path.
    """
    data = message['data']
    # Take path and split it. we are only going to process top level paths. This should be the clientID.
    raw_path = message['path']
    path_list = raw_path[1:].split('/')
    path_depth = len(path_list)
    if path_depth > 1 or data is None:
      logging.debug('[FIREBASE COMMAND REPLY] message was updated or deleted')
      return
    path = path_list[0]
    client_id = path
    logging.debug('[FIREBASE COMMAND REPLY] processing for client: %s' % client_id)
    response = {}
    if path == ALEXA_CUSTOM_SKILL_ID:
      alexa_request = data['service_alexa']
      response = process_alexa_request(self.firefly, alexa_request)
    if response:
      logging.debug('[FIREBASE COMMAND REPLY] sending response : %s' % str(response))
      self.db.child(FIREBASE_HOME_STATUS).child(self.home_id).child(FIREBASE_COMMAND_REPLY).child(client_id).child('reply').set(response, self.id_token)
      return
    # TODO: Remove this after upgrading all cloud functions
    if not message['data']:
      return
    if message['data'].get('reply') is not None or message.get('reply') is not None:
      return
    key = message['path'][1:]
    if 'reply' in key or 'speech' in key:
      return
    try:
      reply = apiai_command_reply(self.firefly, message['data'])
      self.db.child('homeStatus').child(self.home_id).child('commandReply').child(key).child('reply').set(reply, self.id_token)
    except Exception as e:
      print(str(e))
  def firebase_send_command(self, ff_id, command):
    ''' Process and dispatch one command received from firebase.

    Args:
      ff_id: target of the command. 'location', 'settings' and 'system'
        are handled specially; anything else is treated as a component id.
      command: either a command-name string or a dict mapping command
        names to their keyword arguments.
    '''
    logging.info('[FIREBASE SEND COMMAND] : %s:%s' % (str(ff_id), str(command)))
    # Location is a special case.
    if ff_id == 'location':
      if type(command) is not dict:
        return
      for command_string, command_args in command.items():
        send_command = Command(ff_id, 'web_api', command_string, **command_args)
        self.firefly.location.process_command(send_command)
      return
    if ff_id == 'settings':
      self.process_settings(command)
      return
    if ff_id == 'system' and command == 'restart':
      subprocess.run(['bash /opt/firefly_system/Firefly/system_files/restart_firefly.sh'], shell=True)
    if type(command) is str:
      send_command = Command(ff_id, 'web_api', command)
      logging.info('FIREBASE SENDING COMMAND: %s ' % str(send_command))
      self.firefly.send_command(send_command)
      # Deleting a component invalidates the cached views; refresh shortly.
      if command == 'delete':
        scheduler.runInS(10, self.update_device_views, job_id='firebase_refresh')
      return
    # TODO Handle install package command
    # if list(command.keys())[0] == 'install_package':
    #   self.firefly.install_package(**dict(list(command.values())[0]))
    if type(command) is dict:
      for command_string, command_args in command.items():
        send_command = Command(ff_id, 'web_api', command_string, **command_args)
        logging.info('FIREBASE SENDING COMMAND: %s ' % str(send_command))
        self.firefly.send_command(send_command)
      return
  def command_stream_handler(self, message):
    ''' Handle commands sent from the UI via the commands stream.

    Args:
      message: stream event; a root path ('/') carries a dict of
        ff_id -> command entries, otherwise the path itself is the ff_id.

    Each processed command node is removed from firebase afterwards so it
    is not replayed.
    '''
    try:
      logging.message('FIREBASE MESSAGE: %s ' % str(message))
      # Return if no data
      if message['data'] is None:
        return
      if message['path'] == '/':
        for ff_id, command in message['data'].items():
          self.firebase_send_command(ff_id, command)
          self.db.child('homeStatus').child(self.home_id).child('commands').child(ff_id).remove(self.id_token)
      else:
        ff_id = message['path'][1:]
        command = message['data']
        self.firebase_send_command(ff_id, command)
        self.db.child('homeStatus').child(self.home_id).child('commands').child(ff_id).remove(self.id_token)
    except Exception as e:
      logging.error('Firebase Stream Error: %s' % str(e))
  def refresh_all(self, **kwargs):
    """Full re-sync: push all device values, routines, alexa views, group
    views, location status and device views to firebase.

    Any error while uploading is caught and reported via logging.notify so
    the periodic refresh job keeps running.
    """
    # Hard-coded refresh all device values
    # TODO use core api for this.
    all_values = {}
    for ff_id, device in self.firefly.components.items():
      try:
        all_values[ff_id] = device.get_all_request_values(True)
      except:
        pass
    # Nasty json sanitation: firebase keys cannot contain '#', '$' or '/',
    # and nulls are dropped by round-tripping through a JSON string.
    all_values = scrub(all_values)
    all_values = json.dumps(all_values)
    all_values = all_values.replace('null', '')
    all_values = all_values.replace('#', '')
    all_values = all_values.replace('$', '')
    all_values = all_values.replace('/', '_-_')
    all_values = json.loads(all_values)
    try:
      alexa_views = self.get_all_alexa_views('firebase')
      routines = self.get_routines()
      # TODO(zpriddy): Remove old views when new UI is done
      # self.db.child("userAlexa").child(self.uid).child("devices").set(alexa_views, self.id_token)
      # self.db.child("homeStatus").child(self.home_id).child('devices').update(all_values, self.id_token)
      self.db.child("homeStatus").child(self.home_id).child('routines').set(routines['config'], self.id_token)
      # End of old views
      routine_view = {}
      for r in routines['view']:
        routine_view[r.get('ff_id')] = r
      routine_config = {}
      for r in routines['config']:
        routine_config[r.get('ff_id')] = r
      # Update all devices statuses
      self.update_all_device_status(overwrite=True)
      # Update device settings
      self.update_device_settings()
      # This is the new location of routine views [/homeStatus/{homeId}/routineViews]
      self.db.child("homeStatus").child(self.home_id).child('routineViews').set(routine_view, self.id_token)
      self.db.child("homeStatus").child(self.home_id).child('routineConfigs').set(routine_config, self.id_token)
      # This is the new location of location status [/homeStatus/{homeId}/locationStatus]
      self.update_location_status(overwrite=True, update_metadata_timestamp=False)
      # This is the new location of alexa api data [/homeStatus/{homeId}/alexaAPIView]
      self.db.child("homeStatus").child(self.home_id).child('alexaAPIViews').set(alexa_views, self.id_token)
      groups = {}
      groups_state = {}
      for ff_id, group in self.firefly.components.items():
        if group.type != 'GROUP':
          continue
        groups[ff_id] = group.get_metadata()
        groups_state[ff_id] = group.get_all_request_values(True)
      self.db.child("homeStatus").child(self.home_id).child('groupViews').set(groups, self.id_token)
      self.db.child("homeStatus").child(self.home_id).child('groupStatus').set(groups_state, self.id_token)
      self.update_device_views()
    except Exception as e:
      logging.notify("Firebase 271: %s" % str(e))
def update_device_settings(self):
logging.info('[FIREBASE] updating device settings')
device_settings = {}
for ff_id, device in self.firefly.components.items():
try:
if device.type != TYPE_DEVICE:
continue
device_settings[ff_id] = device.get_settings_view()
except:
pass
self.set_home_status(FIREBASE_DEVICE_SETTINGS_PATH, device_settings)
  def update_last_metadata_timestamp(self):
    ''' Update locationStatus/lastMetadataUpdate to the current location
    time, signalling clients that cached metadata views are stale.
    '''
    self.set_home_status('locationStatus/lastMetadataUpdate', self.firefly.location.now.timestamp())
  def set_home_status(self, path, data, retry=True, **kwargs):
    ''' Set (overwrite) a node under homeStatus/{homeId}/ in firebase.

    Args:
      path: path relative to homeStatus/{homeId}/ that will be set.
      data: data that will be set.
      retry: on failure, refresh the auth token and retry exactly once.

    Returns:
      True on success, False when the (re)try failed.
    '''
    try:
      self.db.child("homeStatus").child(self.home_id).child(path).set(data, self.id_token)
      return True
    except Exception as e:
      if not retry:
        return False
      logging.error('[FIREBASE SET HOME STATUS] ERROR: %s' % str(e))
      # Most failures are expired tokens: re-auth, then retry once.
      self.refresh_user()
      return self.set_home_status(path, data, False)
  def update_home_status(self, path, data, retry=True, **kwargs):
    ''' Update (merge) a node under homeStatus/{homeId}/ in firebase.

    Args:
      path: path relative to homeStatus/{homeId}/ that will be updated.
      data: data that will be merged into the node.
      retry: on failure, refresh the auth token and retry exactly once.

    Returns:
      True on success, False when the (re)try failed.
    '''
    try:
      self.db.child("homeStatus").child(self.home_id).child(path).update(data, self.id_token)
      return True
    except Exception as e:
      if not retry:
        return False
      logging.error('[FIREBASE UPDATE HOME STATUS] ERROR: %s' % str(e))
      # Most failures are expired tokens: re-auth, then retry once.
      self.refresh_user()
      return self.update_home_status(path, data, False)
  def update_location_status(self, overwrite=False, update_metadata_timestamp=False, update_status_message=False, **kwargs):
    ''' Update the location status in firebase.

    Args:
      overwrite: if True calls set instead of update (replaces the node).
      update_metadata_timestamp: also update the metadata timestamp; when
        overwriting without this flag the timestamp node is removed by the
        set, so pass True to restore it.
      update_status_message: clear all status messages before merging the
        current ones (only used on the non-overwrite path).
      **kwargs:
    '''
    location_status = self.get_location_status()
    if overwrite:
      self.set_home_status(FIREBASE_LOCATION_STATUS_PATH, location_status)
      if update_metadata_timestamp:
        self.update_last_metadata_timestamp()
      return
    if update_status_message:
      self.set_home_status('%s/statusMessages' % FIREBASE_LOCATION_STATUS_PATH, {})
    self.update_home_status(FIREBASE_LOCATION_STATUS_PATH, location_status)
  def update_security_status(self, status):
    # Merge the alarm/security status into locationStatus/security.
    self.update_home_status('%s/security' % FIREBASE_LOCATION_STATUS_PATH, status)
def update_device_min_views(self, device_views, **kwargs):
device_min_view = {}
for ff_id, device_view in device_views.items():
try:
primary_action = device_view['metadata']['primary']
device_min_view[ff_id] = {
FF_ID: device_view [FF_ID],
'alias': device_view ['alias'],
# TODO: Update this to hidden_by_user or hidden_by_firefly when ready.
EXPORT_UI: device_view [EXPORT_UI],
HIDDEN_BY_USER: device_view[EXPORT_UI],
PRIMARY_ACTION: {
primary_action: device_view['metadata']['actions'][primary_action]
}
}
except Exception as e:
logging.error('[FIREBASE DEVICE MIN VIEW] error: %s' % str(e))
logging.debug('[FIREBASE DEVICE MIN VIEW] setting min view: %s' % str(device_min_view))
self.set_home_status('deviceMinView', device_min_view)
  def update_device_views(self, **kwargs):
    ''' Update device view metadata for all devices, plus the derived
    minimal views, aliases and the metadata timestamp.

    Args:
      **kwargs:
    '''
    logging.info('[FIREBASE DEVICE VIEW UPDATE] updating all device views')
    device_views = {}
    devices = self.get_all_component_views('firebase_refresh', filter=TYPE_DEVICE)
    for device in devices:
      device_views[device.get(FF_ID, 'unknown')] = device
    self.set_home_status(FIREBASE_DEVICE_VIEWS, device_views)
    self.update_device_min_views(device_views)
    # TODO: Remove this
    check_all_keys(device_views)
    # Legacy location kept in sync until the old UI is retired.
    self.set_home_status('devices', device_views)
    self.update_aliases()
    self.update_last_metadata_timestamp()
def update_all_device_status(self, overwrite=False, **kwargs):
# TODO use core api for this.
all_values = {}
for ff_id, device in self.firefly.components.items():
try:
all_values[ff_id] = device.get_all_request_values(True)
except:
pass
for device, device_view in all_values.items():
try:
if 'PARAMS' in device_view.keys():
device_view.pop('PARAMS')
if 'RAW_VALUES' in device_view.keys():
device_view.pop('RAW_VALUES')
if 'SENSORS' in device_view.keys():
device_view.pop('SENSORS')
if 'ZWAVE_VALUES' in device_view.keys():
device_view.pop('ZWAVE_VALUES')
except:
pass
# TODO Remove this
check_all_keys(all_values)
# self.update_home_status('devices', all_values)
if overwrite:
self.set_home_status(FIREBASE_DEVICE_STATUS, all_values)
return
self.update_home_status(FIREBASE_DEVICE_STATUS, all_values)
def update_device_status(self, ff_id, action: dict, **kwargs):
''' Update a single device status
Args:
ff_id: ff_id of the device to update
action: the action data to update
**kwargs:
Returns:
'''
# TODO(zpriddy): Find a better way to do this
if 'PARAMS' in action.keys():
return
if 'RAW_VALUES' in action.keys():
return
if 'SENSORS' in action.keys():
return
if 'ZWAVE_VALUES' in action.keys():
return
path = '%s/%s' % (FIREBASE_DEVICE_STATUS, ff_id)
self.update_home_status(path, action)
  def update_aliases(self, **kwargs):
    ''' Upload the full device alias table from firefly to
    homeStatus/{homeId}/aliases.

    Args:
      **kwargs:
    '''
    self.set_home_status(FIREBASE_ALIASES, aliases.aliases)
def get_routines(self):
routines = {
'view': [],
'config': []
}
for ff_id, d in self.firefly.components.items():
logging.info('[FIREBASE]: getting routine view for: %s-%s' % (ff_id, d.type))
if d.type == TYPE_ROUTINE:
logging.info('[FIREBASE]: getting routine view for (2): %s' % ff_id)
routines['view'].append(d.export(firebase_view=True))
routines['config'].append(d.export())
return routines
  def get_component_view(self, ff_id, source):
    # Ask the component for its firebase view through the standard
    # request API.
    device_request = Request(ff_id, source, API_FIREBASE_VIEW)
    data = self.firefly.components[device_request.ff_id].request(device_request)
    return data
  def get_component_alexa_view(self, ff_id, source):
    # Ask the component for its Alexa API view through the standard
    # request API.
    logging.info('[FIREBASE] getting alexa view for %s' % ff_id)
    device_request = Request(ff_id, source, API_ALEXA_VIEW)
    data = self.firefly.components[device_request.ff_id].request(device_request)
    return data
def get_all_alexa_views(self, source, filter=[TYPE_DEVICE, TYPE_ROUTINE]):
if type(filter) is str:
filter = [filter]
views = []
for ff_id, device in self.firefly.components.items():
if device.type in filter or filter is None:
data = self.get_component_alexa_view(ff_id, source)
if data is not None and len(data.get('capabilities')) > 0:
views.append(data)
return views
def get_all_component_views(self, source, filter=None):
if type(filter) is str:
filter = [filter]
views = []
for ff_id, device in self.firefly.components.items():
if device.type in filter or filter is None:
data = self.get_component_view(ff_id, source)
views.append(data)
return views
  def get_location_status(self, **kwargs):
    """
    Get the location status.

    Builds the locationStatus payload from the firefly location object:
    current time (epoch plus broken-down fields and timezone), coordinates,
    darkness flag, current/last mode, status messages and available modes.

    Args:
      **kwargs:

    Returns: dict of location status

    """
    now = self.firefly.location.now
    return_data = {
      'security': {},
      'time': {
        'epoch': now.timestamp(),
        'day': now.day,
        'month': now.month,
        'year': now.year,
        'hour': now.hour,
        'minute': now.minute,
        'str': str(now),
        'timeZone': self.firefly.location.geolocation.timezone
      },
      'location': {
        'lat': self.firefly.location.latitude,
        'lon': self.firefly.location.longitude,
        'address': self.firefly.location.address
      },
      'isDark': self.firefly.location.isDark,
      'mode': self.firefly.location.mode,
      'lastMode': self.firefly.location.lastMode,
      'statusMessages': self.firefly.location.status_messages,
      'modes': self.firefly.location.modes
    }
    # Security/monitoring may not be configured; leave 'security' empty then.
    try:
      return_data['security']['status'] = self.firefly.security_and_monitoring.get_alarm_status()
    except:
      pass
    return return_data
def refresh_user(self):
    """Re-authenticate with firebase and re-open the command streams.

    Closes any stale streams (best-effort), signs in again with the stored
    email/password, and re-subscribes to the 'commands' and 'commandReply'
    streams. If the internet looks down, retries in 1 minute; on any other
    failure, retries in 1 hour.
    """
    logging.info('[FIREBASE] REFRESHING USER')
    if not internet_up():
        logging.error('[FIREBASE REFRESH] Internet seems to be down')
        # No connectivity: reschedule ourselves in one minute.
        scheduler.runInM(1, self.refresh_user, 'refresh_user_internet_down')
        return
    try:
        try:
            # Best-effort close of the old streams; they may already be dead.
            self.stream.close()
            self.commandReplyStream.close()
        except:
            pass
        self.user = self.auth.sign_in_with_email_and_password(self.email, self.password)
        self.id_token = self.user['idToken']
        # Re-open both realtime streams with the fresh token.
        self.stream = self.db.child('homeStatus').child(self.home_id).child('commands').stream(self.command_stream_handler, self.id_token)
        self.commandReplyStream = self.db.child('homeStatus').child(self.home_id).child('commandReply').stream(self.command_reply, self.id_token)
    except Exception as e:
        logging.info("Firebase 266: %s" % str(e))
        # Sign-in failed: retry in one hour.
        scheduler.runInH(1, self.refresh_user, 'firebase_refresh_user')
        pass
def security_update(self, security_status):
    """Mirror the current security status into the firebase home status."""
    logging.info('Updating Firebase security status')
    self.set_home_status("securityStatus", security_status)
def push(self, source, action, retry=True):
    """Push a state change to firebase, dispatching on the event source.

    Time/location/security events update the relevant status sections;
    group and device events update their firebase subtree and append to
    the event log. On any error the auth token is refreshed and the push
    is retried once (retry=False on the second attempt).

    Args:
      source: event source constant or a component ff_id.
      action: dict describing the change.
      retry: whether to retry once after refreshing auth.
    """
    logging.info('[FIREBASE PUSH] Pushing Data: %s: %s' % (str(source), str(action)))
    try:
        # Update time events
        if source == SOURCE_TIME:
            self.update_location_status()
            return
        # Update location events
        if source == SOURCE_LOCATION:
            update_status_message = action == STATUS_MESSAGE_UPDATED
            self.update_location_status(update_status_message=update_status_message)
            self.send_event(source, action)
            return
        if source == FIREFLY_SECURITY_MONITORING:
            self.update_security_status(action)
            self.send_event(source, action)
            return
        if source not in self.firefly.components:
            logging.error('[FIREBASE PUSH] ERROR: Source not in firefly components.')
            return
        # Groups live under a separate subtree from plain devices.
        if self.firefly.components[source].type == 'GROUP':
            self.db.child("homeStatus").child(self.home_id).child('groupStatus').child(source).update(action, self.id_token)
            self.send_event(source, action)
            return
        self.update_device_status(source, action)
        # TODO(zpriddy): Remove this when new UI is done.
        # Metadata-only updates are not mirrored into the legacy 'devices'
        # subtree or the event log.
        if 'PARAMS' in action.keys():
            return
        if 'RAW_VALUES' in action.keys():
            return
        if 'SENSORS' in action.keys():
            return
        if 'ZWAVE_VALUES' in action.keys():
            return
        self.db.child("homeStatus").child(self.home_id).child('devices').child(source).update(action, self.id_token)
        self.send_event(source, action)
    except Exception as e:
        logging.info('[FIREBASE PUSH] ERROR: %s' % str(e))
        # Assume an expired token, refresh, and retry exactly once.
        self.refresh_user()
        if retry:
            self.push(source, action, False)
def send_event(self, source, action):
    """Add a new event in the event log.

    Strips bookkeeping keys ('last_update', 'status_message') first; if
    nothing remains the event is dropped. When more than MAX_EVENTS have
    been written since the last trim, the firebase 'events' subtree is
    rewritten from the in-memory history before appending.

    Args:
      source: ff_id of the device.
      action: action dict to enter into the event log (mutated in place).
    """
    # NOTE(review): this pops keys from the caller's dict — callers appear
    # to rely on the remaining keys only; confirm before changing.
    if 'last_update' in action.keys():
        action.pop('last_update')
    if 'status_message' in action.keys():
        action.pop('status_message')
    if not action:
        return
    now = self.firefly.location.now
    now_time = now.strftime("%B %d %Y %I:%M:%S %p")
    self.event_logger.event(source, action, now.timestamp())
    # Key is epoch-with-dot-removed plus source, e.g. '1512345678901234-ff_id'.
    event_key = '%s-%s' % (str(now.timestamp()).replace('.', ''), source)
    event_data = {
        'ff_id': source,
        'event': action,
        'timestamp': now.timestamp(),
        'time': now_time
    }
    if self.events_since_clear > MAX_EVENTS:
        # Trim: replace the remote subtree with the retained local history.
        last_x_events = {}
        self.events_since_clear = 0
        for e in self.event_history:
            last_x_events[e.get('key')] = e.get('data')
        self.db.child("homeStatus").child(self.home_id).child('events').set(last_x_events, self.id_token)
    self.db.child("homeStatus").child(self.home_id).child('events').child(event_key).set(event_data, self.id_token)
    self.event_history.append({'key':event_key, 'data':event_data})
    self.events_since_clear += 1
    # Retained dead code below (string statement) documents the previous
    # push-based implementation.
    '''
    self.db.child("homeStatus").child(self.home_id).child('events').push({
      'ff_id': source,
      'event': action,
      'timestamp': now.timestamp(),
      'time': now_time
    }, self.id_token)
    '''
def push_notification(self, message, priority, retry=True):
    """Send a notification, retrying once after an auth refresh on failure.

    Args:
      message: notification text.
      priority: notification priority passed through to firebase.
      retry: whether to retry once after refreshing auth.
    """
    try:
        self.send_notification(message, priority)
        if self.facebook:
            self.send_facebook_notification(message)
    except Exception as e:
        # Narrowed from a bare except; log the cause instead of hiding it.
        logging.error('[FIREBASE NOTIFICATION] ERROR: %s' % str(e))
        self.refresh_user()
        if retry:
            self.push_notification(message, priority, False)
def send_notification(self, message, priority):
    """Append a notification entry to the firebase 'notifications' list."""
    now = self.firefly.location.now
    payload = {
        'message': message,
        'priority': priority,
        'timestamp': now.timestamp(),
        'time': now.strftime("%B %d %Y %I:%M:%S %p")
    }
    notifications = self.db.child("homeStatus").child(self.home_id).child('notifications')
    notifications.push(payload, self.id_token)
def send_facebook_notification(self, message, **kwargs):
    """Queue a message for the facebook notifier via firebase."""
    logging.info("[FIREBASE FACEBOOK] SENDING NOTIFICATION")
    # NOTE(review): 'facebookNotifcations' is misspelled but is the live DB
    # key — do not "fix" it without migrating the database and consumers.
    self.db.child("homeStatus").child(self.home_id).child("facebookNotifcations").push(message, self.id_token)
def get_api_id(self, **kwargs):
    """Register an API device and stream its API key back to a callback.

    Expects kwargs 'api_ff_id' (the device id) and 'callback'; returns
    False if either is missing. Writes the device under 'apiDevices',
    then opens a one-shot stream that calls callback(firebase_api_key=...)
    when the key appears and closes itself.
    """
    ff_id = kwargs.get('api_ff_id')
    callback = kwargs.get('callback')
    my_stream = None
    if ff_id is None or callback is None:
        return False

    def stream_api_key(message):
        # Stream handler: fires when the apiKey value is written remotely.
        data = message.get('data')
        if data is None:
            return
        api_key = data
        if api_key is None:
            return
        callback(firebase_api_key=api_key)
        try:
            # One-shot: close the stream (closure over my_stream, which is
            # assigned below before the handler can fire) once delivered.
            my_stream.close()
        except:
            pass

    now = self.firefly.location.now.timestamp()
    self.db.child("homeStatus").child(self.home_id).child("apiDevices").update({
        ff_id: {
            'added': now
        }
    }, self.id_token)
    my_stream = self.db.child("homeStatus").child(self.home_id).child("apiDevices").child(ff_id).child('apiKey').stream(stream_api_key, self.id_token)
def upload_log(self, filename, **kwargs):
    """Upload a local event-log file to firebase storage under this home."""
    timestamp = self.firefly.location.now.timestamp()
    remote_file_name = '%s_event_log_%s.json' % (self.home_id, timestamp)
    bucket = self.firebase.storage().child(self.home_id)
    bucket.child(remote_file_name).put(filename, self.id_token)
def scrub(x):
    """Recursively replace None with '' in a nested structure.

    Returns a new structure (the input is never mutated). Dicts, lists and
    tuples are rebuilt with scrubbed contents; any other value is deep-copied
    and returned as-is (None becomes '').

    BUG FIX: the original deep-copied a tuple and then tried item assignment
    on it (ret[k] = ...), which raises TypeError; tuples are now rebuilt.
    """
    if isinstance(x, dict):
        return {k: scrub(v) for k, v in x.items()}
    if isinstance(x, (list, tuple)):
        if isinstance(x, tuple):
            # Project logging hook flagging unexpected tuples in the data.
            logging.notify(str(x))
        scrubbed = [scrub(v) for v in x]
        return tuple(scrubbed) if isinstance(x, tuple) else scrubbed
    if x is None:
        return ''
    return copy.deepcopy(x)
# Characters firebase forbids in database keys/paths.
FIREBASE_INVALID_CHARS = ['/', '\\', '$', '#']

def has_invalid_char(string_to_check):
    """Return True if the string contains any firebase-forbidden character."""
    return any(c in string_to_check for c in FIREBASE_INVALID_CHARS)
def check_all_keys(firebase_dict):
    """Recursively log every dict key that firebase would reject.

    Walks nested dicts and emits a critical log line per offending key;
    does not modify the structure.
    """
    for key in firebase_dict:
        if has_invalid_char(key):
            logging.critical('[FIREBASE CHECK ALL KEYS] ****************** BAD KEY: %s' % key)
        value = firebase_dict[key]
        # isinstance (rather than `type(...) is dict`) also descends into
        # dict subclasses such as OrderedDict.
        if isinstance(value, dict):
            check_all_keys(value)
| Firefly-Automation/Firefly | Firefly/services/firebase/firebase.py | Python | apache-2.0 | 29,353 |
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.common.utils import data_utils
from tempest import config
from tempest.openstack.common import log as logging
from tempest.scenario import manager
from tempest import test
CONF = config.CONF
LOG = logging.getLogger(__name__)
class TestSwiftBasicOps(manager.OfficialClientTest):
    """
    Test swift with the follow operations:
     * get swift stat.
     * create container.
     * upload a file to the created container.
     * list container's objects and assure that the uploaded file is present.
     * delete object from container.
     * list container's objects and assure that the deleted file is gone.
     * delete a container.
     * list containers and assure that the deleted container is gone.
    """

    @classmethod
    def setUpClass(cls):
        cls.set_network_resources()
        super(TestSwiftBasicOps, cls).setUpClass()
        if not CONF.service_available.swift:
            skip_msg = ("%s skipped as swift is not available" %
                        cls.__name__)
            raise cls.skipException(skip_msg)

    def _get_swift_stat(self):
        """get swift status for our user account."""
        self.object_storage_client.get_account()
        LOG.debug('Swift status information obtained successfully')

    def _create_container(self, container_name=None):
        """Create a (randomly named) container and verify it exists."""
        name = container_name or data_utils.rand_name(
            'swift-scenario-container')
        self.object_storage_client.put_container(name)
        # look for the container to assure it is created
        self._list_and_check_container_objects(name)
        LOG.debug('Container %s created' % (name))
        return name

    def _delete_container(self, container_name):
        """Delete the given container."""
        self.object_storage_client.delete_container(container_name)
        LOG.debug('Container %s deleted' % (container_name))

    def _upload_object_to_container(self, container_name, obj_name=None):
        """Upload a small random text object and return its name."""
        obj_name = obj_name or data_utils.rand_name('swift-scenario-object')
        self.object_storage_client.put_object(container_name, obj_name,
                                              data_utils.rand_name('obj_data'),
                                              content_type='text/plain')
        return obj_name

    def _delete_object(self, container_name, filename):
        """Delete an object and verify it is gone from the listing."""
        self.object_storage_client.delete_object(container_name, filename)
        self._list_and_check_container_objects(container_name,
                                               not_present_obj=[filename])

    def _list_and_check_container_objects(self, container_name,
                                          present_obj=(),
                                          not_present_obj=()):
        """
        List objects for a given container and assert which are present and
        which are not.

        BUG FIX: the original used mutable list defaults ([]), which are
        shared across calls; immutable empty tuples are equivalent here
        (both only get iterated) and safe.
        """
        meta, response = self.object_storage_client.get_container(
            container_name)
        # create a list with file name only
        object_list = [obj['name'] for obj in response]
        for obj in present_obj:
            self.assertIn(obj, object_list)
        for obj in not_present_obj:
            self.assertNotIn(obj, object_list)

    @test.services('object_storage')
    def test_swift_basic_ops(self):
        self._get_swift_stat()
        container_name = self._create_container()
        obj_name = self._upload_object_to_container(container_name)
        self._list_and_check_container_objects(container_name, [obj_name])
        self._delete_object(container_name, obj_name)
        self._delete_container(container_name)
| vedujoshi/os_tempest | tempest/scenario/test_swift_basic_ops.py | Python | apache-2.0 | 4,211 |
# -*- coding: utf-8 -*-
"""
sphinx.writers.latex
~~~~~~~~~~~~~~~~~~~~
Custom docutils writer for LaTeX.
Much of this code is adapted from Dave Kuhlman's "docpy" writer from his
docutils sandbox.
:copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
import sys
from os import path
from docutils import nodes, writers
from docutils.writers.latex2e import Babel
from sphinx import addnodes
from sphinx import highlighting
from sphinx.errors import SphinxError
from sphinx.locale import admonitionlabels, _
from sphinx.util import split_into
from sphinx.util.osutil import ustrftime
from sphinx.util.pycompat import any
from sphinx.util.texescape import tex_escape_map, tex_replace_map
from sphinx.util.smartypants import educate_quotes_latex
# LaTeX preamble template; the %(...)s slots are filled from
# LaTeXTranslator.elements in astext().
HEADER = r'''%% Generated by Sphinx.
\def\sphinxdocclass{%(docclass)s}
\documentclass[%(papersize)s,%(pointsize)s%(classoptions)s]{%(wrapperclass)s}
%(inputenc)s
%(utf8extra)s
%(cmappkg)s
%(fontenc)s
%(babel)s
%(fontpkg)s
%(fncychap)s
%(longtable)s
\usepackage{sphinx}
\usepackage{multirow}
%(preamble)s
\title{%(title)s}
\date{%(date)s}
\release{%(release)s}
\author{%(author)s}
\newcommand{\sphinxlogo}{%(logo)s}
\renewcommand{\releasename}{%(releasename)s}
%(makeindex)s
'''

# Emitted once, at the start of the first (non-appendix) document.
BEGIN_DOC = r'''
\begin{document}
%(shorthandoff)s
%(maketitle)s
%(tableofcontents)s
'''

# Closes the document; filled from the same elements dict.
FOOTER = r'''
\renewcommand{\indexname}{%(indexname)s}
%(printindex)s
\end{document}
'''
class collected_footnote(nodes.footnote):
    """Footnotes that are collected (by ``collect_footnotes``) are assigned
    this class so they can be emitted inline as ``\\footnote{...}``."""
class UnsupportedError(SphinxError):
    """Raised for reST constructs with no LaTeX rendering (e.g. nested tables)."""
    category = 'Markup is unsupported in LaTeX'
class LaTeXWriter(writers.Writer):
    """Docutils writer that delegates all work to LaTeXTranslator."""

    supported = ('sphinxlatex',)
    settings_spec = ('LaTeX writer options', '', (
        ('Document name', ['--docname'], {'default': ''}),
        ('Document class', ['--docclass'], {'default': 'manual'}),
        ('Author', ['--author'], {'default': ''}),
        ))
    settings_defaults = {}

    # filled in by translate()
    output = None

    def __init__(self, builder):
        writers.Writer.__init__(self)
        self.builder = builder

    def translate(self):
        # Walk the doctree once; the translator accumulates the LaTeX body.
        visitor = LaTeXTranslator(self.document, self.builder)
        self.document.walkabout(visitor)
        self.output = visitor.astext()
# Helper classes
class ExtBabel(Babel):
    """Babel subclass with language queries needed by the Sphinx writer."""

    def get_shorthandoff(self):
        """Return LaTeX that disables the double-quote shorthand for
        languages whose babel option defines it, else ''."""
        primary = self.language.split('_')[0]
        quote_shorthand_langs = ('de', 'ngerman', 'sl', 'slovene', 'pt',
                                 'portuges', 'es', 'spanish', 'nl', 'dutch',
                                 'pl', 'polish', 'it', 'italian')
        if primary not in quote_shorthand_langs:
            return ''
        return '\\shorthandoff{"}'

    def uses_cyrillic(self):
        """True when the document language is written in Cyrillic script."""
        primary = self.language.split('_')[0]
        cyrillic_langs = ('bg', 'bulgarian', 'kk', 'kazakh',
                          'mn', 'mongolian', 'ru', 'russian',
                          'uk', 'ukrainian')
        return primary in cyrillic_langs
# Compatibility shim for older docutils: add a Slovene mapping if the
# private table exists.
# in latest trunk, the attribute is called Babel.language_codes and already
# includes Slovene
if hasattr(Babel, '_ISO639_TO_BABEL'):
    Babel._ISO639_TO_BABEL['sl'] = 'slovene'
class Table(object):
    """Per-table rendering state used by LaTeXTranslator while a table
    is being emitted."""

    def __init__(self):
        # counters: current column cursor, total columns, rows emitted
        self.col = 0
        self.colcount = 0
        self.rowcount = 0
        # layout flags that decide which LaTeX environment gets used
        self.had_head = False
        self.has_problematic = False
        self.has_verbatim = False
        self.longtable = False
        # optional caption text and explicit tabularcolumns spec
        self.caption = None
        self.colspec = None
class LaTeXTranslator(nodes.NodeVisitor):
    # LaTeX sectioning commands, outermost first; indexed by sectionlevel.
    sectionnames = ["part", "chapter", "section", "subsection",
                    "subsubsection", "paragraph", "subparagraph"]

    ignore_missing_images = False

    # Default values for the %(...)s slots in HEADER/BEGIN_DOC/FOOTER.
    # __init__ overrides many of these from the builder config, and
    # latex_elements overrides them all last.
    default_elements = {
        'papersize': 'letterpaper',
        'pointsize': '10pt',
        'classoptions': '',
        'extraclassoptions': '',
        'inputenc': '\\usepackage[utf8]{inputenc}',
        'utf8extra': '\\DeclareUnicodeCharacter{00A0}{\\nobreakspace}',
        'cmappkg': '\\usepackage{cmap}',
        'fontenc': '\\usepackage[T1]{fontenc}',
        'babel': '\\usepackage{babel}',
        'fontpkg': '\\usepackage{times}',
        'fncychap': '\\usepackage[Bjarne]{fncychap}',
        'longtable': '\\usepackage{longtable}',
        'preamble': '',
        'title': '',
        'date': '',
        'release': '',
        'author': '',
        'logo': '',
        'releasename': 'Release',
        'makeindex': '\\makeindex',
        'shorthandoff': '',
        'maketitle': '\\maketitle',
        'tableofcontents': '\\tableofcontents',
        'footer': '',
        'printindex': '\\printindex',
        'transition': '\n\n\\bigskip\\hrule{}\\bigskip\n\n',
    }

    # sphinx specific document classes
    docclasses = ('howto', 'manual')
    def __init__(self, document, builder):
        """Resolve the template ``elements`` from config and initialize all
        per-document translation state (stacks, counters, flags)."""
        nodes.NodeVisitor.__init__(self, document)
        self.builder = builder
        self.body = []
        # sort out some elements
        papersize = builder.config.latex_paper_size + 'paper'
        if papersize == 'paper': # e.g. command line "-D latex_paper_size="
            papersize = 'letterpaper'
        self.elements = self.default_elements.copy()
        self.elements.update({
            'wrapperclass': self.format_docclass(document.settings.docclass),
            'papersize': papersize,
            'pointsize': builder.config.latex_font_size,
            # if empty, the title is set to the first section title
            'title': document.settings.title,
            'release': builder.config.release,
            'author': document.settings.author,
            'releasename': _('Release'),
            'preamble': builder.config.latex_preamble,
            'indexname': _('Index'),
        })
        # underlying LaTeX class: article for howtos, report for manuals,
        # unless overridden by latex_docclass
        if document.settings.docclass == 'howto':
            docclass = builder.config.latex_docclass.get('howto', 'article')
        else:
            docclass = builder.config.latex_docclass.get('manual', 'report')
        self.elements['docclass'] = docclass
        if builder.config.today:
            self.elements['date'] = builder.config.today
        else:
            self.elements['date'] = ustrftime(builder.config.today_fmt
                                              or _('%B %d, %Y'))
        if builder.config.latex_logo:
            self.elements['logo'] = '\\includegraphics{%s}\\par' % \
                                    path.basename(builder.config.latex_logo)
        # language-dependent adjustments via babel
        if builder.config.language:
            babel = ExtBabel(builder.config.language)
            lang = babel.get_language()
            if lang:
                self.elements['classoptions'] += ',' + babel.get_language()
            else:
                self.builder.warn('no Babel option known for language %r' %
                                  builder.config.language)
            self.elements['shorthandoff'] = babel.get_shorthandoff()
            self.elements['fncychap'] = '\\usepackage[Sonny]{fncychap}'
            # Times fonts don't work with Cyrillic languages
            if babel.uses_cyrillic():
                self.elements['fontpkg'] = ''
            # pTeX (Japanese TeX) for support
            if builder.config.language == 'ja':
                # use dvipdfmx as default class option in Japanese
                self.elements['classoptions'] = ',dvipdfmx'
                # disable babel which has not publishing quality in Japanese
                self.elements['babel'] = ''
                # disable fncychap in Japanese documents
                self.elements['fncychap'] = ''
        else:
            self.elements['classoptions'] += ',english'
        # allow the user to override them all
        self.elements.update(builder.config.latex_elements)
        if self.elements['extraclassoptions']:
            self.elements['classoptions'] += ',' + \
                self.elements['extraclassoptions']
        self.highlighter = highlighting.PygmentsBridge('latex',
            builder.config.pygments_style, builder.config.trim_doctest_flags)
        # pushdown stacks used during the doctree walk
        self.context = []
        self.descstack = []
        self.bibitems = []
        self.table = None
        self.next_table_colspec = None
        # stack of [language, linenothreshold] settings per file
        # the first item here is the default and must not be changed
        # the second item is the default for the master file and can be changed
        # by .. highlight:: directive in the master file
        self.hlsettingstack = 2 * [[builder.config.highlight_language,
                                    sys.maxint]]
        self.footnotestack = []
        self.curfilestack = []
        self.handled_abbrs = set()
        # top-level sectioning depth: howto starts at 'section',
        # manual at 'part' or 'chapter' depending on latex_use_parts
        if document.settings.docclass == 'howto':
            self.top_sectionlevel = 2
        else:
            if builder.config.latex_use_parts:
                self.top_sectionlevel = 0
            else:
                self.top_sectionlevel = 1
        # hypertarget ids queued for the next section/figure/table
        self.next_section_ids = set()
        self.next_figure_ids = set()
        self.next_table_ids = set()
        # flags
        self.in_title = 0
        self.in_production_list = 0
        self.in_footnote = 0
        self.in_caption = 0
        self.first_document = 1
        self.this_is_the_title = 1
        self.literal_whitespace = 0
        self.no_contractions = 0
        self.compact_list = 0
        self.first_param = 0
        self.previous_spanning_row = 0
        self.previous_spanning_column = 0
        self.remember_multirow = {}
def format_docclass(self, docclass):
""" prepends prefix to sphinx document classes
"""
if docclass in self.docclasses:
docclass = 'sphinx' + docclass
return docclass
    def astext(self):
        """Assemble the complete LaTeX source: filled HEADER, pygments
        stylesheet, accumulated body, footer element, indices, FOOTER."""
        return (HEADER % self.elements +
                self.highlighter.get_stylesheet() +
                u''.join(self.body) +
                '\n' + self.elements['footer'] + '\n' +
                self.generate_indices() +
                FOOTER % self.elements)
    def hypertarget(self, id, withdoc=True, anchor=True):
        """Return a \\label (optionally preceded by \\phantomsection) for *id*,
        prefixed with the current docname when *withdoc* is true."""
        if withdoc:
            id = self.curfilestack[-1] + ':' + id
        return (anchor and '\\phantomsection' or '') + \
            '\\label{%s}' % self.idescape(id)

    def hyperlink(self, id):
        """Return the opening of a \\hyperref to *id*; caller closes '}}'."""
        return '{\\hyperref[%s]{' % self.idescape(id)

    def hyperpageref(self, id):
        """Return a starred \\autopageref to *id*."""
        return '\\autopageref*{%s}' % self.idescape(id)

    def idescape(self, id):
        """Make an id safe for use inside \\label/\\ref arguments by
        escaping via tex_replace_map and turning backslashes into '_'."""
        return unicode(id).translate(tex_replace_map).\
            encode('ascii', 'backslashreplace').decode('ascii').\
            replace('\\', '_')
    def generate_indices(self):
        """Render all enabled domain indices as theindex environments."""
        def generate(content, collapsed):
            # content: list of (letter, entries) pairs as produced by
            # the domain index's generate(); entries indexed positionally.
            ret.append('\\begin{theindex}\n')
            ret.append('\\def\\bigletter#1{{\\Large\\sffamily#1}'
                       '\\nopagebreak\\vspace{1mm}}\n')
            for i, (letter, entries) in enumerate(content):
                if i > 0:
                    ret.append('\\indexspace\n')
                ret.append('\\bigletter{%s}\n' %
                           unicode(letter).translate(tex_escape_map))
                for entry in entries:
                    # entry[3] is the anchor id; skip entries without one
                    if not entry[3]:
                        continue
                    ret.append('\\item {\\texttt{%s}}' % self.encode(entry[0]))
                    if entry[4]:
                        # add "extra" info
                        ret.append(' \\emph{(%s)}' % self.encode(entry[4]))
                    ret.append(', \\pageref{%s:%s}\n' %
                               (entry[2], self.idescape(entry[3])))
            ret.append('\\end{theindex}\n')

        ret = []
        # latex_domain_indices can be False/True or a list of index names
        indices_config = self.builder.config.latex_domain_indices
        if indices_config:
            for domain in self.builder.env.domains.itervalues():
                for indexcls in domain.indices:
                    indexname = '%s-%s' % (domain.name, indexcls.name)
                    if isinstance(indices_config, list):
                        if indexname not in indices_config:
                            continue
                    # deprecated config value
                    if indexname == 'py-modindex' and \
                           not self.builder.config.latex_use_modindex:
                        continue
                    content, collapsed = indexcls(domain).generate(
                        self.builder.docnames)
                    if not content:
                        continue
                    ret.append(u'\\renewcommand{\\indexname}{%s}\n' %
                               indexcls.localname)
                    generate(content, collapsed)
        return ''.join(ret)
    def visit_document(self, node):
        """Start a (sub)document: the first one opens the real document,
        every later one becomes an appendix."""
        self.footnotestack.append(self.collect_footnotes(node))
        self.curfilestack.append(node.get('docname', ''))
        if self.first_document == 1:
            # the first document is all the regular content ...
            self.body.append(BEGIN_DOC % self.elements)
            self.first_document = 0
        elif self.first_document == 0:
            # ... and all others are the appendices
            self.body.append(u'\n\\appendix\n')
            self.first_document = -1
        if 'docname' in node:
            self.body.append(self.hypertarget(':doc'))
        # "- 1" because the level is increased before the title is visited
        self.sectionlevel = self.top_sectionlevel - 1

    def depart_document(self, node):
        """Flush any collected citations as a thebibliography environment."""
        if self.bibitems:
            # widest label determines thebibliography's alignment argument
            widest_label = ""
            for bi in self.bibitems:
                if len(widest_label) < len(bi[0]):
                    widest_label = bi[0]
            self.body.append(u'\n\\begin{thebibliography}{%s}\n' % widest_label)
            for bi in self.bibitems:
                # bi: [label, text, docname, citation id] (see visit_label)
                target = self.hypertarget(bi[2] + ':' + bi[3],
                                          withdoc=False)
                self.body.append(u'\\bibitem[%s]{%s}{%s %s}\n' %
                                 (bi[0], self.idescape(bi[0]), target, bi[1]))
            self.body.append(u'\\end{thebibliography}\n')
            self.bibitems = []
    def visit_start_of_file(self, node):
        """Push per-file state when an included file begins."""
        # collect new footnotes
        self.footnotestack.append(self.collect_footnotes(node))
        # also add a document target
        self.next_section_ids.add(':doc')
        self.curfilestack.append(node['docname'])
        # use default highlight settings for new file
        self.hlsettingstack.append(self.hlsettingstack[0])

    def collect_footnotes(self, node):
        """Map footnote label -> [collected_footnote, used-flag] for all
        footnotes under *node*, not descending into included files."""
        fnotes = {}
        def footnotes_under(n):
            if isinstance(n, nodes.footnote):
                yield n
            else:
                for c in n.children:
                    if isinstance(c, addnodes.start_of_file):
                        continue
                    for k in footnotes_under(c):
                        yield k
        for fn in footnotes_under(node):
            num = fn.children[0].astext().strip()
            fnotes[num] = [collected_footnote(*fn.children), False]
        return fnotes

    def depart_start_of_file(self, node):
        # pop the state pushed in visit_start_of_file
        self.footnotestack.pop()
        self.curfilestack.pop()
        self.hlsettingstack.pop()

    def visit_highlightlang(self, node):
        # update highlight settings for the current file only
        self.hlsettingstack[-1] = [node['lang'], node['linenothreshold']]
        raise nodes.SkipNode
    def visit_section(self, node):
        """Track nesting depth and queue the section's ids for its title."""
        if not self.this_is_the_title:
            self.sectionlevel += 1
        self.body.append('\n\n')
        if node.get('ids'):
            self.next_section_ids.update(node['ids'])

    def depart_section(self, node):
        # never drop below the configured top level
        self.sectionlevel = max(self.sectionlevel - 1,
                                self.top_sectionlevel - 1)

    def visit_problematic(self, node):
        self.body.append(r'{\color{red}\bfseries{}')

    def depart_problematic(self, node):
        self.body.append('}')

    def visit_topic(self, node):
        # typeset topics/sidebars as a shadowboxed minipage (see depart)
        self.body.append('\\setbox0\\vbox{\n'
                         '\\begin{minipage}{0.95\\linewidth}\n')

    def depart_topic(self, node):
        self.body.append('\\end{minipage}}\n'
                         '\\begin{center}\\setlength{\\fboxsep}{5pt}'
                         '\\shadowbox{\\box0}\\end{center}\n')
    visit_sidebar = visit_topic
    depart_sidebar = depart_topic

    def visit_glossary(self, node):
        pass

    def depart_glossary(self, node):
        pass

    def visit_productionlist(self, node):
        self.body.append('\n\n\\begin{productionlist}\n')
        self.in_production_list = 1

    def depart_productionlist(self, node):
        self.body.append('\\end{productionlist}\n\n')
        self.in_production_list = 0

    def visit_production(self, node):
        # named productions get a grammar-token hypertarget; continuation
        # lines (empty tokenname) use \productioncont
        if node['tokenname']:
            tn = node['tokenname']
            self.body.append(self.hypertarget('grammar-token-' + tn))
            self.body.append('\\production{%s}{' % self.encode(tn))
        else:
            self.body.append('\\productioncont{')

    def depart_production(self, node):
        self.body.append('}\n')

    def visit_transition(self, node):
        self.body.append(self.elements['transition'])

    def depart_transition(self, node):
        pass
    def visit_title(self, node):
        """Emit the appropriate LaTeX for a title depending on its parent:
        document title, sectioning command, topic/sidebar, admonition,
        or table caption."""
        parent = node.parent
        if isinstance(parent, addnodes.seealso):
            # the environment already handles this
            raise nodes.SkipNode
        elif self.this_is_the_title:
            # NOTE(review): with 'and', a multi-child title whose first child
            # IS a Text node never warns — possibly intended 'or'; confirm
            # before changing.
            if len(node.children) != 1 and not isinstance(node.children[0],
                                                          nodes.Text):
                self.builder.warn('document title is not a single Text node',
                                  (self.curfilestack[-1], node.line))
            if not self.elements['title']:
                # text needs to be escaped since it is inserted into
                # the output literally
                self.elements['title'] = node.astext().translate(tex_escape_map)
            self.this_is_the_title = 0
            raise nodes.SkipNode
        elif isinstance(parent, nodes.section):
            try:
                self.body.append(r'\%s{' % self.sectionnames[self.sectionlevel])
            except IndexError:
                # just use "subparagraph", it's not numbered anyway
                self.body.append(r'\%s{' % self.sectionnames[-1])
            self.context.append('}\n')
            # attach all queued hypertargets right after the section command
            if self.next_section_ids:
                for id in self.next_section_ids:
                    self.context[-1] += self.hypertarget(id, anchor=False)
                self.next_section_ids.clear()
        elif isinstance(parent, (nodes.topic, nodes.sidebar)):
            self.body.append(r'\textbf{')
            self.context.append('}\n\n\medskip\n\n')
        elif isinstance(parent, nodes.Admonition):
            self.body.append('{')
            self.context.append('}\n')
        elif isinstance(parent, nodes.table):
            # stash the caption; depart_table emits it
            self.table.caption = self.encode(node.astext())
            raise nodes.SkipNode
        else:
            self.builder.warn(
                'encountered title node not in section, topic, table, '
                'admonition or sidebar',
                (self.curfilestack[-1], node.line or ''))
            self.body.append('\\textbf{')
            self.context.append('}\n')
        self.in_title = 1

    def depart_title(self, node):
        self.in_title = 0
        self.body.append(self.context.pop())

    def visit_subtitle(self, node):
        if isinstance(node.parent, nodes.sidebar):
            self.body.append('~\\\\\n\\textbf{')
            self.context.append('}\n\\smallskip\n')
        else:
            self.context.append('')

    def depart_subtitle(self, node):
        self.body.append(self.context.pop())
    def visit_desc(self, node):
        """Object descriptions render inside a fulllineitems environment."""
        self.body.append('\n\n\\begin{fulllineitems}\n')
        if self.table:
            self.table.has_problematic = True

    def depart_desc(self, node):
        self.body.append('\n\\end{fulllineitems}\n\n')

    def visit_desc_signature(self, node):
        """Emit the hypertarget and choose \\pysigline vs
        \\pysiglinewithargsret depending on whether a parameter list follows."""
        if node.parent['objtype'] != 'describe' and node['ids']:
            hyper = self.hypertarget(node['ids'][0])
        else:
            hyper = ''
        self.body.append(hyper)
        for child in node:
            if isinstance(child, addnodes.desc_parameterlist):
                self.body.append(r'\pysiglinewithargsret{')
                break
        else:
            self.body.append(r'\pysigline{')

    def depart_desc_signature(self, node):
        self.body.append('}')

    def visit_desc_addname(self, node):
        self.body.append(r'\code{')
        self.literal_whitespace += 1

    def depart_desc_addname(self, node):
        self.body.append('}')
        self.literal_whitespace -= 1

    def visit_desc_type(self, node):
        pass

    def depart_desc_type(self, node):
        pass

    def visit_desc_returns(self, node):
        self.body.append(r'{ $\rightarrow$ ')

    def depart_desc_returns(self, node):
        self.body.append(r'}')

    def visit_desc_name(self, node):
        self.body.append(r'\bfcode{')
        self.no_contractions += 1
        self.literal_whitespace += 1

    def depart_desc_name(self, node):
        self.body.append('}')
        self.literal_whitespace -= 1
        self.no_contractions -= 1

    def visit_desc_parameterlist(self, node):
        # close name, open parameterlist
        self.body.append('}{')
        self.first_param = 1

    def depart_desc_parameterlist(self, node):
        # close parameterlist, open return annotation
        self.body.append('}{')

    def visit_desc_parameter(self, node):
        # comma-separate all parameters after the first
        if not self.first_param:
            self.body.append(', ')
        else:
            self.first_param = 0
        if not node.hasattr('noemph'):
            self.body.append(r'\emph{')

    def depart_desc_parameter(self, node):
        if not node.hasattr('noemph'):
            self.body.append('}')

    def visit_desc_optional(self, node):
        self.body.append(r'\optional{')

    def depart_desc_optional(self, node):
        self.body.append('}')

    def visit_desc_annotation(self, node):
        self.body.append(r'\strong{')

    def depart_desc_annotation(self, node):
        self.body.append('}')

    def visit_desc_content(self, node):
        if node.children and not isinstance(node.children[0], nodes.paragraph):
            # avoid empty desc environment which causes a formatting bug
            self.body.append('~')

    def depart_desc_content(self, node):
        pass
    def visit_seealso(self, node):
        self.body.append(u'\n\n\\strong{%s:}\n\n' % admonitionlabels['seealso'])

    def depart_seealso(self, node):
        self.body.append("\n\n")

    def visit_rubric(self, node):
        # the "Footnotes" rubric is synthetic — footnotes are emitted inline
        if len(node.children) == 1 and node.children[0].astext() in \
               ('Footnotes', _('Footnotes')):
            raise nodes.SkipNode
        self.body.append('\\paragraph{')
        self.context.append('}\n')

    def depart_rubric(self, node):
        self.body.append(self.context.pop())

    def visit_footnote(self, node):
        # regular footnote nodes are skipped; collected_footnote copies
        # (see collect_footnotes) are rendered inline instead
        raise nodes.SkipNode

    def visit_collected_footnote(self, node):
        self.in_footnote += 1
        self.body.append('\\footnote{')

    def depart_collected_footnote(self, node):
        self.body.append('}')
        self.in_footnote -= 1

    def visit_label(self, node):
        # citation labels feed the bibitems collected for depart_document
        if isinstance(node.parent, nodes.citation):
            self.bibitems[-1][0] = node.astext()
            self.bibitems[-1][2] = self.curfilestack[-1]
            self.bibitems[-1][3] = node.parent['ids'][0]
        raise nodes.SkipNode
    def visit_tabular_col_spec(self, node):
        # remember an explicit .. tabularcolumns:: spec for the next table
        self.next_table_colspec = node['spec']
        raise nodes.SkipNode

    def visit_table(self, node):
        """Begin a table: create the Table state object and redirect body
        output into per-table buffers until depart_table assembles them."""
        if self.table:
            raise UnsupportedError(
                '%s:%s: nested tables are not yet implemented.' %
                (self.curfilestack[-1], node.line or ''))
        self.table = Table()
        self.table.longtable = 'longtable' in node['classes']
        self.tablebody = []
        self.tableheaders = []
        # Redirect body output until table is finished.
        self._body = self.body
        self.body = self.tablebody
    def depart_table(self, node):
        """Assemble the buffered table: pick longtable/tabular/tabulary,
        emit colspec, caption, headers (with longtable continuation
        machinery) and the buffered body."""
        # long tables are forced into the longtable environment
        if self.table.rowcount > 30:
            self.table.longtable = True
        self.body = self._body
        if not self.table.longtable and self.table.caption is not None:
            self.body.append(u'\n\n\\begin{threeparttable}\n'
                             u'\\capstart\\caption{%s}\n' % self.table.caption)
        if self.table.longtable:
            self.body.append('\n\\begin{longtable}')
            endmacro = '\\end{longtable}\n\n'
        elif self.table.has_verbatim:
            self.body.append('\n\\begin{tabular}')
            endmacro = '\\end{tabular}\n\n'
        elif self.table.has_problematic and not self.table.colspec:
            # if the user has given us tabularcolumns, accept them and use
            # tabulary nevertheless
            self.body.append('\n\\begin{tabular}')
            endmacro = '\\end{tabular}\n\n'
        else:
            self.body.append('\n\\begin{tabulary}{\\linewidth}')
            endmacro = '\\end{tabulary}\n\n'
        if self.table.colspec:
            self.body.append(self.table.colspec)
        else:
            if self.table.has_problematic:
                # fixed-width p{} columns sharing 95% of the line width
                colwidth = 0.95 / self.table.colcount
                colspec = ('p{%.3f\\linewidth}|' % colwidth) * \
                          self.table.colcount
                self.body.append('{|' + colspec + '}\n')
            elif self.table.longtable:
                self.body.append('{|' + ('l|' * self.table.colcount) + '}\n')
            else:
                self.body.append('{|' + ('L|' * self.table.colcount) + '}\n')
        if self.table.longtable and self.table.caption is not None:
            self.body.append(u'\\caption{%s} \\\\\n' % self.table.caption)
        if self.table.caption is not None:
            for id in self.next_table_ids:
                self.body.append(self.hypertarget(id, anchor=False))
            self.next_table_ids.clear()
        if self.table.longtable:
            # longtable needs the header repeated on every page plus
            # continued-on/continued-from banners
            self.body.append('\\hline\n')
            self.body.extend(self.tableheaders)
            self.body.append('\\endfirsthead\n\n')
            self.body.append('\\multicolumn{%s}{c}%%\n' % self.table.colcount)
            self.body.append(r'{{\textsf{\tablename\ \thetable{} -- %s}}} \\'
                             % _('continued from previous page'))
            self.body.append('\n\\hline\n')
            self.body.extend(self.tableheaders)
            self.body.append('\\endhead\n\n')
            self.body.append(ur'\hline \multicolumn{%s}{|r|}{{\textsf{%s}}} \\ \hline'
                             % (self.table.colcount,
                                _('Continued on next page')))
            self.body.append('\n\\endfoot\n\n')
            self.body.append('\\endlastfoot\n\n')
        else:
            self.body.append('\\hline\n')
            self.body.extend(self.tableheaders)
        self.body.extend(self.tablebody)
        self.body.append(endmacro)
        if not self.table.longtable and self.table.caption is not None:
            self.body.append('\\end{threeparttable}\n\n')
        self.table = None
        self.tablebody = None
    def visit_colspec(self, node):
        # one colspec node per column; count them for the table layout
        self.table.colcount += 1

    def depart_colspec(self, node):
        pass

    def visit_tgroup(self, node):
        pass

    def depart_tgroup(self, node):
        pass

    def visit_thead(self, node):
        self.table.had_head = True
        if self.next_table_colspec:
            self.table.colspec = '{%s}\n' % self.next_table_colspec
        self.next_table_colspec = None
        # Redirect head output until header is finished. see visit_tbody.
        self.body = self.tableheaders

    def depart_thead(self, node):
        self.body.append('\\hline')

    def visit_tbody(self, node):
        # headerless tables still need the thead bookkeeping run once
        if not self.table.had_head:
            self.visit_thead(node)
        self.body = self.tablebody

    def depart_tbody(self, node):
        self.body.append('\\hline')

    def visit_row(self, node):
        self.table.col = 0

    def depart_row(self, node):
        if self.previous_spanning_row == 1:
            self.previous_spanning_row = 0
        self.body.append('\\\\\n')
        self.table.rowcount += 1
def visit_entry(self, node):
    """Emit one table cell, handling the ``&`` separators and
    multirow/multicolumn spans."""
    if self.table.col > 0:
        self.body.append(' & ')
    elif self.remember_multirow.get(1, 0) > 1:
        # column 1 is still occupied by a multirow cell opened in an
        # earlier row: emit an empty cell and consume one row of the span
        self.remember_multirow[1] -= 1
        self.body.append(' & ')
    self.table.col += 1
    context = ''
    if 'morerows' in node:
        # vertical span: open \multirow and remember how many rows it covers
        self.body.append(' \multirow{')
        self.previous_spanning_row = 1
        self.body.append(str(node.get('morerows') + 1))
        self.body.append('}{*}{')
        context += '}'
        self.remember_multirow[self.table.col] = node.get('morerows') + 1
    if 'morecols' in node:
        # horizontal span: open \multicolumn; the first column keeps its
        # left rule, later columns only the right one
        self.body.append(' \multicolumn{')
        self.body.append(str(node.get('morecols') + 1))
        if self.table.col == 1:
            self.body.append('}{|l|}{')
        else:
            self.body.append('}{l|}{')
        context += '}'
    if isinstance(node.parent.parent, nodes.thead):
        # header cells are set in sans-serif
        self.body.append('\\textsf{\\relax ')
        context += '}'
    if self.remember_multirow.get(self.table.col + 1, 0) > 1:
        # the following column is covered by a multirow span: schedule an
        # empty filler cell to be emitted when this cell is closed
        self.remember_multirow[self.table.col + 1] -= 1
        context += ' & '
    self.context.append(context)

def depart_entry(self, node):
    # close whatever visit_entry opened (span braces, \textsf, filler cells)
    self.body.append(self.context.pop())  # header
def visit_acks(self, node):
# this is a list in the source, but should be rendered as a
# comma-separated list here
self.body.append('\n\n')
self.body.append(', '.join(n.astext()
for n in node.children[0].children) + '.')
self.body.append('\n\n')
raise nodes.SkipNode
def visit_bullet_list(self, node):
    """Open an itemize environment unless we are inside an hlist."""
    if not self.compact_list:
        self.body.append('\\begin{itemize}\n' )
    if self.table:
        self.table.has_problematic = True

def depart_bullet_list(self, node):
    """Close the itemize environment opened by visit_bullet_list."""
    if self.compact_list:
        return
    self.body.append('\\end{itemize}\n' )

def visit_enumerated_list(self, node):
    """Open an enumerate environment, honouring an explicit start value."""
    self.body.append('\\begin{enumerate}\n' )
    if 'start' in node:
        counter = node['start'] - 1
        self.body.append('\\setcounter{enumi}{%d}\n' % counter)
    if self.table:
        self.table.has_problematic = True

def depart_enumerated_list(self, node):
    self.body.append('\\end{enumerate}\n' )

def visit_list_item(self, node):
    # "{}" guards against a following "[" being parsed as an optional
    # argument by LaTeX's list environments.
    self.body.append(r'\item {} ')

def depart_list_item(self, node):
    self.body.append('\n')
def visit_definition_list(self, node):
self.body.append('\\begin{description}\n')
if self.table:
self.table.has_problematic = True
def depart_definition_list(self, node):
self.body.append('\\end{description}\n')
def visit_definition_list_item(self, node):
pass
def depart_definition_list_item(self, node):
pass
def visit_term(self, node):
ctx = '}] \\leavevmode'
if node.get('ids'):
ctx += self.hypertarget(node['ids'][0])
self.body.append('\\item[{')
self.context.append(ctx)
def depart_term(self, node):
self.body.append(self.context.pop())
def visit_termsep(self, node):
self.body.append(', ')
raise nodes.SkipNode
def visit_classifier(self, node):
self.body.append('{[}')
def depart_classifier(self, node):
self.body.append('{]}')
def visit_definition(self, node):
pass
def depart_definition(self, node):
self.body.append('\n')
def visit_field_list(self, node):
self.body.append('\\begin{quote}\\begin{description}\n')
if self.table:
self.table.has_problematic = True
def depart_field_list(self, node):
self.body.append('\\end{description}\\end{quote}\n')
def visit_field(self, node):
pass
def depart_field(self, node):
pass
visit_field_name = visit_term
depart_field_name = depart_term
visit_field_body = visit_definition
depart_field_body = depart_definition
def visit_paragraph(self, node):
self.body.append('\n')
def depart_paragraph(self, node):
self.body.append('\n')
def visit_centered(self, node):
self.body.append('\n\\begin{center}')
if self.table:
self.table.has_problematic = True
def depart_centered(self, node):
self.body.append('\n\\end{center}')
def visit_hlist(self, node):
# for now, we don't support a more compact list format
# don't add individual itemize environments, but one for all columns
self.compact_list += 1
self.body.append('\\begin{itemize}\\setlength{\\itemsep}{0pt}'
'\\setlength{\\parskip}{0pt}\n')
if self.table:
self.table.has_problematic = True
def depart_hlist(self, node):
self.compact_list -= 1
self.body.append('\\end{itemize}\n')
def visit_hlistcol(self, node):
pass
def depart_hlistcol(self, node):
pass
def latex_image_length(self, width_str):
    """Convert a docutils width/height spec into a LaTeX length.

    Returns None for pixel or unit-less sizes (left for LaTeX to handle),
    a ``\\linewidth`` fraction for percentages, and the original string
    unchanged for any other unit (cm, pt, em, ...).
    """
    # raw string: '\d'/'\s' in a plain literal are invalid escape
    # sequences (a DeprecationWarning, later a SyntaxError)
    match = re.match(r'(\d*\.?\d*)\s*(\S*)', width_str)
    if not match:
        # fallback (kept for safety; the all-optional pattern above
        # matches any string, so this branch is effectively unreachable)
        return width_str
    res = width_str
    amount, unit = match.groups()[:2]
    if not unit or unit == "px":
        # pixels: let LaTeX alone
        return None
    elif unit == "%":
        res = "%.3f\\linewidth" % (float(amount) / 100.0)
    return res
def is_inline(self, node):
"""Check whether a node represents an inline element."""
return isinstance(node.parent, nodes.TextElement)
def visit_image(self, node):
    """Emit \\includegraphics for an image node, wrapped in scaling and
    alignment markup derived from the node's attributes."""
    attrs = node.attributes
    pre = []                            # in reverse order
    post = []
    include_graphics_options = []
    is_inline = self.is_inline(node)
    if 'scale' in attrs:
        # Could also be done with ``scale`` option to
        # ``\includegraphics``; doing it this way for consistency.
        pre.append('\\scalebox{%f}{' % (attrs['scale'] / 100.0,))
        post.append('}')
    if 'width' in attrs:
        w = self.latex_image_length(attrs['width'])
        if w:
            include_graphics_options.append('width=%s' % w)
    if 'height' in attrs:
        h = self.latex_image_length(attrs['height'])
        if h:
            include_graphics_options.append('height=%s' % h)
    if 'align' in attrs:
        # keyed on (is_inline, align): inline images are raised/lowered,
        # block images positioned with \hfill (bool keys compare equal
        # to the 0/1 used here)
        align_prepost = {
            # By default latex aligns the top of an image.
            (1, 'top'): ('', ''),
            (1, 'middle'): ('\\raisebox{-0.5\\height}{', '}'),
            (1, 'bottom'): ('\\raisebox{-\\height}{', '}'),
            (0, 'center'): ('{\\hfill', '\\hfill}'),
            # These 2 don't exactly do the right thing.  The image should
            # be floated alongside the paragraph.  See
            # http://www.w3.org/TR/html4/struct/objects.html#adef-align-IMG
            (0, 'left'): ('{', '\\hfill}'),
            (0, 'right'): ('{\\hfill', '}'),}
        try:
            pre.append(align_prepost[is_inline, attrs['align']][0])
            post.append(align_prepost[is_inline, attrs['align']][1])
        except KeyError:
            pass                        # unknown alignment: emit the image bare
    if not is_inline:
        pre.append('\n')
        post.append('\n')
    pre.reverse()
    if node['uri'] in self.builder.images:
        # image was copied into the output directory; use the new name
        uri = self.builder.images[node['uri']]
    else:
        # missing image!
        if self.ignore_missing_images:
            return
        uri = node['uri']
    if uri.find('://') != -1:
        # ignore remote images
        return
    self.body.extend(pre)
    options = ''
    if include_graphics_options:
        options = '[%s]' % ','.join(include_graphics_options)
    self.body.append('\\includegraphics%s{%s}' % (options, uri))
    self.body.extend(post)

def depart_image(self, node):
    pass
def visit_figure(self, node):
ids = ''
for id in self.next_figure_ids:
ids += self.hypertarget(id, anchor=False)
self.next_figure_ids.clear()
if 'width' in node and node.get('align', '') in ('left', 'right'):
self.body.append('\\begin{wrapfigure}{%s}{%s}\n\\centering' %
(node['align'] == 'right' and 'r' or 'l',
node['width']))
self.context.append(ids + '\\end{wrapfigure}\n')
else:
if (not 'align' in node.attributes or
node.attributes['align'] == 'center'):
# centering does not add vertical space like center.
align = '\n\\centering'
align_end = ''
else:
# TODO non vertical space for other alignments.
align = '\\begin{flush%s}' % node.attributes['align']
align_end = '\\end{flush%s}' % node.attributes['align']
self.body.append('\\begin{figure}[htbp]%s\n' % align)
if any(isinstance(child, nodes.caption) for child in node):
self.body.append('\\capstart\n')
self.context.append(ids + align_end + '\\end{figure}\n')
def depart_figure(self, node):
self.body.append(self.context.pop())
def visit_caption(self, node):
    """Open a \\caption{...}; a counter tracks caption nesting."""
    self.body.append('\\caption{')
    self.in_caption += 1

def depart_caption(self, node):
    self.in_caption -= 1
    self.body.append('}')

def visit_legend(self, node):
    """Figure legends are set in small type."""
    self.body.append('{\\small ')

def depart_legend(self, node):
    self.body.append('}')

def visit_admonition(self, node):
    # generic admonitions are rendered as a plain "note" notice
    self.body.append('\n\\begin{notice}{note}')

def depart_admonition(self, node):
    self.body.append('\\end{notice}\n')
def _make_visit_admonition(name):
def visit_admonition(self, node):
self.body.append(u'\n\\begin{notice}{%s}{%s:}' %
(name, admonitionlabels[name]))
return visit_admonition
def _depart_named_admonition(self, node):
self.body.append('\\end{notice}\n')
visit_attention = _make_visit_admonition('attention')
depart_attention = _depart_named_admonition
visit_caution = _make_visit_admonition('caution')
depart_caution = _depart_named_admonition
visit_danger = _make_visit_admonition('danger')
depart_danger = _depart_named_admonition
visit_error = _make_visit_admonition('error')
depart_error = _depart_named_admonition
visit_hint = _make_visit_admonition('hint')
depart_hint = _depart_named_admonition
visit_important = _make_visit_admonition('important')
depart_important = _depart_named_admonition
visit_note = _make_visit_admonition('note')
depart_note = _depart_named_admonition
visit_tip = _make_visit_admonition('tip')
depart_tip = _depart_named_admonition
visit_warning = _make_visit_admonition('warning')
depart_warning = _depart_named_admonition
def visit_versionmodified(self, node):
pass
def depart_versionmodified(self, node):
pass
def visit_target(self, node):
def add_target(id):
# indexing uses standard LaTeX index markup, so the targets
# will be generated differently
if id.startswith('index-'):
return
# do not generate \phantomsection in \section{}
anchor = not self.in_title
self.body.append(self.hypertarget(id, anchor=anchor))
# postpone the labels until after the sectioning command
parindex = node.parent.index(node)
try:
try:
next = node.parent[parindex+1]
except IndexError:
# last node in parent, look at next after parent
# (for section of equal level) if it exists
if node.parent.parent is not None:
next = node.parent.parent[
node.parent.parent.index(node.parent)]
else:
raise
if isinstance(next, nodes.section):
if node.get('refid'):
self.next_section_ids.add(node['refid'])
self.next_section_ids.update(node['ids'])
return
elif isinstance(next, nodes.figure):
# labels for figures go in the figure body, not before
if node.get('refid'):
self.next_figure_ids.add(node['refid'])
self.next_figure_ids.update(node['ids'])
return
elif isinstance(next, nodes.table):
# same for tables, but only if they have a caption
for n in node:
if isinstance(n, nodes.title):
if node.get('refid'):
self.next_table_ids.add(node['refid'])
self.next_table_ids.update(node['ids'])
return
except IndexError:
pass
if 'refuri' in node:
return
if node.get('refid'):
add_target(node['refid'])
for id in node['ids']:
add_target(id)
def depart_target(self, node):
pass
def visit_attribution(self, node):
    """Render an attribution right-aligned, preceded by an em-dash."""
    self.body.extend(['\n\\begin{flushright}\n', '---'])

def depart_attribution(self, node):
    self.body.append('\n\\end{flushright}\n')
def visit_index(self, node, scre=re.compile(r';\s*')):
    """Translate an index node into \\index{} commands.

    ``scre`` (a default-argument regex, compiled once) turns
    semicolon-separated sub-entries into LaTeX's ``!`` separator.
    The node's children are never visited (SkipNode is raised).
    """
    if not node.get('inline', True):
        self.body.append('\n')
    entries = node['entries']
    for type, string, tid, ismain in entries:
        m = ''
        if ismain:
            # main entries are emphasized in the generated index
            m = '|textbf'
        try:
            if type == 'single':
                p = scre.sub('!', self.encode(string))
                self.body.append(r'\index{%s%s}' % (p, m))
            elif type == 'pair':
                # emit both orderings: "a!b" and "b!a"
                p1, p2 = map(self.encode, split_into(2, 'pair', string))
                self.body.append(r'\index{%s!%s%s}\index{%s!%s%s}' %
                                 (p1, p2, m, p2, p1, m))
            elif type == 'triple':
                # three rotations of the triple
                p1, p2, p3 = map(self.encode,
                                 split_into(3, 'triple', string))
                self.body.append(
                    r'\index{%s!%s %s%s}\index{%s!%s, %s%s}'
                    r'\index{%s!%s %s%s}' %
                    (p1, p2, p3, m, p2, p3, p1, m, p3, p1, p2, m))
            elif type == 'see':
                p1, p2 = map(self.encode, split_into(2, 'see', string))
                self.body.append(r'\index{%s|see{%s}}' % (p1, p2))
            elif type == 'seealso':
                # NOTE(review): emits |see, not |seealso -- looks
                # intentional but worth confirming upstream
                p1, p2 = map(self.encode, split_into(2, 'seealso', string))
                self.body.append(r'\index{%s|see{%s}}' % (p1, p2))
            else:
                self.builder.warn(
                    'unknown index entry type %s found' % type)
        # Python 2-only except syntax; this file targets Python 2
        except ValueError, err:
            self.builder.warn(str(err))
    raise nodes.SkipNode
def visit_raw(self, node):
if 'latex' in node.get('format', '').split():
self.body.append(node.astext())
raise nodes.SkipNode
def visit_reference(self, node):
uri = node.get('refuri', '')
if not uri and node.get('refid'):
uri = '%' + self.curfilestack[-1] + '#' + node['refid']
if self.in_title or not uri:
self.context.append('')
elif uri.startswith('mailto:') or uri.startswith('http:') or \
uri.startswith('https:') or uri.startswith('ftp:'):
self.body.append('\\href{%s}{' % self.encode_uri(uri))
# if configured, put the URL after the link
show_urls = self.builder.config.latex_show_urls
if node.astext() != uri and show_urls and show_urls != 'no':
if uri.startswith('mailto:'):
uri = uri[7:]
if show_urls == 'footnote' and not \
(self.in_footnote or self.in_caption):
# obviously, footnotes in footnotes are not going to work
self.context.append(
r'}\footnote{%s}' % self.encode_uri(uri))
else: # all other true values (b/w compat)
self.context.append('} (%s)' % self.encode_uri(uri))
else:
self.context.append('}')
elif uri.startswith('#'):
# references to labels in the same document
id = self.curfilestack[-1] + ':' + uri[1:]
self.body.append(self.hyperlink(id))
if self.builder.config.latex_show_pagerefs and not \
self.in_production_list:
self.context.append('}} (%s)' % self.hyperpageref(id))
else:
self.context.append('}}')
elif uri.startswith('%'):
# references to documents or labels inside documents
hashindex = uri.find('#')
if hashindex == -1:
# reference to the document
id = uri[1:] + '::doc'
else:
# reference to a label
id = uri[1:].replace('#', ':')
self.body.append(self.hyperlink(id))
if len(node) and hasattr(node[0], 'attributes') and \
'std-term' in node[0].get('classes', []):
# don't add a pageref for glossary terms
self.context.append('}}')
else:
if self.builder.config.latex_show_pagerefs:
self.context.append('}} (%s)' % self.hyperpageref(id))
else:
self.context.append('}}')
else:
self.builder.warn('unusable reference target found: %s' % uri,
(self.curfilestack[-1], node.line))
self.context.append('')
def depart_reference(self, node):
self.body.append(self.context.pop())
def visit_download_reference(self, node):
    """Download references need no extra LaTeX markup."""

def depart_download_reference(self, node):
    """No-op."""

def visit_pending_xref(self, node):
    """Unresolved cross-references are emitted as plain text."""

def depart_pending_xref(self, node):
    """No-op."""

def visit_emphasis(self, node):
    self.body.append(r'\emph{')

def depart_emphasis(self, node):
    self.body.append('}')

def visit_literal_emphasis(self, node):
    # literal text: disable the --/'' ligature contractions
    self.no_contractions += 1
    self.body.append(r'\emph{\texttt{')

def depart_literal_emphasis(self, node):
    self.no_contractions -= 1
    self.body.append('}}')

def visit_strong(self, node):
    self.body.append(r'\textbf{')

def depart_strong(self, node):
    self.body.append('}')
def visit_abbreviation(self, node):
abbr = node.astext()
self.body.append(r'\textsc{')
# spell out the explanation once
if node.hasattr('explanation') and abbr not in self.handled_abbrs:
self.context.append('} (%s)' % self.encode(node['explanation']))
self.handled_abbrs.add(abbr)
else:
self.context.append('}')
def depart_abbreviation(self, node):
self.body.append(self.context.pop())
def visit_title_reference(self, node):
self.body.append(r'\emph{')
def depart_title_reference(self, node):
self.body.append('}')
def visit_citation(self, node):
    """Start collecting a citation's text.

    A fresh bibitem record ([citelabel, citetext, docname, citeid]) is
    appended and the current length of self.body is remembered so
    depart_citation can slice the collected text back out.
    """
    # TODO maybe use cite bibitems
    self.bibitems.append(['', '', '', ''])
    self.context.append(len(self.body))

def depart_citation(self, node):
    """Move everything emitted since visit_citation into the bibitem."""
    start = self.context.pop()
    text = ''.join(self.body[start:])
    del self.body[start:]
    self.bibitems[-1][1] = text

def visit_citation_reference(self, node):
    # citation_reference nodes are normally replaced by pending_xref
    # nodes in the environment, so this is effectively dead code
    self.body.append('\\cite{%s}' % self.idescape(node.astext()))
    raise nodes.SkipNode
def visit_literal(self, node):
    """Inline code: plain \\texttt inside titles, \\code elsewhere."""
    self.no_contractions += 1
    markup = r'\texttt{' if self.in_title else r'\code{'
    self.body.append(markup)

def depart_literal(self, node):
    self.no_contractions -= 1
    self.body.append('}')
def visit_footnote_reference(self, node):
num = node.astext().strip()
try:
footnode, used = self.footnotestack[-1][num]
except (KeyError, IndexError):
raise nodes.SkipNode
# if a footnote has been inserted once, it shouldn't be repeated
# by the next reference
if used:
self.body.append('\\footnotemark[%s]' % num)
else:
if self.in_caption:
raise UnsupportedError('%s:%s: footnotes in float captions '
'are not supported by LaTeX' %
(self.curfilestack[-1], node.line))
footnode.walkabout(self)
self.footnotestack[-1][num][1] = True
raise nodes.SkipChildren
def depart_footnote_reference(self, node):
pass
def visit_literal_block(self, node):
if self.in_footnote:
raise UnsupportedError('%s:%s: literal blocks in footnotes are '
'not supported by LaTeX' %
(self.curfilestack[-1], node.line))
if node.rawsource != node.astext():
# most probably a parsed-literal block -- don't highlight
self.body.append('\\begin{alltt}\n')
else:
code = node.astext().rstrip('\n')
lang = self.hlsettingstack[-1][0]
linenos = code.count('\n') >= self.hlsettingstack[-1][1] - 1
highlight_args = node.get('highlight_args', {})
if 'language' in node:
# code-block directives
lang = node['language']
highlight_args['force'] = True
if 'linenos' in node:
linenos = node['linenos']
def warner(msg):
self.builder.warn(msg, (self.curfilestack[-1], node.line))
hlcode = self.highlighter.highlight_block(code, lang, warn=warner,
linenos=linenos, **highlight_args)
# workaround for Unicode issue
hlcode = hlcode.replace(u'€', u'@texteuro[]')
# must use original Verbatim environment and "tabular" environment
if self.table:
hlcode = hlcode.replace('\\begin{Verbatim}',
'\\begin{OriginalVerbatim}')
self.table.has_problematic = True
self.table.has_verbatim = True
# get consistent trailer
hlcode = hlcode.rstrip()[:-14] # strip \end{Verbatim}
hlcode = hlcode.rstrip() + '\n'
self.body.append('\n' + hlcode + '\\end{%sVerbatim}\n' %
(self.table and 'Original' or ''))
raise nodes.SkipNode
def depart_literal_block(self, node):
self.body.append('\n\\end{alltt}\n')
visit_doctest_block = visit_literal_block
depart_doctest_block = depart_literal_block
def visit_line(self, node):
self.body.append('\item[] ')
def depart_line(self, node):
self.body.append('\n')
def visit_line_block(self, node):
if isinstance(node.parent, nodes.line_block):
self.body.append('\\item[]\n'
'\\begin{DUlineblock}{\\DUlineblockindent}\n')
else:
self.body.append('\n\\begin{DUlineblock}{0em}\n')
if self.table:
self.table.has_problematic = True
def depart_line_block(self, node):
self.body.append('\\end{DUlineblock}\n')
def visit_block_quote(self, node):
    # If the block quote contains a single object and that object
    # is a list, then generate a list not a block quote.
    # This lets us indent lists.
    done = 0
    if len(node.children) == 1:
        child = node.children[0]
        if isinstance(child, nodes.bullet_list) or \
                isinstance(child, nodes.enumerated_list):
            done = 1
    if not done:
        self.body.append('\\begin{quote}\n')
        if self.table:
            # quote environments inside tabular cells break layout
            self.table.has_problematic = True

def depart_block_quote(self, node):
    # mirror the visit logic: only close the environment if one was opened
    done = 0
    if len(node.children) == 1:
        child = node.children[0]
        if isinstance(child, nodes.bullet_list) or \
                isinstance(child, nodes.enumerated_list):
            done = 1
    if not done:
        self.body.append('\\end{quote}\n')
# option node handling copied from docutils' latex writer

def visit_option(self, node):
    """Emit the separator before every option but the first."""
    if self.context[-1]:
        self.body.append(', ')

def depart_option(self, node):
    # record that one more option of this group has been written
    self.context[-1] += 1

def visit_option_argument(self, node):
    """The delimiter betweeen an option and its argument."""
    self.body.append(node.get('delimiter', ' '))

def depart_option_argument(self, node):
    """No-op."""

def visit_option_group(self, node):
    self.body.append('\\item [')
    self.context.append(0)  # counts options written so far

def depart_option_group(self, node):
    self.context.pop()  # drop the option counter
    self.body.append('] ')
def visit_option_list(self, node):
self.body.append('\\begin{optionlist}{3cm}\n')
if self.table:
self.table.has_problematic = True
def depart_option_list(self, node):
self.body.append('\\end{optionlist}\n')
def visit_option_list_item(self, node):
pass
def depart_option_list_item(self, node):
pass
def visit_option_string(self, node):
ostring = node.astext()
self.no_contractions += 1
self.body.append(self.encode(ostring))
self.no_contractions -= 1
raise nodes.SkipNode
def visit_description(self, node):
self.body.append(' ')
def depart_description(self, node):
pass
def visit_superscript(self, node):
self.body.append('$^{\\text{')
def depart_superscript(self, node):
self.body.append('}}$')
def visit_subscript(self, node):
self.body.append('$_{\\text{')
def depart_subscript(self, node):
self.body.append('}}$')
def visit_substitution_definition(self, node):
raise nodes.SkipNode
def visit_substitution_reference(self, node):
raise nodes.SkipNode
def visit_inline(self, node):
classes = node.get('classes', [])
self.body.append(r'\DUspan{%s}{' % ','.join(classes))
def depart_inline(self, node):
self.body.append('}')
def visit_generated(self, node):
pass
def depart_generated(self, node):
pass
def visit_compound(self, node):
pass
def depart_compound(self, node):
pass
def visit_container(self, node):
pass
def depart_container(self, node):
pass
def visit_decoration(self, node):
pass
def depart_decoration(self, node):
pass
# docutils-generated elements that we don't support
def visit_header(self, node):
raise nodes.SkipNode
def visit_footer(self, node):
raise nodes.SkipNode
def visit_docinfo(self, node):
raise nodes.SkipNode
# text handling
def encode(self, text):
    """Escape *text* for LaTeX output via ``tex_escape_map`` and, depending
    on translator state, protect whitespace and break up ligatures."""
    text = unicode(text).translate(tex_escape_map)
    if self.literal_whitespace:
        # Insert a blank before the newline, to avoid
        # ! LaTeX Error: There's no line here to end.
        text = text.replace(u'\n', u'~\\\\\n').replace(u' ', u'~')
    if self.no_contractions:
        # inside literal text: stop LaTeX from contracting -- and ''
        text = text.replace('--', u'-{-}')
        text = text.replace("''", u"'{'}")
    return text

def encode_uri(self, text):
    # in \href, the tilde is allowed and must be represented literally
    return self.encode(text).replace('\\textasciitilde{}', '~')
def visit_Text(self, node):
text = self.encode(node.astext())
if not self.no_contractions:
text = educate_quotes_latex(text)
self.body.append(text)
def depart_Text(self, node):
pass
def visit_comment(self, node):
raise nodes.SkipNode
def visit_meta(self, node):
# only valid for HTML
raise nodes.SkipNode
def visit_system_message(self, node):
pass
def depart_system_message(self, node):
self.body.append('\n')
def visit_math(self, node):
    # reached only when no Sphinx math extension is active: warn once
    # per node and drop the math content entirely
    self.builder.warn('using "math" markup without a Sphinx math extension '
                      'active, please use one of the math extensions '
                      'described at http://sphinx-doc.org/ext/math.html',
                      (self.curfilestack[-1], node.line))
    raise nodes.SkipNode

# display math gets the same warning as inline math
visit_math_block = visit_math

def unknown_visit(self, node):
    """Fail loudly on node types this translator does not handle."""
    raise NotImplementedError('Unknown node: ' + node.__class__.__name__)
| SurfasJones/icecream-info | icecream/lib/python2.7/site-packages/sphinx/writers/latex.py | Python | mit | 58,296 |
# -*- coding: utf-8 -*-
# Copyright (c) 2016 Aladom SAS & Hosting Dvpt SAS
from django.db.models import BooleanField
__all__ = [
'VariableHelpTextBooleanField',
]
class VariableHelpTextBooleanField(BooleanField):
    """Fixes an issue with help_text depending on a variable.

    See https://github.com/Aladom/django-mailing/issues/2 for details.
    """
    # FIXME DEPRECATED: remove when squashing migrations

    def deconstruct(self):
        """Deconstruct like BooleanField, but drop the volatile help_text
        so it never leaks into generated migrations."""
        name, path, args, kwargs = super().deconstruct()
        kwargs.pop('help_text', None)
        return name, path, args, kwargs
| Aladom/django-mailing | mailing/models/fields.py | Python | mit | 614 |
from bempy import context_blocks

# Populate this module's namespace with the "dropdown" block's context.
# NOTE(review): semantics assumed from the bempy API name -- confirm
# against the bempy documentation.
context_blocks('dropdown', locals())
| svetlyak40wt/bempy | bempy/django/blocks/dropdown/__init__.py | Python | bsd-3-clause | 71 |
#!/usr/bin/env python3
import sys
import os
#sys.path.append(os.path.join(sys.path[0],"..","..","kicad_mod")) # load kicad_mod path
# export PYTHONPATH="${PYTHONPATH}<path to kicad-footprint-generator directory>"
sys.path.append(os.path.join(sys.path[0], "..", "..", "..")) # load parent path of KicadModTree
import argparse
import yaml
from helpers import *
from KicadModTree import *
sys.path.append(os.path.join(sys.path[0], "..", "..", "tools")) # load parent path of tools
from footprint_text_fields import addTextFields
# connector family being generated
series = "EH"
manufacturer = 'JST'
orientation = 'V'  # vertical entry variant (see orientation_options in config)
number_of_rows = 1
datasheet = 'http://www.jst-mfg.com/product/pdf/eng/eEH.pdf'

# geometry parameters (mm)
pitch = 2.50
pad_to_pad_clearance = 0.8
pad_copper_y_solder_length = 0.5  # How much copper should be in y direction?
min_annular_ring = 0.15
def generate_one_footprint(pincount, configuration):
mpn = "B{pincount}B-EH-A".format(pincount=pincount)
orientation_str = configuration['orientation_options'][orientation]
footprint_name = configuration['fp_name_format_string'].format(man=manufacturer,
series=series,
mpn=mpn, num_rows=number_of_rows, pins_per_row=pincount, mounting_pad = "",
pitch=pitch, orientation=orientation_str)
kicad_mod = Footprint(footprint_name)
kicad_mod.setDescription("JST {:s} series connector, {:s} ({:s}), generated with kicad-footprint-generator".format(series, mpn, datasheet))
kicad_mod.setTags(configuration['keyword_fp_string'].format(series=series,
orientation=orientation_str, man=manufacturer,
entry=configuration['entry_direction'][orientation]))
A = (pincount - 1) * pitch
B = A + 5.0
# set general values
if pincount == 2:
drill = 1.0
else:
drill = 0.95
pad_size = [pitch - pad_to_pad_clearance, drill + 2*pad_copper_y_solder_length]
if pad_size[0] - drill < 2*min_annular_ring:
pad_size[0] = drill + 2*min_annular_ring
# create pads
# kicad_mod.append(Pad(number=1, type=Pad.TYPE_THT, shape=Pad.SHAPE_RECT,
# at=[0, 0], size=pad_size,
# drill=drill, layers=Pad.LAYERS_THT))
optional_pad_params = {}
if configuration['kicad4_compatible']:
optional_pad_params['tht_pad1_shape'] = Pad.SHAPE_RECT
else:
optional_pad_params['tht_pad1_shape'] = Pad.SHAPE_ROUNDRECT
kicad_mod.append(PadArray(initial=1, start=[0, 0],
x_spacing=pitch, pincount=pincount,
size=pad_size, drill=drill,
type=Pad.TYPE_THT, shape=Pad.SHAPE_OVAL, layers=Pad.LAYERS_THT,
**optional_pad_params))
x1 = -2.5
y1 = -1.6
x2 = x1 + B
y2 = y1 + 3.8
body_edge={'left':x1, 'right':x2, 'top':y1, 'bottom':y2}
#draw the main outline on F.Fab layer
kicad_mod.append(RectLine(start={'x':x1,'y':y1}, end={'x':x2,'y':y2}, layer='F.Fab', width=configuration['fab_line_width']))
########################### CrtYd #################################
cx1 = roundToBase(x1-configuration['courtyard_offset']['connector'], configuration['courtyard_grid'])
cy1 = roundToBase(y1-configuration['courtyard_offset']['connector'], configuration['courtyard_grid'])
cx2 = roundToBase(x2+configuration['courtyard_offset']['connector'], configuration['courtyard_grid'])
cy2 = roundToBase(y2+configuration['courtyard_offset']['connector'], configuration['courtyard_grid'])
kicad_mod.append(RectLine(
start=[cx1, cy1], end=[cx2, cy2],
layer='F.CrtYd', width=configuration['courtyard_line_width']))
#line offset
off = configuration['silk_fab_offset']
x1 -= off
y1 -= off
x2 += off
y2 += off
#draw the main outline around the footprint
kicad_mod.append(RectLine(start={'x':x1,'y':y1},end={'x':x2,'y':y2}, layer='F.SilkS', width=configuration['silk_line_width']))
T = 0.5
#add top line
kicad_mod.append(PolygoneLine(polygone=[{'x': x1,'y': 0},
{'x': x1 + T,'y': 0},
{'x': x1 + T,'y': y1 + T},
{'x': x2 - T,'y': y1 + T},
{'x': x2 - T,'y': 0},
{'x': x2,'y':0}], layer='F.SilkS', width=configuration['silk_line_width']))
#add bottom line (left)
kicad_mod.append(PolygoneLine(polygone=[{'x':x1,'y':y2-3*T},
{'x':x1+2*T,'y':y2-3*T},
{'x':x1+2*T,'y':y2}], layer='F.SilkS', width=configuration['silk_line_width']))
#add bottom line (right)
kicad_mod.append(PolygoneLine(polygone=[{'x':x2,'y':y2-3*T},
{'x':x2-2*T,'y':y2-3*T},
{'x':x2-2*T,'y':y2}], layer='F.SilkS', width=configuration['silk_line_width']))
#add pin-1 marker
D = 0.3
L = 2.5
pin = [
{'x': x1-D,'y': y2+D-L},
{'x': x1-D,'y': y2+D},
{'x': x1-D+L,'y': y2+D},
]
kicad_mod.append(PolygoneLine(polygone=pin))
kicad_mod.append(PolygoneLine(polygone=pin, layer='F.Fab', width=configuration['fab_line_width']))
######################### Text Fields ###############################
addTextFields(kicad_mod=kicad_mod, configuration=configuration, body_edges=body_edge,
courtyard={'top':cy1, 'bottom':cy2}, fp_name=footprint_name, text_y_inside_position='bottom')
##################### Output and 3d model ############################
model3d_path_prefix = configuration.get('3d_model_prefix','${KISYS3DMOD}/')
lib_name = configuration['lib_name_format_string'].format(series=series, man=manufacturer)
model_name = '{model3d_path_prefix:s}{lib_name:s}.3dshapes/{fp_name:s}.wrl'.format(
model3d_path_prefix=model3d_path_prefix, lib_name=lib_name, fp_name=footprint_name)
kicad_mod.append(Model(filename=model_name))
output_dir = '{lib_name:s}.pretty/'.format(lib_name=lib_name)
if not os.path.isdir(output_dir): #returns false if path does not yet exist!! (Does not check path validity)
os.makedirs(output_dir)
filename = '{outdir:s}{fp_name:s}.kicad_mod'.format(outdir=output_dir, fp_name=footprint_name)
file_handler = KicadFileHandler(kicad_mod)
file_handler.writeFile(filename)
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='use config .yaml files to create footprints.')
    parser.add_argument('--global_config', type=str, nargs='?',
                        help='the config file defining how the footprint will look like. (KLC)',
                        default='../../tools/global_config_files/config_KLCv3.0.yaml')
    parser.add_argument('--series_config', type=str, nargs='?',
                        help='the config file defining series parameters.',
                        default='../conn_config_KLCv3.yaml')
    parser.add_argument('--kicad4_compatible', action='store_true',
                        help='Create footprints kicad 4 compatible')
    args = parser.parse_args()

    # yaml.load() without an explicit Loader is unsafe on untrusted input
    # and raises TypeError with PyYAML >= 6; these are plain config files,
    # so safe_load is sufficient and future-proof.
    with open(args.global_config, 'r') as config_stream:
        try:
            configuration = yaml.safe_load(config_stream)
        except yaml.YAMLError as exc:
            print(exc)

    with open(args.series_config, 'r') as config_stream:
        try:
            configuration.update(yaml.safe_load(config_stream))
        except yaml.YAMLError as exc:
            print(exc)

    configuration['kicad4_compatible'] = args.kicad4_compatible

    # one footprint per pin count, 2..15 positions
    for pincount in range(2, 16):
        generate_one_footprint(pincount, configuration)
| SchrodingersGat/kicad-footprint-generator | scripts/Connector/Connector_JST/conn_jst_eh_tht_top.py | Python | gpl-3.0 | 7,412 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: timer.py
# Author: Yuxin Wu <[email protected]>
from contextlib import contextmanager
import time
from collections import defaultdict
import six
import atexit
from .stat import StatCounter
from . import logger
__all__ = ['total_timer', 'timed_operation',
'print_total_timer', 'IterSpeedCounter']
class IterSpeedCounter(object):
    """Count how often some code gets reached and periodically log
    the accumulated call rate."""

    def __init__(self, print_every, name=None):
        # total number of calls so far
        self.cnt = 0
        # emit a log line every `print_every` calls
        self.print_every = int(print_every)
        self.name = name or 'IterSpeed'

    def reset(self):
        """Restart the clock; invoked lazily on the first call."""
        self.start = time.time()

    def __call__(self):
        if not self.cnt:
            self.reset()
        self.cnt += 1
        if self.cnt % self.print_every:
            return
        elapsed = time.time() - self.start
        logger.info("{}: {:.2f} sec, {} times, {:.3g} sec/time".format(
            self.name, elapsed, self.cnt, elapsed / self.cnt))
@contextmanager
def timed_operation(msg, log_start=False):
    """Context manager that logs how long the wrapped block took.

    Args:
        msg: description used in the log line.
        log_start: if True, also log when the block is entered.
    """
    if log_start:
        logger.info('Start {} ...'.format(msg))
    start = time.time()
    yield
    logger.info('{} finished, time={:.2f}sec.'.format(
        msg, time.time() - start))
# message -> StatCounter accumulating wall times across all uses
_TOTAL_TIMER_DATA = defaultdict(StatCounter)

@contextmanager
def total_timer(msg):
    """Accumulate the wall time of the wrapped block under key *msg*."""
    start = time.time()
    yield
    t = time.time() - start
    _TOTAL_TIMER_DATA[msg].feed(t)

def print_total_timer():
    """Log every accumulated total_timer; registered to run at exit."""
    if len(_TOTAL_TIMER_DATA) == 0:
        return
    for k, v in six.iteritems(_TOTAL_TIMER_DATA):
        logger.info("Total Time: {} -> {:.2f} sec, {} times, {:.3g} sec/time".format(
            k, v.sum, v.count, v.average))

# dump all accumulated timings when the interpreter exits
atexit.register(print_total_timer)
"""
Support for the Microsoft Cognitive Services text-to-speech service.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/tts.microsoft/
"""
from http.client import HTTPException
import logging
import voluptuous as vol
from homeassistant.components.tts import CONF_LANG, PLATFORM_SCHEMA, Provider
from homeassistant.const import CONF_API_KEY, CONF_TYPE
import homeassistant.helpers.config_validation as cv
# Configuration keys specific to this TTS platform.
CONF_GENDER = 'gender'
CONF_OUTPUT = 'output'
CONF_RATE = 'rate'
CONF_VOLUME = 'volume'
CONF_PITCH = 'pitch'
CONF_CONTOUR = 'contour'
REQUIREMENTS = ["pycsspeechtts==1.0.2"]
_LOGGER = logging.getLogger(__name__)
# Locale codes accepted by the Microsoft Cognitive Services speech API.
SUPPORTED_LANGUAGES = [
    'ar-eg', 'ar-sa', 'ca-es', 'cs-cz', 'da-dk', 'de-at', 'de-ch', 'de-de',
    'el-gr', 'en-au', 'en-ca', 'en-gb', 'en-ie', 'en-in', 'en-us', 'es-es',
    'es-mx', 'fi-fi', 'fr-ca', 'fr-ch', 'fr-fr', 'he-il', 'hi-in', 'hu-hu',
    'id-id', 'it-it', 'ja-jp', 'ko-kr', 'nb-no', 'nl-nl', 'pl-pl', 'pt-br',
    'pt-pt', 'ro-ro', 'ru-ru', 'sk-sk', 'sv-se', 'th-th', 'tr-tr', 'zh-cn',
    'zh-hk', 'zh-tw',
]
GENDERS = [
    'Female', 'Male',
]
# Defaults used when the user's configuration omits an option.
DEFAULT_LANG = 'en-us'
DEFAULT_GENDER = 'Female'
DEFAULT_TYPE = 'ZiraRUS'
DEFAULT_OUTPUT = 'audio-16khz-128kbitrate-mono-mp3'
DEFAULT_RATE = 0
DEFAULT_VOLUME = 0
DEFAULT_PITCH = "default"
DEFAULT_CONTOUR = ""
# Voluptuous schema validating this platform's configuration entry;
# rate/volume are percentage offsets clamped to [-100, 100].
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_API_KEY): cv.string,
    vol.Optional(CONF_LANG, default=DEFAULT_LANG): vol.In(SUPPORTED_LANGUAGES),
    vol.Optional(CONF_GENDER, default=DEFAULT_GENDER): vol.In(GENDERS),
    vol.Optional(CONF_TYPE, default=DEFAULT_TYPE): cv.string,
    vol.Optional(CONF_RATE, default=DEFAULT_RATE):
        vol.All(vol.Coerce(int), vol.Range(-100, 100)),
    vol.Optional(CONF_VOLUME, default=DEFAULT_VOLUME):
        vol.All(vol.Coerce(int), vol.Range(-100, 100)),
    vol.Optional(CONF_PITCH, default=DEFAULT_PITCH): cv.string,
    vol.Optional(CONF_CONTOUR, default=DEFAULT_CONTOUR): cv.string,
})
def get_engine(hass, config):
    """Set up Microsoft speech component.

    *hass* is unused; the provider is built purely from the validated
    *config* dict produced by PLATFORM_SCHEMA.
    """
    return MicrosoftProvider(config[CONF_API_KEY], config[CONF_LANG],
                             config[CONF_GENDER], config[CONF_TYPE],
                             config[CONF_RATE], config[CONF_VOLUME],
                             config[CONF_PITCH], config[CONF_CONTOUR])
class MicrosoftProvider(Provider):
    """TTS provider backed by the Microsoft Cognitive Services speech API."""

    def __init__(self, apikey, lang, gender, ttype, rate, volume,
                 pitch, contour):
        """Store the validated platform configuration."""
        self._apikey = apikey
        self._lang = lang
        self._gender = gender
        self._type = ttype
        self._output = DEFAULT_OUTPUT
        # The speech service expects rate/volume as percentage strings.
        self._rate = "{}%".format(rate)
        self._volume = "{}%".format(volume)
        self._pitch = pitch
        self._contour = contour
        self.name = 'Microsoft'

    @property
    def default_language(self):
        """Return the default language."""
        return self._lang

    @property
    def supported_languages(self):
        """Return list of supported languages."""
        return SUPPORTED_LANGUAGES

    def get_tts_audio(self, message, language, options=None):
        """Load TTS from Microsoft."""
        from pycsspeechtts import pycsspeechtts
        if language is None:
            language = self._lang
        try:
            translator = pycsspeechtts.TTSTranslator(self._apikey)
            data = translator.speak(
                language=language, gender=self._gender, voiceType=self._type,
                output=self._output, rate=self._rate, volume=self._volume,
                pitch=self._pitch, contour=self._contour, text=message)
        except HTTPException as ex:
            _LOGGER.error("Error occurred for Microsoft TTS: %s", ex)
            return (None, None)
        return ("mp3", data)
| jamespcole/home-assistant | homeassistant/components/microsoft/tts.py | Python | apache-2.0 | 3,963 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from django.utils.translation import ugettext_lazy as _
from horizon import tabs
from openstack_dashboard.contrib.sahara.api import sahara as saharaclient
LOG = logging.getLogger(__name__)
class GeneralTab(tabs.Tab):
    """Tab showing general information for a single job template."""
    name = _("General Info")
    slug = "job_details_tab"
    template_name = ("project/data_processing.jobs/_details.html")

    def get_context_data(self, request):
        """Fetch the job template for this tab's job_id.

        Falls back to an empty dict (and logs the failure) when the
        Sahara API call fails, so the template still renders.
        """
        job_id = self.tab_group.kwargs['job_id']
        try:
            job = saharaclient.job_get(request, job_id)
        except Exception as e:
            job = {}
            # Pass the exception lazily instead of eagerly %-formatting
            # the message string at call time.
            LOG.error("Unable to fetch job template details: %s", e)
        return {"job": job}
class JobDetailsTabs(tabs.TabGroup):
    """Tab group for the job-template detail page; currently only the
    GeneralTab. ``sticky`` keeps the selected tab across page loads."""
    slug = "job_details"
    tabs = (GeneralTab,)
    sticky = True
| FNST-OpenStack/horizon | openstack_dashboard/contrib/sahara/content/data_processing/jobs/tabs.py | Python | apache-2.0 | 1,331 |
import collections
import curses
from visidata import vd, VisiData, BaseSheet, Sheet, ColumnItem, Column, RowColorizer, options, colors, wrmap, clipdraw, ExpectedException, update_attr, MissingAttrFormatter
# Display and color options for the status bar.
vd.option('disp_rstatus_fmt', ' {sheet.longname} {sheet.nRows:9d} {sheet.rowtype} {sheet.modifiedStatus} {sheet.options.disp_selected_note}{sheet.nSelectedRows}', 'right-side status format string')
vd.option('disp_status_fmt', '{sheet.shortcut}› {sheet.name}| ', 'status line prefix')
vd.option('disp_lstatus_max', 0, 'maximum length of left status line')
vd.option('disp_status_sep', ' | ', 'separator between statuses')
vd.option('color_keystrokes', 'bold 233 black on 110 cyan', 'color of input keystrokes on status line')
vd.option('color_status', 'bold black on 110 cyan', 'status line color')
vd.option('color_error', 'red', 'error message color')
vd.option('color_warning', 'yellow', 'warning message color')
vd.option('color_top_status', 'underline', 'top window status bar color')
vd.option('color_active_status', 'black on 110 cyan', ' active window status bar color')
vd.option('color_inactive_status', '8 on black', 'inactive window status bar color')
# Track the longname of the last command executed on each sheet, for disp_rstatus_fmt.
BaseSheet.init('longname', lambda: '')
vd.beforeExecHooks.append(lambda sheet, cmd, args, ks: setattr(sheet, 'longname', cmd.longname))
@BaseSheet.property
def modifiedStatus(sheet):
    'Marker shown in the status bar when the sheet has unsaved modifications.'
    if sheet.hasBeenModified:
        return ' [M]'
    return ''
@VisiData.lazy_property
def statuses(vd):
    'Pending status messages, cleared on the next action.'
    return collections.OrderedDict()  # (priority, statusmsg) -> num_repeats; shown until next action
@VisiData.lazy_property
def statusHistory(vd):
    'Permanent record of every status message.'
    return list()  # list of [priority, statusmsg, repeats] for all status messages ever
@VisiData.api
def status(vd, *args, priority=0):
    'Display *args* on status until next action.'
    if not args:
        return True
    key = (priority, tuple(str(a) for a in args))
    # Coalesce identical pending messages into a repeat count.
    vd.statuses[key] = vd.statuses.get(key, 0) + 1
    return vd.addToStatusHistory(*args, priority=priority)
@VisiData.api
def addToStatusHistory(vd, *args, priority=0):
    'Record *args* in the status history, coalescing immediate repeats.'
    history = vd.statusHistory
    if history:
        last = history[-1]
        # Same priority and message as the previous entry: just bump
        # its repeat counter instead of appending a duplicate.
        if last[0] == priority and last[1] == args:
            last[2] += 1
            return True
    history.append([priority, args, 1])
    return True
# Priority scale: 3=error, 2=fail (warning color), 1=warning, 0=info.
@VisiData.api
def error(vd, *args):
    'Abort with ExpectedException, and display *args* on status as an error.'
    vd.status(*args, priority=3)
    raise ExpectedException(args[0] if args else '')
@VisiData.api
def fail(vd, *args):
    'Abort with ExpectedException, and display *args* on status as a warning.'
    vd.status(*args, priority=2)
    raise ExpectedException(args[0] if args else '')
@VisiData.api
def warning(vd, *args):
    'Display *args* on status as a warning.'
    vd.status(*args, priority=1)
@VisiData.api
def debug(vd, *args, **kwargs):
    'Display *args* on status if options.debug is set.'
    if options.debug:
        return vd.status(*args, **kwargs)
def middleTruncate(s, w):
    'Return *s* unchanged if short enough, else its first and last *w* characters joined by the truncator glyph.'
    if len(s) <= w:
        return s
    return options.disp_truncator.join((s[:w], s[-w:]))
def composeStatus(msgparts, n):
    'Join *msgparts* into a single status message, prefixed with a repeat count when n > 1.'
    text = '; '.join(wrmap(str, msgparts))
    return '[%sx] %s' % (n, text) if n > 1 else text
@BaseSheet.api
def leftStatus(sheet):
    'Return left side of status bar for this sheet. Overridable.'
    # Default: format options.disp_status_fmt with the sheet and vd.
    return options.disp_status_fmt.format(sheet=sheet, vd=vd)
@VisiData.api
def drawLeftStatus(vd, scr, vs):
    'Draw left side of status bar: the sheet-name prefix, then (on the active sheet only) the pending status messages.'
    cattr = colors.get_color('color_status')
    active = (vs is vd.activeSheet)
    # Layer window-state colors onto the base status color.
    if active:
        cattr = update_attr(cattr, colors.color_active_status, 1)
    else:
        cattr = update_attr(cattr, colors.color_inactive_status, 1)
    if scr is vd.winTop:
        cattr = update_attr(cattr, colors.color_top_status, 1)
    attr = cattr.attr
    error_attr = update_attr(cattr, colors.color_error, 1).attr
    warn_attr = update_attr(cattr, colors.color_warning, 2).attr
    sep = options.disp_status_sep
    x = 0
    y = vs.windowHeight-1  # status for each window
    try:
        lstatus = vs.leftStatus()
        maxwidth = options.disp_lstatus_max
        if maxwidth > 0:
            lstatus = middleTruncate(lstatus, maxwidth//2)
        x = clipdraw(scr, y, 0, lstatus, attr, w=vs.windowWidth-1)
        # Make the sheet-name prefix respond to mouse clicks.
        vd.onMouse(scr, y, 0, 1, x,
                        BUTTON1_PRESSED='sheets',
                        BUTTON3_PRESSED='rename-sheet',
                        BUTTON3_CLICKED='rename-sheet')
    except Exception as e:
        vd.exceptionCaught(e)
    if not active:
        return
    one = False
    # Draw messages highest-priority first, joined by the separator.
    for (pri, msgparts), n in sorted(vd.statuses.items(), key=lambda k: -k[0][0]):
        try:
            if x > vs.windowWidth:
                break
            if one:  # any messages already:
                x += clipdraw(scr, y, x, sep, attr, w=vs.windowWidth-x)
            one = True
            msg = composeStatus(msgparts, n)
            # Map the message priority to its display color.
            if pri == 3: msgattr = error_attr
            elif pri == 2: msgattr = warn_attr
            elif pri == 1: msgattr = warn_attr
            else: msgattr = attr
            x += clipdraw(scr, y, x, msg, msgattr, w=vs.windowWidth-x)
        except Exception as e:
            vd.exceptionCaught(e)
@VisiData.api
def rightStatus(vd, sheet):
    'Return right side of status bar. Overrideable.'
    # MissingAttrFormatter leaves unknown {fields} blank instead of raising.
    return MissingAttrFormatter().format(sheet.options.disp_rstatus_fmt, sheet=sheet, vd=vd)
@VisiData.api
def drawRightStatus(vd, scr, vs):
    'Draw right side of status bar. Return length displayed.'
    rightx = vs.windowWidth
    ret = 0
    # List of (text, coloroption) pairs, drawn right-to-left.
    statcolors = [
        (vd.rightStatus(vs), 'color_status'),
    ]
    active = vs is vd.activeSheet
    if active:
        statcolors.append((f'{vd.prettykeys(vd.keystrokes)} ' or '', 'color_keystrokes'))
    if vs.currentThreads:
        # Show memory usage and a progress gerund while threads are running.
        statcolors.insert(0, vd.checkMemoryUsage())
        gerunds = [p.gerund for p in vs.progresses if p.gerund] or ['processing']
        statcolors.insert(1, (' %s %s…' % (vs.progressPct, gerunds[0]), 'color_working'))
    if active and vd.currentReplay:
        statcolors.insert(0, (vd.replayStatus, 'color_status_replay'))
    for rstatcolor in statcolors:
        if rstatcolor:
            try:
                rstatus, coloropt = rstatcolor
                rstatus = ' '+rstatus
                cattr = colors.get_color(coloropt)
                # Same window-state layering as the left status side.
                if scr is vd.winTop:
                    cattr = update_attr(cattr, colors.color_top_status, 0)
                if active:
                    cattr = update_attr(cattr, colors.color_active_status, 1)
                else:
                    cattr = update_attr(cattr, colors.color_inactive_status, 1)
                statuslen = clipdraw(scr, vs.windowHeight-1, rightx, rstatus, cattr.attr, w=vs.windowWidth-1, rtl=True)
                rightx -= statuslen
                ret += statuslen
            except Exception as e:
                vd.exceptionCaught(e)
    if scr:
        curses.doupdate()
    return ret
class StatusSheet(Sheet):
    """Sheet listing past status messages (see statusHistorySheet)."""
    precious = False
    rowtype = 'statuses'  # rowdef: (priority, args, nrepeats)
    columns = [
        ColumnItem('priority', 0, type=int, width=0),
        ColumnItem('nrepeats', 2, type=int, width=0),
        ColumnItem('args', 1, width=0),
        Column('message', getter=lambda col,row: composeStatus(row[1], row[2])),
    ]
    # Color rows by message priority: 3=error, 1/2=warning.
    colorizers = [
        RowColorizer(1, 'color_error', lambda s,c,r,v: r and r[0] == 3),
        RowColorizer(1, 'color_warning', lambda s,c,r,v: r and r[0] in [1,2]),
    ]
    def reload(self):
        self.rows = self.source
@VisiData.property
def statusHistorySheet(vd):
    'Sheet of all past status messages, most recent first.'
    return StatusSheet("status_history", source=vd.statusHistory[::-1])  # in reverse order
# Ctrl+P opens the status history.
BaseSheet.addCommand('^P', 'open-statuses', 'vd.push(vd.statusHistorySheet)', 'open Status History')
| saulpw/visidata | visidata/statusbar.py | Python | gpl-3.0 | 7,885 |
# Per-axis alignment constants for drawtextcentered().
TEXT_ALIGN_LEFT = 0
TEXT_ALIGN_CENTER = 1
TEXT_ALIGN_RIGHT = 2


def drawtextcentered(surface, position, font, text="", aa=1, color=(255, 255, 255), alignment=(1, 1)):
    """Render *text* with *font* and blit it onto *surface*.

    *position* is the anchor point; *alignment* is a pair
    (horizontal, vertical) of TEXT_ALIGN_* constants deciding how the
    rendered text is placed relative to that anchor.
    """
    rendered = font.render(text, aa, color)
    bounds = rendered.get_rect()
    target = [0, 0]
    for axis in range(2):
        anchor = alignment[axis]
        if anchor == TEXT_ALIGN_LEFT:
            target[axis] = position[axis]
        elif anchor == TEXT_ALIGN_CENTER:
            target[axis] = position[axis] - bounds.center[axis]
        elif anchor == TEXT_ALIGN_RIGHT:
            target[axis] = position[axis] - bounds.bottomright[axis]
    surface.blit(rendered, target)
def loadhighscores(filename="highscores/hiscores.csv"):
    """Read high scores from *filename*.

    Each non-empty line has the form ``name;score``. Returns a dict
    mapping int(score) -> name, or an empty dict if the file is missing.
    Note: entries sharing a score collapse to one (last one wins), as in
    the original format.
    """
    try:
        lfile = open(filename)
    except IOError:
        return {}
    # 'with' guarantees the file is closed even if a line is malformed
    # (the original leaked the handle when int()/split() raised).
    with lfile:
        returndict = {}
        for line in lfile:
            sline = line.strip()
            if sline:
                name, score = sline.split(";")
                returndict[int(score)] = name
    return returndict
def savehighscores(scores, filename="highscores/hiscores.csv"):
    """Write *scores* (dict of int score -> name) to *filename* in the
    ``name;score`` line format read by loadhighscores()."""
    # 'with' closes the file even if a write fails; unpacking the items
    # replaces the opaque i[0]/i[1] indexing and the needless list() copy.
    with open(filename, "w") as lfile:
        for score, name in scores.items():
            lfile.write(name + ";" + str(score) + "\n")
| mousetail/WOAIII-mousetail | textutil.py | Python | mit | 1,167 |
# Fuck you Disyer. Stealing my fucking paypal. GET FUCKED: toontown.battle.MovieCamera
from panda3d.direct import ShowInterval, WaitInterval
from panda3d.core import Camera, Point3, lookAt
from direct.interval.IntervalGlobal import *
from BattleBase import *
from BattleProps import *
from toontown.toonbase.ToontownBattleGlobals import *
from SuitBattleGlobals import *
from direct.directnotify import DirectNotifyGlobal
import random
import MovieUtil
notify = DirectNotifyGlobal.directNotify.newCategory('MovieCamera')
def chooseHealShot(heals, attackDuration):
    """Build the camera track for a set of toon heal movies.

    Level-6 (uber, non-pet) heals get a single long close shot spanning
    triple the attack duration; everything else gets an opening group
    shot followed by a close shot.
    """
    isUber = 0
    for heal in heals:
        if heal['level'] == 6 and not heal.get('petId'):
            isUber = 1
    openShot = chooseHealOpenShot(heals, attackDuration, isUber)
    openDuration = openShot.getDuration()
    openName = openShot.getName()
    if isUber:
        closeShot = chooseHealCloseShot(heals, openDuration, openName, attackDuration * 3, isUber)
        track = Sequence(closeShot)
    else:
        closeShot = chooseHealCloseShot(heals, openDuration, openName, attackDuration, isUber)
        track = Sequence(openShot, closeShot)
    return track


def chooseHealOpenShot(heals, attackDuration, isUber = 0):
    """Pick the opening camera shot for heal movies."""
    av = None
    duration = 5.0 if isUber else 2.8
    shotChoices = [toonGroupShot]
    # Call the chosen shot builder directly; apply() is Python-2-only
    # and was removed in Python 3.
    return random.choice(shotChoices)(av, duration)


def chooseHealMidShot(heals, attackDuration, isUber = 0):
    """Pick a mid-movie camera shot for heal movies (high angle)."""
    av = None
    duration = 2.1
    shotChoices = [toonGroupHighShot]
    return random.choice(shotChoices)(av, duration)


def chooseHealCloseShot(heals, openDuration, openName, attackDuration, isUber = 0):
    """Pick the closing camera shot; it fills the remaining attack time."""
    av = None
    duration = attackDuration - openDuration
    shotChoices = [allGroupLowShot] if isUber else [toonGroupShot]
    return random.choice(shotChoices)(av, duration)
def chooseTrapShot(traps, attackDuration, enterDuration = 0, exitDuration = 0):
    """Camera track for trap movies: NPC enter, open, close, NPC exit."""
    enterShot = chooseNPCEnterShot(traps, enterDuration)
    openShot = chooseTrapOpenShot(traps, attackDuration)
    openDuration = openShot.getDuration()
    openName = openShot.getName()
    closeShot = chooseTrapCloseShot(traps, openDuration, openName, attackDuration)
    exitShot = chooseNPCExitShot(traps, exitDuration)
    return Sequence(enterShot, openShot, closeShot, exitShot)


def chooseTrapOpenShot(traps, attackDuration):
    """Opening shot for trap movies (fixed 3 seconds)."""
    av = None
    duration = 3.0
    shotChoices = [allGroupLowShot]
    # Direct call replaces the Python-2-only apply() builtin.
    return random.choice(shotChoices)(av, duration)


def chooseTrapCloseShot(traps, openDuration, openName, attackDuration):
    """Closing shot; fills whatever time the open shot left over."""
    av = None
    duration = attackDuration - openDuration
    shotChoices = [allGroupLowShot]
    return random.choice(shotChoices)(av, duration)
def chooseLureShot(lures, attackDuration, enterDuration = 0.0, exitDuration = 0.0):
    """Camera track for lure movies: NPC enter, open, close, NPC exit."""
    enterShot = chooseNPCEnterShot(lures, enterDuration)
    openShot = chooseLureOpenShot(lures, attackDuration)
    openDuration = openShot.getDuration()
    openName = openShot.getName()
    closeShot = chooseLureCloseShot(lures, openDuration, openName, attackDuration)
    exitShot = chooseNPCExitShot(lures, exitDuration)
    return Sequence(enterShot, openShot, closeShot, exitShot)


def chooseLureOpenShot(lures, attackDuration):
    """Opening shot for lure movies (fixed 3 seconds)."""
    av = None
    duration = 3.0
    shotChoices = [allGroupLowShot]
    # Direct call replaces the Python-2-only apply() builtin.
    return random.choice(shotChoices)(av, duration)


def chooseLureCloseShot(lures, openDuration, openName, attackDuration):
    """Closing shot for lure movies.

    If any suit is standing on a train-track (uber) trap, frame the
    luring toon from behind instead of using the group shot.
    """
    av = None
    duration = attackDuration - openDuration
    hasTrainTrackTrap = False
    battle = lures[0]['battle']
    for suit in battle.suits:
        if hasattr(suit, 'battleTrap') and suit.battleTrap == UBER_GAG_LEVEL_INDEX:
            hasTrainTrackTrap = True
    if hasTrainTrackTrap:
        shotChoices = [avatarLureTrainTrackShot]
        av = lures[0]['toon']
    else:
        shotChoices = [allGroupLowShot]
    return random.choice(shotChoices)(av, duration)


def avatarLureTrainTrackShot(avatar, duration):
    """Shot from behind and above the luring toon, used with train-track traps."""
    return heldRelativeShot(avatar, 0, -7.5, 1 + avatar.getHeight(), 0, 0, 0, duration, 'avatarLureTrainTrackShot')
def chooseSoundShot(sounds, targets, attackDuration, enterDuration = 0.0, exitDuration = 0.0):
    """Camera track for sound movies: NPC enter, open, close, NPC exit."""
    enterShot = chooseNPCEnterShot(sounds, enterDuration)
    openShot = chooseSoundOpenShot(sounds, targets, attackDuration)
    openDuration = openShot.getDuration()
    openName = openShot.getName()
    closeShot = chooseSoundCloseShot(sounds, targets, openDuration, openName, attackDuration)
    exitShot = chooseNPCExitShot(sounds, exitDuration)
    return Sequence(enterShot, openShot, closeShot, exitShot)


def chooseSoundOpenShot(sounds, targets, attackDuration):
    """Opening shot for sound movies; uber (level 6) sounds run longer
    and use a wider close-up when there is a single player."""
    duration = 3.1
    isUber = 0
    for sound in sounds:
        if sound['level'] == 6:
            isUber = 1
            duration = 5.0
    numSounds = len(sounds)
    av = None
    if numSounds == 1:
        av = sounds[0]['toon']
        if isUber:
            shotChoices = [avatarCloseUpThreeQuarterRightShotWide, allGroupLowShot, suitGroupThreeQuarterLeftBehindShot]
        else:
            shotChoices = [avatarCloseUpThreeQuarterRightShot, allGroupLowShot, suitGroupThreeQuarterLeftBehindShot]
    elif numSounds >= 2 and numSounds <= 4:
        shotChoices = [allGroupLowShot, suitGroupThreeQuarterLeftBehindShot]
    else:
        notify.error('Bad number of sounds: %s' % numSounds)
    # Direct call replaces the Python-2-only apply() builtin.
    return random.choice(shotChoices)(av, duration)


def chooseSoundCloseShot(sounds, targets, openDuration, openName, attackDuration):
    """Closing shot; frames the single hit suit, or the group."""
    numSuits = len(targets)
    av = None
    duration = attackDuration - openDuration
    if numSuits == 1:
        av = targets[0]['suit']
        shotChoices = [avatarCloseUpThrowShot,
                       avatarCloseUpThreeQuarterLeftShot,
                       allGroupLowShot,
                       suitGroupThreeQuarterLeftBehindShot]
    elif numSuits >= 2 and numSuits <= 4:
        shotChoices = [allGroupLowShot, suitGroupThreeQuarterLeftBehindShot]
    else:
        notify.error('Bad number of suits: %s' % numSuits)
    return random.choice(shotChoices)(av, duration)
def chooseThrowShot(throws, suitThrowsDict, attackDuration):
    """Camera track for throw movies: open on the throwers, close on the targets."""
    openShot = chooseThrowOpenShot(throws, suitThrowsDict, attackDuration)
    openDuration = openShot.getDuration()
    openName = openShot.getName()
    closeShot = chooseThrowCloseShot(throws, suitThrowsDict, openDuration, openName, attackDuration)
    return Sequence(openShot, closeShot)


def chooseThrowOpenShot(throws, suitThrowsDict, attackDuration):
    """Opening shot: close-ups for a single thrower, group shots otherwise."""
    numThrows = len(throws)
    av = None
    duration = 3.0
    if numThrows == 1:
        av = throws[0]['toon']
        shotChoices = [avatarCloseUpThrowShot,
                       avatarCloseUpThreeQuarterRightShot,
                       avatarBehindShot,
                       allGroupLowShot,
                       suitGroupThreeQuarterLeftBehindShot]
    elif numThrows >= 2 and numThrows <= 4:
        shotChoices = [allGroupLowShot, suitGroupThreeQuarterLeftBehindShot]
    else:
        notify.error('Bad number of throws: %s' % numThrows)
    # Direct call replaces the Python-2-only apply() builtin.
    return random.choice(shotChoices)(av, duration)


def chooseThrowCloseShot(throws, suitThrowsDict, openDuration, openName, attackDuration):
    """Closing shot; fills the remaining time, framing the hit suit(s)."""
    numSuits = len(suitThrowsDict)
    av = None
    duration = attackDuration - openDuration
    if numSuits == 1:
        # list(d)[0] works on Python 2 and 3; d.keys()[0] was Py2-only.
        av = base.cr.doId2do[list(suitThrowsDict)[0]]
        shotChoices = [avatarCloseUpThrowShot,
                       avatarCloseUpThreeQuarterLeftShot,
                       allGroupLowShot,
                       suitGroupThreeQuarterLeftBehindShot]
    elif numSuits >= 2 and numSuits <= 4 or numSuits == 0:
        shotChoices = [allGroupLowShot, suitGroupThreeQuarterLeftBehindShot]
    else:
        notify.error('Bad number of suits: %s' % numSuits)
    return random.choice(shotChoices)(av, duration)
def chooseSquirtShot(squirts, suitSquirtsDict, attackDuration):
    """Camera track for squirt movies: open on the squirters, close on the targets."""
    openShot = chooseSquirtOpenShot(squirts, suitSquirtsDict, attackDuration)
    openDuration = openShot.getDuration()
    openName = openShot.getName()
    closeShot = chooseSquirtCloseShot(squirts, suitSquirtsDict, openDuration, openName, attackDuration)
    return Sequence(openShot, closeShot)


def chooseSquirtOpenShot(squirts, suitSquirtsDict, attackDuration):
    """Opening shot: close-ups for a single squirter, group shots otherwise."""
    numSquirts = len(squirts)
    av = None
    duration = 3.0
    if numSquirts == 1:
        av = squirts[0]['toon']
        shotChoices = [avatarCloseUpThrowShot,
                       avatarCloseUpThreeQuarterRightShot,
                       avatarBehindShot,
                       allGroupLowShot,
                       suitGroupThreeQuarterLeftBehindShot]
    elif numSquirts >= 2 and numSquirts <= 4:
        shotChoices = [allGroupLowShot, suitGroupThreeQuarterLeftBehindShot]
    else:
        notify.error('Bad number of squirts: %s' % numSquirts)
    # Direct call replaces the Python-2-only apply() builtin.
    return random.choice(shotChoices)(av, duration)


def chooseSquirtCloseShot(squirts, suitSquirtsDict, openDuration, openName, attackDuration):
    """Closing shot; fills the remaining time, framing the hit suit(s)."""
    numSuits = len(suitSquirtsDict)
    av = None
    duration = attackDuration - openDuration
    if numSuits == 1:
        # list(d)[0] works on Python 2 and 3; d.keys()[0] was Py2-only.
        av = base.cr.doId2do[list(suitSquirtsDict)[0]]
        shotChoices = [avatarCloseUpThrowShot,
                       avatarCloseUpThreeQuarterLeftShot,
                       allGroupLowShot,
                       suitGroupThreeQuarterLeftBehindShot]
    elif numSuits >= 2 and numSuits <= 4:
        shotChoices = [allGroupLowShot, suitGroupThreeQuarterLeftBehindShot]
    else:
        notify.error('Bad number of suits: %s' % numSuits)
    return random.choice(shotChoices)(av, duration)
def chooseDropShot(drops, suitDropsDict, attackDuration, enterDuration = 0.0, exitDuration = 0.0):
    """Camera track for drop movies: NPC enter, open, close, NPC exit."""
    enterShot = chooseNPCEnterShot(drops, enterDuration)
    openShot = chooseDropOpenShot(drops, suitDropsDict, attackDuration)
    openDuration = openShot.getDuration()
    openName = openShot.getName()
    closeShot = chooseDropCloseShot(drops, suitDropsDict, openDuration, openName, attackDuration)
    exitShot = chooseNPCExitShot(drops, exitDuration)
    return Sequence(enterShot, openShot, closeShot, exitShot)


def chooseDropOpenShot(drops, suitDropsDict, attackDuration):
    """Opening shot: close-ups for a single dropper, group shots otherwise."""
    numDrops = len(drops)
    av = None
    duration = 3.0
    if numDrops == 1:
        av = drops[0]['toon']
        shotChoices = [avatarCloseUpThrowShot,
                       avatarCloseUpThreeQuarterRightShot,
                       avatarBehindShot,
                       allGroupLowShot,
                       suitGroupThreeQuarterLeftBehindShot]
    elif numDrops >= 2 and numDrops <= 4 or numDrops == 0:
        shotChoices = [allGroupLowShot, suitGroupThreeQuarterLeftBehindShot]
    else:
        notify.error('Bad number of drops: %s' % numDrops)
    # Direct call replaces the Python-2-only apply() builtin.
    return random.choice(shotChoices)(av, duration)


def chooseDropCloseShot(drops, suitDropsDict, openDuration, openName, attackDuration):
    """Closing shot; fills the remaining time, framing the hit suit(s)."""
    numSuits = len(suitDropsDict)
    av = None
    duration = attackDuration - openDuration
    if numSuits == 1:
        # list(d)[0] works on Python 2 and 3; d.keys()[0] was Py2-only.
        av = base.cr.doId2do[list(suitDropsDict)[0]]
        shotChoices = [avatarCloseUpThrowShot,
                       avatarCloseUpThreeQuarterLeftShot,
                       allGroupLowShot,
                       suitGroupThreeQuarterLeftBehindShot]
    elif numSuits >= 2 and numSuits <= 4 or numSuits == 0:
        shotChoices = [allGroupLowShot, suitGroupThreeQuarterLeftBehindShot]
    else:
        notify.error('Bad number of suits: %s' % numSuits)
    return random.choice(shotChoices)(av, duration)


def chooseNPCEnterShot(enters, entersDuration):
    """Group shot of the toons while an NPC (SOS) enters."""
    av = None
    shotChoices = [toonGroupShot]
    return random.choice(shotChoices)(av, entersDuration)


def chooseNPCExitShot(exits, exitsDuration):
    """Group shot of the toons while an NPC (SOS) exits."""
    av = None
    shotChoices = [toonGroupShot]
    return random.choice(shotChoices)(av, exitsDuration)
def chooseSuitShot(attack, attackDuration):
    """Build the camera track (plus play-by-play text) for a suit attack.

    Most attacks use the generic attack camera and differ only in the
    opening-shot duration, so they are table-driven; the handful of
    attacks with bespoke camera work are handled explicitly below.
    """
    suit = attack['suit']
    name = attack['id']
    camTrack = Sequence()

    def defaultCamera(attack = attack, attackDuration = attackDuration, openShotDuration = 3.5):
        # Generic attack camera: a random actor shot when the attack has
        # no target, otherwise an attack cam on the target toon(s).
        if 'target' not in attack:
            return randomActorShot(attack['suit'], attack['battle'], openShotDuration, 'suit')
        else:
            target = attack['target']
            if attack['group'] == ATK_TGT_GROUP:
                return randomGroupAttackCam(attack['suit'], target, attack['battle'], attackDuration, openShotDuration)
            return randomAttackCam(attack['suit'], target['toon'], attack['battle'], attackDuration, openShotDuration, 'suit')

    # Attack id -> opening-shot duration, for attacks that only use the
    # generic camera (3.5 is the generic default).
    openShotDurations = {
        AUDIT: 3.5, BITE: 2.8, BOUNCE_CHECK: 3.5, BRAIN_STORM: 2.4,
        BUZZ_WORD: 4.7, CALCULATE: 3.5, CANNED: 2.9, CHOMP: 2.8,
        CIGAR_SMOKE: 4.0, CLIPON_TIE: 3.3, CRUNCH: 3.4, DEMOTION: 1.7,
        DOUBLE_TALK: 3.9, EVICTION_NOTICE: 3.2, EVIL_EYE: 2.7,
        FILIBUSTER: 2.7, FILL_WITH_LEAD: 3.2, FINGER_WAG: 2.3,
        FIRED: 1.7, FOUNTAIN_PEN: 2.6, FREEZE_ASSETS: 2.5,
        HALF_WINDSOR: 2.8, HEAD_SHRINK: 1.3, GLOWER_POWER: 1.4,
        GUILT_TRIP: 0.9, HANG_UP: 5.1, HOT_AIR: 2.5, JARGON: 3.5,
        LEGALESE: 1.5, LIQUIDATE: 2.5, MARKET_CRASH: 2.9,
        MUMBO_JUMBO: 2.8, PARADIGM_SHIFT: 1.6, PECKING_ORDER: 2.8,
        PLAY_HARDBALL: 2.3, PINK_SLIP: 2.8, POUND_KEY: 2.8,
        POWER_TIE: 2.4, POWER_TRIP: 1.1, RAZZLE_DAZZLE: 2.2,
        RED_TAPE: 3.5, RE_ORG: 1.1, RESTRAINING_ORDER: 2.8,
        ROLODEX: 3.5, RUBBER_STAMP: 3.2, RUB_OUT: 2.2, SACKED: 2.9,
        SCHMOOZE: 2.8, SHRED: 4.1, SPIN: 1.7, SYNERGY: 1.7,
        TABULATE: 3.5, TEE_OFF: 4.5, WATERCOOLER: 3.5,
        WITHDRAWAL: 1.2, WRITE_OFF: 3.5, THROW_BOOK: 2.9,
        COGS_HIT: 3.8, POWER_UP: 3.8, TOONS_MISS: 9.1, POWER_DOWN: 9.1,
        SOUND_IMMUNITY: 5.9, KEY_LOG: 3.8, SONG_AND_DANCE: 4.1,
    }
    # Attack id -> (actor-shot duration, group-shot duration), for
    # attacks shown as a shot of the suit followed by a full group shot.
    actorThenGroupDurations = {
        DEPRECATE: (3.3, 3.2), DISASSEMBLE: (3.3, 6), BASH: (2.5, 3.5),
        DATA_CORRUPTION: (2.5, 3.3), ELECTROSTATIC_ENERGY: (2.8, 4.4),
        CLOUD_STORAGE: (2.8, 8.4), DISK_SCRATCH: (2.65, 1.75),
        REPROGRAM: (1, 4),
    }

    if name in openShotDurations:
        camTrack.append(defaultCamera(openShotDuration=openShotDurations[name]))
    elif name in actorThenGroupDurations:
        actorTime, groupTime = actorThenGroupDurations[name]
        camTrack.append(randomActorShot(suit, attack['battle'], actorTime, 'suit'))
        camTrack.append(allGroupShot(None, groupTime))
    elif name == QUAKE:
        # Strong camera shake, with the 'quake' flag set.
        camTrack.append(suitCameraShakeShot(suit, attackDuration, 5.15, 1))
    elif name == SHAKE:
        camTrack.append(suitCameraShakeShot(suit, attackDuration, 1.75))
    elif name == TREMOR:
        camTrack.append(suitCameraShakeShot(suit, attackDuration, 0.25))
    elif name == PICK_POCKET:
        camTrack.append(allGroupLowShot(suit, 2.7))
    elif name == COG_UP:
        # Single suit: generic cam; otherwise a group shot long enough
        # to cover every active suit.
        suits = attack['battle'].activeSuits
        if len(suits) == 1:
            camTrack.append(defaultCamera(openShotDuration=3.8))
        else:
            camTrack.append(suitGroupShot(None, 5.5 + 3.5 * (len(suits) - 1)))
    elif name == VOODOO_MAGIC:
        toon = attack['target']['toon']
        camTrack.append(avatarCloseUpShot(toon, 6.5))
    else:
        notify.warning('unknown attack id in chooseSuitShot: %d using default cam' % name)
        camTrack.append(defaultCamera())
    pbpText = attack['playByPlayText']
    displayName = TTLocalizer.SuitAttackNames[attack['name']]
    # Toon-miss/power-down movies run much longer, so hold the
    # play-by-play text longer too.
    pbpTrack = pbpText.getShowInterval(displayName, 3.5 if name not in (TOONS_MISS, POWER_DOWN) else 7.5)
    return Parallel(camTrack, pbpTrack)
def makeShot(x, y, z, h, p, r, duration, other = None, name = 'makeShot'):
    # Static held shot; positioned relative to 'other' when given, else absolute.
    if other:
        return heldRelativeShot(other, x, y, z, h, p, r, duration, name)
    else:
        return heldShot(x, y, z, h, p, r, duration, name)
def focusShot(x, y, z, duration, target, other = None, splitFocusPoint = None, name = 'focusShot'):
    # Place the camera at (x, y, z) (relative to 'other' if given) and aim it
    # at 'target' -- or between 'target' and 'splitFocusPoint' -- then hold.
    track = Sequence()
    if other:
        track.append(Func(camera.setPos, other, Point3(x, y, z)))
    else:
        track.append(Func(camera.setPos, Point3(x, y, z)))
    if splitFocusPoint:
        track.append(Func(focusCameraBetweenPoints, target, splitFocusPoint))
    else:
        track.append(Func(camera.lookAt, target))
    track.append(Wait(duration))
    return track
def moveShot(x, y, z, h, p, r, duration, other = None, name = 'moveShot'):
    # Lerp the camera to the given pos/hpr over 'duration'.
    return motionShot(x, y, z, h, p, r, duration, other, name)
def focusMoveShot(x, y, z, duration, target, other = None, name = 'focusMoveShot'):
    # Compute the hpr that looks at 'target' from (x, y, z), then lerp there.
    # NOTE(review): this snaps the camera immediately as a side effect of
    # computing the hpr, before the lerp interval is even played.
    camera.setPos(Point3(x, y, z))
    camera.lookAt(target)
    hpr = camera.getHpr()
    return motionShot(x, y, z, hpr[0], hpr[1], hpr[2], duration, other, name)
def chooseSOSShot(av, duration):
    """Pick a random camera shot for an SOS (friend call) movie."""
    shotChoices = [avatarCloseUpThreeQuarterRightShot,
                   avatarBehindShot,
                   avatarBehindHighShot,
                   suitGroupThreeQuarterLeftBehindShot]
    # Direct call replaces the Python-2-only apply() builtin.
    return random.choice(shotChoices)(av, duration)
def chooseRewardShot(av, duration, allowGroupShot = 1):
    """Camera shot for the reward (end-of-battle) movie.

    Frames *av* alone while it is playing its victory animation (or
    whenever group shots are disallowed); otherwise uses the standard
    toon group framing. The choice is deferred until the interval runs.
    """

    def positionCameraNow(av):
        if av.playingAnim == 'victory' or not allowGroupShot:
            # Two hand-tuned solo framings; pick one at random.
            soloFramings = [
                (0, 8, av.getHeight() * 0.66, 179, 15, 0),
                (5.2, 5.45, av.getHeight() * 0.66, 131.5, 3.6, 0),
            ]
            camera.setPosHpr(av, *random.choice(soloFramings))
        else:
            camera.setPosHpr(10, 0, 10, 115, -30, 0)

    return Sequence(Func(positionCameraNow, av), Wait(duration))
def heldShot(x, y, z, h, p, r, duration, name = 'heldShot'):
    """Snap the camera to the given pose and hold it for `duration` seconds."""
    return Sequence(Func(camera.setPosHpr, x, y, z, h, p, r), Wait(duration), name=name)
def heldRelativeShot(other, x, y, z, h, p, r, duration, name = 'heldRelativeShot'):
    """Like heldShot, but the pose is interpreted relative to `other`."""
    return Sequence(Func(camera.setPosHpr, other, x, y, z, h, p, r), Wait(duration), name=name)
def motionShot(x, y, z, h, p, r, duration, other = None, name = 'motionShot'):
    """Lerp the camera to pos (x, y, z) and hpr (h, p, r) over `duration`.

    When `other` is given, both lerps are relative to that node.  Returns a
    Parallel running the position and rotation lerps together.
    """
    if other:
        posTrack = LerpPosInterval(camera, duration, pos=Point3(x, y, z), other=other)
        hprTrack = LerpHprInterval(camera, duration, hpr=Point3(h, p, r), other=other)
    else:
        posTrack = LerpPosInterval(camera, duration, pos=Point3(x, y, z))
        hprTrack = LerpHprInterval(camera, duration, hpr=Point3(h, p, r))
    # Bug fix: `name` was accepted but never used; label the track so callers
    # (moveShot, focusMoveShot) get the interval name they asked for, matching
    # the Sequence(name=...) pattern used by heldShot above.
    return Parallel(posTrack, hprTrack, name=name)
def allGroupShot(avatar, duration):
    # Fixed wide shot of the whole battle group; `avatar` is ignored.
    return heldShot(10, 0, 10, 89, -30, 0, duration, 'allGroupShot')
def allGroupLowShot(avatar, duration):
    # Low-angle wide shot of the whole battle group; `avatar` is ignored.
    return heldShot(15, 0, 3, 89, 0, 0, duration, 'allGroupLowShot')
def allGroupLowDiagonalShot(avatar, duration):
    # Diagonal low wide shot of the whole battle group; `avatar` is ignored.
    # Bug fix: the interval was mislabeled 'allGroupLowShot' (copy-paste from
    # the shot above); name it after this function so debug output is accurate.
    return heldShot(7, 5, 6, 119, -30, 0, duration, 'allGroupLowDiagonalShot')
def toonGroupShot(avatar, duration):
    # Wide shot favoring the toon side of the battle; `avatar` is ignored.
    return heldShot(10, 0, 10, 115, -30, 0, duration, 'toonGroupShot')
def toonGroupHighShot(avatar, duration):
    # Low camera looking up (p=45) at the toon side; `avatar` is ignored.
    return heldShot(5, 0, 1, 115, 45, 0, duration, 'toonGroupHighShot')
def suitGroupShot(avatar, duration):
    # Wide shot favoring the suit side of the battle; `avatar` is ignored.
    return heldShot(10, 0, 10, 65, -30, 0, duration, 'suitGroupShot')
def suitGroupLowLeftShot(avatar, duration):
    # Low shot of the suit group from the left; `avatar` is ignored.
    return heldShot(8.4, -3.85, 2.75, 36.3, 3.25, 0, duration, 'suitGroupLowLeftShot')
def suitGroupThreeQuarterLeftBehindShot(avatar, duration):
    """Three-quarter shot of the suit group from behind, mirrored to the
    opposite side half of the time."""
    mirror = random.random() > 0.5
    x = 12.37 if mirror else -12.37
    h = 134.61 if mirror else -134.61
    return heldShot(x, 11.5, 8.16, h, -22.7, 0, duration, 'suitGroupThreeQuarterLeftBehindShot')
def suitWakeUpShot(avatar, duration):
    # Suit-side shot used when suits wake up; `avatar` is ignored.
    return heldShot(10, -5, 10, 65, -30, 0, duration, 'suitWakeUpShot')
def suitCameraShakeShot(avatar, duration, shakeIntensity, quake = 0):
    # Place the camera at a random vantage point, wait, then shake it
    # (vertically, plus horizontally for quakes) for the rest of `duration`.
    track = Sequence(name='suitShakeCameraShot')
    if quake == 1:
        shakeDelay = 1.1
        numShakes = 4
    else:
        shakeDelay = 0.3
        numShakes = 5
    postShakeDelay = 0.5
    # Split the remaining time evenly across the shakes; each shake spends
    # 1/numShakes of its slot moving and the rest waiting.
    shakeTime = (duration - shakeDelay - postShakeDelay) / numShakes
    shakeDuration = shakeTime * (1.0 / numShakes)
    shakeWaitInterval = shakeTime * ((numShakes - 1.0) / numShakes)
    # Default arguments capture the computed values at definition time.
    def shakeCameraTrack(intensity, shakeWaitInterval = shakeWaitInterval, quake = quake, shakeDuration = shakeDuration, numShakes = numShakes):
        vertShakeTrack = Sequence(Wait(shakeWaitInterval), Func(camera.setZ, camera.getZ() + intensity / 2), Wait(shakeDuration / 2), Func(camera.setZ, camera.getZ() - intensity), Wait(shakeDuration / 2), Func(camera.setZ, camera.getZ() + intensity / 2))
        horizShakeTrack = Sequence(Wait(shakeWaitInterval - shakeDuration / 2), Func(camera.setY, camera.getY() + intensity / 4), Wait(shakeDuration / 2), Func(camera.setY, camera.getY() - intensity / 2), Wait(shakeDuration / 2), Func(camera.setY, camera.getY() + intensity / 4), Wait(shakeDuration / 2), Func(camera.lookAt, Point3(0, 0, 0)))
        shakeTrack = Sequence()
        # NOTE(review): the same interval *instances* are appended on every
        # iteration rather than fresh copies -- confirm Panda3D tolerates
        # re-parenting an interval multiple times within one Sequence.
        for i in xrange(0, numShakes):
            if quake == 0:
                shakeTrack.append(vertShakeTrack)
            else:
                shakeTrack.append(Parallel(vertShakeTrack, horizShakeTrack))
        return shakeTrack
    # Random side-to-side vantage point, mirrored half of the time.
    x = 10 + random.random() * 3
    if random.random() > 0.5:
        x = -x
    z = 7 + random.random() * 3
    track.append(Func(camera.setPos, x, -5, z))
    track.append(Func(camera.lookAt, Point3(0, 0, 0)))
    track.append(Wait(shakeDelay))
    track.append(shakeCameraTrack(shakeIntensity))
    track.append(Wait(postShakeDelay))
    return track
def avatarCloseUpShot(avatar, duration):
    # Straight-on close-up, framed at 2/3 of the avatar's height.
    return heldRelativeShot(avatar, 0, 8, avatar.getHeight() * 0.66, 179, 15, 0, duration, 'avatarCloseUpShot')
def avatarCloseUpThrowShot(avatar, duration):
    # Slightly offset close-up used for throw gags.
    return heldRelativeShot(avatar, 3, 8, avatar.getHeight() * 0.66, 159, 3.6, 0, duration, 'avatarCloseUpThrowShot')
def avatarCloseUpThreeQuarterRightShot(avatar, duration):
    # Three-quarter close-up from the avatar's right.
    return heldRelativeShot(avatar, 5.2, 5.45, avatar.getHeight() * 0.66, 131.5, 3.6, 0, duration, 'avatarCloseUpThreeQuarterRightShot')
def avatarCloseUpThreeQuarterRightShotWide(avatar, duration):
    # Wider variant of the three-quarter right close-up.
    # NOTE(review): the interval name omits 'Wide' -- looks like a copy-paste;
    # confirm nothing matches on the name before renaming it.
    return heldRelativeShot(avatar, 7.2, 8.45, avatar.getHeight() * 0.66, 131.5, 3.6, 0, duration, 'avatarCloseUpThreeQuarterRightShot')
def avatarCloseUpThreeQuarterLeftShot(avatar, duration):
    # Mirror image of the three-quarter right close-up.
    return heldRelativeShot(avatar, -5.2, 5.45, avatar.getHeight() * 0.66, -131.5, 3.6, 0, duration, 'avatarCloseUpThreeQuarterLeftShot')
def avatarCloseUpThreeQuarterRightFollowShot(avatar, duration):
    # Hold a three-quarter right close-up for 65% of the time, pan toward
    # h=110 over 20%, then hold the new framing.
    # NOTE(review): 0.65 + 0.2 + 0.25 sums to 110% of `duration` -- confirm
    # the overrun is intended.
    track = Sequence(name='avatarCloseUpThreeQuarterRightFollowShot')
    track.append(heldRelativeShot(avatar, 5.2, 5.45, avatar.getHeight() * 0.66, 131.5, 3.6, 0, duration * 0.65))
    track.append(LerpHprInterval(nodePath=camera, other=avatar, duration=duration * 0.2, hpr=Point3(110, 3.6, 0), blendType='easeInOut'))
    track.append(Wait(duration * 0.25))
    return track
def avatarCloseUpZoomShot(avatar, duration):
    """Zoom from 10 to 6 units in front of the avatar over half of
    `duration`, then hold the close framing for the remainder."""
    # Bug fix: the track name was passed positionally, where Sequence expects
    # intervals; pass it as the `name` keyword like every other shot here does
    # (e.g. heldShot's Sequence(name=name)).
    track = Sequence(name='avatarCloseUpZoomShot')
    track.append(LerpPosHprInterval(nodePath=camera, other=avatar, duration=duration / 2, startPos=Point3(0, 10, avatar.getHeight()), startHpr=Point3(179, -10, 0), pos=Point3(0, 6, avatar.getHeight()), hpr=Point3(179, -10, 0), blendType='easeInOut'))
    track.append(Wait(duration / 2))
    return track
def avatarBehindShot(avatar, duration):
    # Over-the-shoulder shot from behind the avatar's right.
    return heldRelativeShot(avatar, 5, -7, avatar.getHeight(), 40, -12, 0, duration, 'avatarBehindShot')
def avatarBehindHighShot(avatar, duration):
    # High crane shot from behind the avatar's left, looking down.
    return heldRelativeShot(avatar, -4, -7, 5 + avatar.getHeight(), -30, -35, 0, duration, 'avatarBehindHighShot')
def avatarBehindHighRightShot(avatar, duration):
    # High crane shot from behind the avatar's right, looking down.
    # NOTE(review): interval name says 'avatarBehindHighShot' -- looks like a
    # copy-paste; confirm nothing matches on the name before renaming it.
    return heldRelativeShot(avatar, 7, -3, 5 + avatar.getHeight(), 45, -30, 0, duration, 'avatarBehindHighShot')
def avatarBehindThreeQuarterRightShot(avatar, duration):
    # Three-quarter shot from behind the avatar's right at 2/3 body height.
    return heldRelativeShot(avatar, 7.67, -8.52, avatar.getHeight() * 0.66, 25, 7.5, 0, duration, 'avatarBehindThreeQuarterRightShot')
def avatarSideFollowAttack(suit, toon, duration, battle):
    # Three-phase side shot: wind-up focused on the suit, a long pan over to
    # the toon while the attack travels, then a hold for the impact.
    windupDuration = duration * (0.1 + random.random() * 0.1)
    projectDuration = duration * 0.75
    impactDuration = duration - windupDuration - projectDuration
    suitHeight = suit.getHeight()
    toonHeight = toon.getHeight()
    # Focal points sit at 3/4 body height of each combatant.
    suitCentralPoint = suit.getPos(battle)
    suitCentralPoint.setZ(suitCentralPoint.getZ() + suitHeight * 0.75)
    toonCentralPoint = toon.getPos(battle)
    toonCentralPoint.setZ(toonCentralPoint.getZ() + toonHeight * 0.75)
    initialX = random.randint(12, 14)
    finalX = random.randint(7, 8)
    # NOTE(review): finalY is assigned but never used below.
    initialY = finalY = random.randint(-3, 0)
    initialZ = suitHeight * 0.5 + random.random() * suitHeight
    finalZ = toonHeight * 0.5 + random.random() * toonHeight
    # Mirror the whole move to the other side half of the time.
    if random.random() > 0.5:
        initialX = -initialX
        finalX = -finalX
    return Sequence(focusShot(initialX, initialY, initialZ, windupDuration, suitCentralPoint), focusMoveShot(finalX, finalY, finalZ, projectDuration, toonCentralPoint), Wait(impactDuration))
def focusCameraBetweenPoints(point1, point2):
    """Aim the global camera at the midpoint of `point1` and `point2`.

    The original branched on which coordinate was larger, but both branches
    reduce algebraically to the same midpoint (a + (b - a) * 0.5 ==
    (a + b) * 0.5), so compute it directly.
    """
    x = (point1[0] + point2[0]) * 0.5
    y = (point1[1] + point2[1]) * 0.5
    z = (point1[2] + point2[2]) * 0.5
    camera.lookAt(Point3(x, y, z))
def randomCamera(suit, toon, battle, attackDuration, openShotDuration):
    # Convenience wrapper: random attack camera with the suit as attacker.
    return randomAttackCam(suit, toon, battle, attackDuration, openShotDuration, 'suit')
def randomAttackCam(suit, toon, battle, attackDuration, openShotDuration, attackerString = 'suit'):
    """Build a two-part random camera sequence for an attack: an opening
    shot on the attacker followed by a closing shot on the defender.

    Each shot is chosen at random: 40% single-actor shot, 40% over-the-
    shoulder shot, 20% split shot framing both combatants.
    """
    openShotDuration = min(openShotDuration, attackDuration)
    closeShotDuration = attackDuration - openShotDuration
    if attackerString == 'suit':
        attacker, defender, defenderString = suit, toon, 'toon'
    else:
        attacker, defender, defenderString = toon, suit, 'suit'
    def pickShot(shotDuration, actor, actorString):
        roll = random.random()
        if roll > 0.6:
            return randomActorShot(actor, battle, shotDuration, actorString)
        elif roll > 0.2:
            return randomOverShoulderShot(suit, toon, battle, shotDuration, focus=actorString)
        return randomSplitShot(attacker, defender, battle, shotDuration)
    openShot = pickShot(openShotDuration, attacker, attackerString)
    closeShot = pickShot(closeShotDuration, defender, defenderString)
    return Sequence(openShot, closeShot)
def randomGroupAttackCam(suit, targets, battle, attackDuration, openShotDuration):
    # Group attack: open tight on the attacking suit, then pull back to frame
    # all targeted toons for whatever time remains of attackDuration.
    if openShotDuration > attackDuration:
        openShotDuration = attackDuration
    closeShotDuration = attackDuration - openShotDuration
    openShot = randomActorShot(suit, battle, openShotDuration, 'suit', groupShot=0)
    closeShot = randomToonGroupShot(targets, suit, closeShotDuration, battle)
    return Sequence(openShot, closeShot)
def randomActorShot(actor, battle, duration, actorType, groupShot = 0):
    # Random close shot of a single actor (suit or toon), aimed at a focal
    # point 3/4 of the way up its body.
    height = actor.getHeight()
    centralPoint = actor.getPos(battle)
    centralPoint.setZ(centralPoint.getZ() + height * 0.75)
    if actorType == 'suit':
        x = 4 + random.random() * 8
        y = -2 - random.random() * 4
        z = height * 0.5 + random.random() * height * 1.5
        if groupShot == 1:
            # Fixed, lower framing when several actors must fit in frame.
            y = -4
            z = height * 0.5
    else:
        x = 2 + random.random() * 8
        y = -2 + random.random() * 3
        z = height + random.random() * height * 1.5
        if groupShot == 1:
            y = y + 3
            z = height * 0.5
    # Mirror across the battle axis depending on the movie's shot direction.
    if MovieUtil.shotDirection == 'left':
        x = -x
    return focusShot(x, y, z, duration, centralPoint)
def randomSplitShot(suit, toon, battle, duration):
    # Random shot that keeps both combatants in frame by aiming between the
    # toon's and the suit's focal points (3/4 body height each).
    suitHeight = suit.getHeight()
    toonHeight = toon.getHeight()
    suitCentralPoint = suit.getPos(battle)
    suitCentralPoint.setZ(suitCentralPoint.getZ() + suitHeight * 0.75)
    toonCentralPoint = toon.getPos(battle)
    toonCentralPoint.setZ(toonCentralPoint.getZ() + toonHeight * 0.75)
    x = 9 + random.random() * 2
    y = -2 - random.random() * 2
    z = suitHeight * 0.5 + random.random() * suitHeight
    # Mirror across the battle axis depending on the movie's shot direction.
    if MovieUtil.shotDirection == 'left':
        x = -x
    return focusShot(x, y, z, duration, toonCentralPoint, splitFocusPoint=suitCentralPoint)
def randomOverShoulderShot(suit, toon, battle, duration, focus):
    # Over-the-shoulder shot: camera behind the suit looking at the toon when
    # focus == 'toon', behind the toon looking at the suit otherwise.  Aims
    # between the two focal points (3/4 body height each).
    suitHeight = suit.getHeight()
    toonHeight = toon.getHeight()
    suitCentralPoint = suit.getPos(battle)
    suitCentralPoint.setZ(suitCentralPoint.getZ() + suitHeight * 0.75)
    toonCentralPoint = toon.getPos(battle)
    toonCentralPoint.setZ(toonCentralPoint.getZ() + toonHeight * 0.75)
    x = 2 + random.random() * 10
    if focus == 'toon':
        y = 8 + random.random() * 6
        z = suitHeight * 1.2 + random.random() * suitHeight
    else:
        y = -10 - random.random() * 6
        z = toonHeight * 1.5
    # Mirror across the battle axis depending on the movie's shot direction.
    if MovieUtil.shotDirection == 'left':
        x = -x
    return focusShot(x, y, z, duration, toonCentralPoint, splitFocusPoint=suitCentralPoint)
def randomToonGroupShot(toons, suit, duration, battle):
    """Frame the whole toon group from a random vantage point on the side
    of the battle opposite the attacking suit.

    Args:
        toons: list of dicts, each with a 'toon' avatar under the 'toon' key.
        suit: the attacking suit (only its X position is used for mirroring).
        duration: how long the shot holds, in seconds.
        battle: node the positions are computed relative to.
    """
    # Average toon height, scaled by 0.75 to keep the focal point at roughly
    # chest level.  (Renamed from `sum`, which shadowed the builtin.)
    totalHeight = 0
    for t in toons:
        toon = t['toon']
        height = toon.getHeight()
        totalHeight = totalHeight + height
    avgHeight = totalHeight / len(toons) * 0.75
    suitPos = suit.getPos(battle)
    x = 1 + random.random() * 6
    # Shoot from the side opposite the suit.
    if suitPos.getX() > 0:
        x = -x
    # Either a near chest-level shot or a high, pulled-back one.
    if random.random() > 0.5:
        y = 4 + random.random() * 1
        z = avgHeight + random.random() * 6
    else:
        y = 11 + random.random() * 2
        z = 13 + random.random() * 2
    focalPoint = Point3(0, -4, avgHeight)
    return focusShot(x, y, z, duration, focalPoint)
def chooseFireShot(throws, suitThrowsDict, attackDuration):
    # Open on the throwing toon(s), then cut to the targeted suit(s); the
    # close shot fills whatever time the open shot leaves of attackDuration.
    openShot = chooseFireOpenShot(throws, suitThrowsDict, attackDuration)
    openDuration = openShot.getDuration()
    openName = openShot.getName()
    closeShot = chooseFireCloseShot(throws, suitThrowsDict, openDuration, openName, attackDuration)
    track = Sequence(openShot, closeShot)
    return track
def chooseFireOpenShot(throws, suitThrowsDict, attackDuration):
    """Pick the opening camera shot for a 'fire' attack movie.

    Returns a camera interval with a fixed 3 second duration; raises a
    notify.error for an unexpected number of throws.
    """
    numThrows = len(throws)
    av = None
    duration = 3.0
    if numThrows == 1:
        av = throws[0]['toon']
        shotChoices = [avatarCloseUpThrowShot,
         avatarCloseUpThreeQuarterRightShot,
         avatarBehindShot,
         allGroupLowShot,
         suitGroupThreeQuarterLeftBehindShot]
    elif numThrows >= 2 and numThrows <= 4:
        shotChoices = [allGroupLowShot, suitGroupThreeQuarterLeftBehindShot]
    else:
        notify.error('Bad number of throws: %s' % numThrows)
    shotChoice = random.choice(shotChoices)
    # apply() is Python-2-only and deprecated; call the shot factory directly.
    track = shotChoice(av, duration)
    return track
def chooseFireCloseShot(throws, suitThrowsDict, openDuration, openName, attackDuration):
    """Pick the closing camera shot for a 'fire' attack movie.

    The close shot fills whatever time the opening shot left of
    attackDuration; raises a notify.error for an unexpected suit count.
    """
    numSuits = len(suitThrowsDict)
    av = None
    duration = attackDuration - openDuration
    if numSuits == 1:
        # list() keeps this working with both Python 2 (list-returning) and
        # Python 3 (view-returning) dict.keys().
        av = base.cr.doId2do[list(suitThrowsDict.keys())[0]]
        shotChoices = [avatarCloseUpFireShot,
         avatarCloseUpThreeQuarterLeftFireShot,
         allGroupLowShot,
         suitGroupThreeQuarterLeftBehindShot]
    elif numSuits >= 2 and numSuits <= 4 or numSuits == 0:
        shotChoices = [allGroupLowShot, suitGroupThreeQuarterLeftBehindShot]
    else:
        notify.error('Bad number of suits: %s' % numSuits)
    shotChoice = random.choice(shotChoices)
    # apply() is Python-2-only and deprecated; call the shot factory directly.
    track = shotChoice(av, duration)
    return track
def avatarCloseUpFireShot(avatar, duration):
    # Pulled-back close-up used for the 'fire' move.
    return heldRelativeShot(avatar, 7, 17, avatar.getHeight() * 0.66, 159, 3.6, 0, duration, 'avatarCloseUpFireShot')
def avatarCloseUpThreeQuarterLeftFireShot(avatar, duration):
    # Wider three-quarter-left framing for the 'fire' move; reuses the plain
    # left-shot interval name.
    return heldRelativeShot(avatar, -8.2, 8.45, avatar.getHeight() * 0.66, -131.5, 3.6, 0, duration, 'avatarCloseUpThreeQuarterLeftShot') | DedMemez/ODS-August-2017 | battle/MovieCamera.py | Python | apache-2.0 | 38,639 |
#: E101 W191
for a in 'abc':
for b in 'xyz':
print a # indented with 8 spaces
print b # indented with 1 tab
#: E101 E122 W191 W191
if True:
pass
change_2_log = \
"""Change 2 by slamb@testclient on 2006/04/13 21:46:23
creation
"""
p4change = {
2: change_2_log,
}
class TestP4Poller(unittest.TestCase):
def setUp(self):
self.setUpGetProcessOutput()
return self.setUpChangeSource()
def tearDown(self):
pass
#
#: E101 W191 W191
if True:
foo(1,
2)
#: E101 E101 W191 W191
def test_keys(self):
"""areas.json - All regions are accounted for."""
expected = set([
u'Norrbotten',
u'V\xe4sterbotten',
])
#: E101 W191
if True:
print("""
tab at start of this line
""")
| ruchee/vimrc | vimfiles/bundle/vim-python/submodules/pycodestyle/testsuite/E10.py | Python | mit | 740 |
# coding: utf-8
#
# Copyright 2021 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Jobs that are run by CRON scheduler."""
from __future__ import annotations
from core.domain import recommendations_services
from core.jobs import base_jobs
from core.jobs.io import ndb_io
from core.jobs.transforms import job_result_transforms
from core.jobs.types import job_run_result
from core.platform import models
import apache_beam as beam
from typing import Dict, Iterable, List, Tuple, Union, cast
MYPY = False
if MYPY: # pragma: no cover
from mypy_imports import datastore_services
from mypy_imports import exp_models
from mypy_imports import recommendations_models
(exp_models, recommendations_models) = models.Registry.import_models(
[models.NAMES.exploration, models.NAMES.recommendations])
datastore_services = models.Registry.import_datastore_services()
MAX_RECOMMENDATIONS = 10
# Note: There is a threshold so that bad recommendations will be
# discarded even if an exploration has few similar explorations.
SIMILARITY_SCORE_THRESHOLD = 3.0
class ComputeExplorationRecommendationsJob(base_jobs.JobBase):
    """Job that computes similarity-based exploration recommendations and
    stores them as ExplorationRecommendationsModel instances.
    """
    def run(self) -> beam.PCollection[job_run_result.JobRunResult]:
        """Computes and stores recommendations for every exploration summary.

        Returns:
            PCollection. A PCollection of JobRunResult objects counting the
            recommendation models that were created.
        """
        exp_summary_models = (
            self.pipeline
            | 'Get all non-deleted models' >> (
                ndb_io.GetModels(exp_models.ExpSummaryModel.get_all()))
        )
        # Side input: every summary is compared against every other summary
        # inside ComputeSimilarity, so this stage is O(n^2) in summary count.
        exp_summary_iter = beam.pvalue.AsIter(exp_summary_models)
        exp_recommendations_models = (
            exp_summary_models
            | 'Compute similarity' >> beam.ParDo(
                ComputeSimilarity(), exp_summary_iter)
            | 'Group similarities per exploration ID' >> beam.GroupByKey()
            | 'Sort and slice similarities' >> beam.MapTuple(
                lambda exp_id, similarities: (
                    exp_id, self._sort_and_slice_similarities(similarities)))
            | 'Create recommendation models' >> beam.MapTuple(
                self._create_recommendation)
        )
        unused_put_result = (
            exp_recommendations_models
            | 'Put models into the datastore' >> ndb_io.PutModels()
        )
        return (
            exp_recommendations_models
            | 'Create job run result' >> (
                job_result_transforms.CountObjectsToJobRunResult())
        )
    @staticmethod
    def _sort_and_slice_similarities(
        similarities: Iterable[Dict[str, Union[str, float]]]
    ) -> List[str]:
        """Sorts similarities of explorations and slices them to
        a maximum length.

        Args:
            similarities: iterable(). Iterable of dictionaries. The structure
                of the dictionaries is:
                    exp_id: str. The ID of the similar exploration.
                    similarity_score: float. The similarity score for
                        the exploration.

        Returns:
            list(str). List of exploration IDs, sorted by the similarity.
        """
        sorted_similarities = sorted(
            similarities, reverse=True, key=lambda x: x['similarity_score'])
        # Keep only the MAX_RECOMMENDATIONS highest-scoring explorations.
        return [
            str(item['exp_id']) for item in sorted_similarities
        ][:MAX_RECOMMENDATIONS]
    @staticmethod
    def _create_recommendation(
        exp_id: str, recommended_exp_ids: Iterable[str]
    ) -> recommendations_models.ExplorationRecommendationsModel:
        """Creates exploration recommendation model.

        Args:
            exp_id: str. The exploration ID for which the recommendation is
                created.
            recommended_exp_ids: list(str). The list of recommended
                exploration IDs.

        Returns:
            ExplorationRecommendationsModel. The created model.
        """
        # Model construction must happen inside an NDB context when running
        # on Beam workers.
        with datastore_services.get_ndb_context():
            exp_recommendation_model = (
                recommendations_models.ExplorationRecommendationsModel(
                    id=exp_id, recommended_exploration_ids=recommended_exp_ids))
        exp_recommendation_model.update_timestamps()
        return exp_recommendation_model
class ComputeSimilarity(beam.DoFn):  # type: ignore[misc]
    """DoFn to compute similarities between explorations."""
    def process(
        self,
        ref_exp_summary_model: datastore_services.Model,
        compared_exp_summary_models: Iterable[datastore_services.Model]
    ) -> Iterable[Tuple[str, Dict[str, Union[str, float]]]]:
        """Compute similarities between explorations.

        Args:
            ref_exp_summary_model: ExpSummaryModel. Reference exploration
                summary. We are trying to find explorations similar to this
                reference summary.
            compared_exp_summary_models: list(ExpSummaryModel). List of other
                explorations summaries against which we compare the reference
                summary.

        Yields:
            (str, dict(str, str|float)). Tuple, the first element is
            the exploration ID of the reference exploration summary.
            The second is a dictionary. The structure of the dictionary is:
                exp_id: str. The ID of the similar exploration.
                similarity_score: float. The similarity score for
                    the exploration.
        """
        # The casts narrow the generic datastore model type for mypy only.
        ref_exp_summary_model = cast(
            exp_models.ExpSummaryModel, ref_exp_summary_model)
        with datastore_services.get_ndb_context():
            for compared_exp_summary_model in compared_exp_summary_models:
                compared_exp_summary_model = cast(
                    exp_models.ExpSummaryModel,
                    compared_exp_summary_model
                )
                # Skip comparing an exploration with itself.
                if compared_exp_summary_model.id == ref_exp_summary_model.id:
                    continue
                similarity_score = recommendations_services.get_item_similarity(  # type: ignore[no-untyped-call]
                    ref_exp_summary_model, compared_exp_summary_model
                )
                # Drop weak matches so bad recommendations are discarded even
                # when an exploration has few similar explorations.
                if similarity_score >= SIMILARITY_SCORE_THRESHOLD:
                    yield (
                        ref_exp_summary_model.id, {
                            'similarity_score': similarity_score,
                            'exp_id': compared_exp_summary_model.id
                        }
                    )
| kevinlee12/oppia | core/jobs/batch_jobs/exp_recommendation_computation_jobs.py | Python | apache-2.0 | 7,157 |
#!/usr/bin/env python
# Plot post-supernova-kick orbital parameters (P_orb vs. eccentricity) for an
# NS-forming binary, coloured by gravitational-wave merger time, overlaid with
# analytic constant-velocity and constant-angle kick curves.
import matplotlib.pyplot as plt
import matplotlib as mpl
from pylab import *
import numpy as np
import sys
# Local helper module `kicks` lives one directory up.
sys.path.insert(0, '../')
import kicks
# NOTE(review): `math.pi` is used below but `math` is never imported here --
# it relies on `from pylab import *` re-exporting it; confirm, or add an
# explicit `import math`.
params = {'backend': 'pdf',
          'figure.figsize': [4.3, 3.0],
          'font.family':'serif',
          'font.size':10,
          'font.serif': 'Times Roman',
          'axes.titlesize': 'medium',
          'axes.labelsize': 'medium',
          'legend.fontsize': 8,
          'legend.frameon' : False,
          'text.usetex': True,
          'figure.dpi': 600,
          'lines.markersize': 4,
          'lines.linewidth': 3,
          'lines.antialiased': False,
          'path.simplify': False,
          'legend.handlelength':3,
          'figure.subplot.bottom':0.15,
          'figure.subplot.top':0.95,
          'figure.subplot.left':0.15,
          'figure.subplot.right':0.92}
hexcols = ['#332288', '#88CCEE', '#44AA99', '#117733', '#999933', '#DDCC77',\
        '#CC6677', '#882255', '#AA4499', '#661100', '#6699CC', '#AA4466','#4477AA']
mpl.rcParams.update(params)
import colormaps as cmaps
plt.register_cmap(name='viridis', cmap=cmaps.viridis)
plt.set_cmap(cmaps.viridis)
# Data columns as used below: 3=P_orb [days], 4=eccentricity, 5=merger time
# [Gyr], 6=weight.  NOTE(review): inferred from usage -- confirm against the
# kick.dataNS generator.
data = np.loadtxt("kick.dataNS")
orbit = kicks.post_kick_parameters_P(2.348553e+01, 5.486435e+00, 5.504611e+01, 1.4,0,0,0)
norm = mpl.colors.Normalize(vmin=0,vmax=16)
fig, axes= plt.subplots(1)
# NOTE(review): linewidth is given as the string '0'; newer Matplotlib
# expects a float here -- confirm the pinned version accepts it.
scatter = axes.scatter(data[:,3][np.logical_and(data[:,5]>0,True)],\
        data[:,4][np.logical_and(data[:,5]>0,True)], c=data[:,5][np.logical_and(data[:,5]>0,True)],\
        marker="o", s=5, linewidth='0', cmap = "viridis", norm=norm, rasterized = True)
# Square marker: the zero-kick (pre-kick) orbit.
axes.scatter(orbit[0], orbit[1], marker="s", s=20, linewidth='0')
# Weighted fractions of surviving systems merging within 1 and 2 Gyr.
print("fraction below 1Gyr", \
        np.sum(data[:,6][np.logical_and(data[:,5]>0,data[:,5]<1)])\
        /np.sum(data[:,6][np.logical_and(data[:,5]>0,data[:,5]<13.8)]),
        np.sum(data[:,6][np.logical_and(data[:,5]>0,True)]),
        np.sum(data[:,6][np.logical_and(data[:,5]>0,data[:,5]<13.8)]))
print("fraction below 2Gyr", \
        np.sum(data[:,6][np.logical_and(data[:,5]>0,data[:,5]<2)])\
        /np.sum(data[:,6][np.logical_and(data[:,5]>0,data[:,5]<13.8)]),
        np.sum(data[:,6][np.logical_and(data[:,5]>0,True)]),
        np.sum(data[:,6][np.logical_and(data[:,5]>0,data[:,5]<13.8)]))
# Solid curves: fixed kick speed, polar angle swept 0..pi.
for kickv in [100,200,300]:#,400]:
    orbit = [kicks.post_kick_parameters_P(2.348553e+01, 5.486435e+00, 5.504611e+01, 1.4,kickv,theta,0) \
            for theta in np.linspace(0,math.pi,50)]
    orbit = np.array(orbit)
    axes.plot(orbit[:,0],orbit[:,1],"k-",linewidth=0.5)
# Dashed curves: fixed polar angle, kick speed swept 0..1000 km/s.
for kicktheta in [math.pi,math.pi-math.pi/4,math.pi-math.pi/3]:
    orbit = [kicks.post_kick_parameters_P(2.348553e+01, 5.486435e+00, 5.504611e+01, 1.4,kickv,kicktheta,0) \
            for kickv in np.linspace(0,1000,500)]
    orbit = np.array(orbit)
    axes.plot(orbit[:,0],orbit[:,1],color="0.5",linestyle="--",linewidth=0.5)
axes.text(30,0.38,"$v=100\\;{\\rm km\\;s^{-1}}$", rotation =15, fontsize = 7)
axes.text(30,0.61,"$v=200\\;{\\rm km\\;s^{-1}}$", rotation =3, fontsize = 7)
axes.text(30,0.83,"$v=300\\;{\\rm km\\;s^{-1}}$", rotation =-3, fontsize = 7)
axes.text(8,0.75,"$\\theta=\\pi$", rotation =100, fontsize = 7)
axes.text(11.5,0.7,"$\\theta=3\\pi/4$", rotation =100, fontsize = 7)
axes.text(16.5,0.65,"$\\theta=2\\pi/3$", rotation =90, fontsize = 7)
axes.text(1,0.2,"$P_{\\rm f}=23\\;{\\rm d}$", fontsize = 8)
axes.text(1,0.13,"$M_{1,\\rm f}=55M_\\odot$", fontsize = 8)
axes.text(1,0.06,"$M_{2,\\rm f}=5.5M_\\odot\\rightarrow 1.4M_\\odot$", fontsize = 8)
axes.text(49,0.13,"disrupt=$67\\%$", ha = "right", fontsize = 8)
axes.text(49,0.06,"merge=$3.4\\%$", ha = "right", fontsize = 8)
cbar = plt.colorbar(scatter)
cbar.set_label("Merger time ${\\rm[Gyr]}$")
axes.set_xlabel("$P_{\\rm orb}\\;{\\rm[days]}$")
axes.set_ylabel("eccentricity")
axes.set_xlim([0,50])
axes.set_ylim([0,1])
# NOTE(review): no artist above carries a label, so this legend call is a
# no-op (and warns on newer Matplotlib) -- confirm whether it can be removed.
axes.legend(loc="best", scatterpoints=1)
#CS = plt.contour(lg_mass, vrot, He, levels=[0.3,0.5,0.7])
#plt.clabel(CS, inline=1, fontsize=10)
plt.savefig("kick_result.pdf")
plt.clf()
plt.close(plt.gcf())
| orlox/massive_bins_2015 | 2016_ULX/scripts/NSBH/kick_result.py | Python | gpl-3.0 | 4,074 |
################################################################################
# THIS FILE IS 100% GENERATED BY ZPROJECT; DO NOT EDIT EXCEPT EXPERIMENTALLY #
# Read the zproject/README.md for information about making permanent changes. #
################################################################################
import utils
from . import destructors
libczmq_destructors = destructors.lib
class Zfile(object):
    """
    helper functions for working with files.
    """
    # This class is a thin cffi wrapper over the czmq zfile_* C API; every
    # method delegates to the corresponding utils.lib.zfile_* function.
    # The whole file is generated by zproject (see header) -- any fix must be
    # made in the zproject binding template, not here.
    def __init__(self, path, name):
        """
        If file exists, populates properties. CZMQ supports portable symbolic
        links, which are files with the extension ".ln". A symbolic link is a
        text file containing one line, the filename of a target file. Reading
        data from the symbolic link actually reads from the target file. Path
        may be NULL, in which case it is not used.
        """
        p = utils.lib.zfile_new(utils.to_bytes(path), utils.to_bytes(name))
        if p == utils.ffi.NULL:
            raise MemoryError("Could not allocate person")
        # NOTE(review): "person" above is a copy-paste from the zproject
        # binding template; the message should name Zfile.  Fix upstream.
        # ffi.gc returns a copy of the cdata object which will have the
        # destructor called when the Python object is GC'd:
        # https://cffi.readthedocs.org/en/latest/using.html#ffi-interface
        self._p = utils.ffi.gc(p, libczmq_destructors.zfile_destroy_py)
    def dup(self):
        """
        Duplicate a file item, returns a newly constructed item. If the file
        is null, or memory was exhausted, returns null.
        """
        return utils.lib.zfile_dup(self._p)
    def filename(self, path):
        """
        Return file name, remove path if provided
        """
        return utils.lib.zfile_filename(self._p, utils.to_bytes(path))
    def restat(self):
        """
        Refresh file properties from disk; this is not done automatically
        on access methods, otherwise it is not possible to compare directory
        snapshots.
        """
        utils.lib.zfile_restat(self._p)
    def modified(self):
        """
        Return when the file was last modified. If you want this to reflect the
        current situation, call zfile_restat before checking this property.
        """
        return utils.lib.zfile_modified(self._p)
    def cursize(self):
        """
        Return the last-known size of the file. If you want this to reflect the
        current situation, call zfile_restat before checking this property.
        """
        return utils.lib.zfile_cursize(self._p)
    def is_directory(self):
        """
        Return true if the file is a directory. If you want this to reflect
        any external changes, call zfile_restat before checking this property.
        """
        return utils.lib.zfile_is_directory(self._p)
    def is_regular(self):
        """
        Return true if the file is a regular file. If you want this to reflect
        any external changes, call zfile_restat before checking this property.
        """
        return utils.lib.zfile_is_regular(self._p)
    def is_readable(self):
        """
        Return true if the file is readable by this process. If you want this to
        reflect any external changes, call zfile_restat before checking this
        property.
        """
        return utils.lib.zfile_is_readable(self._p)
    def is_writeable(self):
        """
        Return true if the file is writeable by this process. If you want this
        to reflect any external changes, call zfile_restat before checking this
        property.
        """
        return utils.lib.zfile_is_writeable(self._p)
    def is_stable(self):
        """
        Check if file has stopped changing and can be safely processed.
        Updates the file statistics from disk at every call.
        """
        return utils.lib.zfile_is_stable(self._p)
    def has_changed(self):
        """
        Return true if the file was changed on disk since the zfile_t object
        was created, or the last zfile_restat() call made on it.
        """
        return utils.lib.zfile_has_changed(self._p)
    def remove(self):
        """
        Remove the file from disk
        """
        utils.lib.zfile_remove(self._p)
    def input(self):
        """
        Open file for reading
        Returns 0 if OK, -1 if not found or not accessible
        """
        return utils.lib.zfile_input(self._p)
    def output(self):
        """
        Open file for writing, creating directory if needed
        File is created if necessary; chunks can be written to file at any
        location. Returns 0 if OK, -1 if error.
        """
        return utils.lib.zfile_output(self._p)
    def read(self, bytes, offset):
        """
        Read chunk from file at specified position. If this was the last chunk,
        sets the eof property. Returns a null chunk in case of error.
        """
        return utils.lib.zfile_read(self._p, bytes, offset)
    def eof(self):
        """
        Returns true if zfile_read() just read the last chunk in the file.
        """
        return utils.lib.zfile_eof(self._p)
    def write(self, chunk, offset):
        """
        Write chunk to file at specified position
        Return 0 if OK, else -1
        """
        return utils.lib.zfile_write(self._p, chunk._p, offset)
    def readln(self):
        """
        Read next line of text from file. Returns a pointer to the text line,
        or NULL if there was nothing more to read from the file.
        """
        return utils.lib.zfile_readln(self._p)
    def close(self):
        """
        Close file, if open
        """
        utils.lib.zfile_close(self._p)
    def handle(self):
        """
        Return file handle, if opened
        """
        return utils.lib.zfile_handle(self._p)
    def digest(self):
        """
        Calculate SHA1 digest for file, using zdigest class.
        """
        return utils.lib.zfile_digest(self._p)
    def test(verbose):
        """
        Self test of this class.
        """
        # NOTE(review): generated without `self`; in Python 3 this is callable
        # as Zfile.test(verbose), but it is not a proper instance method.
        utils.lib.zfile_test(verbose)
################################################################################
# THIS FILE IS 100% GENERATED BY ZPROJECT; DO NOT EDIT EXCEPT EXPERIMENTALLY #
# Read the zproject/README.md for information about making permanent changes. #
################################################################################
| evoskuil/czmq | bindings/python_cffi/czmq_cffi/Zfile.py | Python | mpl-2.0 | 6,377 |
# encoding: UTF-8
#
# Copyright 2012-2013 Alejandro Autalán
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
# For further info, check http://pygubu.web.here
from __future__ import unicode_literals
from collections import OrderedDict
import sys
import xml.etree.ElementTree as ET
import re
try:
    import tkinter as tk
    import tkinter.ttk as ttk
except ImportError:
    # Python 2 fallback.  A bare `except:` here would also hide real problems
    # (e.g. a broken Tk install raising something other than ImportError).
    import Tkinter as tk
    import ttk
import pygubu
from pygubu.stockimage import StockImage
import pygubudesigner.widgets.toplevelframe
try:
    basestring
except NameError:
    # Python 3 has no basestring; alias it so string-type checks written for
    # Python 2 keep working.
    basestring = str
# Parses a Tk font spec such as "{DejaVu Sans} -12 bold" into family / size /
# modifiers groups.  Raw string avoids invalid escape sequences like '\{'
# (a DeprecationWarning since Python 3.6, and a SyntaxError in future
# versions); the matched text is unchanged.
RE_FONT = re.compile(r"(?P<family>\{\w+(\w|\s)*\}|\w+)\s?(?P<size>-?\d+)?\s?(?P<modifiers>\{\w+(\w|\s)*\}|\w+)?")
class BuilderForPreview(pygubu.Builder):
    """Pygubu builder variant used for in-designer widget previews."""
    def _pre_process_data(self, data):
        super(BuilderForPreview, self)._pre_process_data(data)
        cname = data['class']
        # Do not resize main window when
        # Sizegrip is dragged on preview panel.
        if cname == 'ttk.Sizegrip':
            data['properties']['class_'] = 'DUMMY_CLASS'
class Preview(object):
    def __init__(self, id_, canvas, x=0, y=0, rpaths=None):
        # Unique canvas tag shared by every shape belonging to this preview.
        self.id = 'preview_{0}'.format(id_)
        self.x = x
        self.y = y
        # Current preview body size; min_w/min_h are the floor enforced by
        # resize_by (update() later raises them to the widget's needs).
        self.w = 10
        self.h = 10
        self.min_w = self.w
        self.min_h = self.h
        # Height of the label/drag bar under the preview (recomputed in draw).
        self.resizer_h = 10
        self.canvas = canvas
        self.shapes = {}
        self._create_shapes()
        # --------
        self.builder = None
        self.canvas_window = None
        self._resource_paths = rpaths if rpaths is not None else []
    def width(self):
        # Width of the preview body (the resizer bar is the same width).
        return self.w
    def height(self):
        # Total height: preview body plus the resizer/label bar below it.
        return self.h + self.resizer_h
def _create_builder(self):
b = BuilderForPreview()
for p in self._resource_paths:
b.add_resource_path(p)
return b
    def _create_shapes(self):
        # Preview box
        # All shapes start far off-canvas; draw() moves them into place.
        c = self.canvas
        x, y, x2, y2 = (-1001, -1000, -1001, -1000)
        # Blue outline around the previewed widget.
        s1 = c.create_rectangle(x, y, x2, y2,
                                width=2, outline='blue', tags=self.id)
        # Filled bar under the preview that acts as the drag handle.
        s2 = c.create_rectangle(x, y, x2, y2, fill='blue', outline='blue',
                                tags=(self.id, 'resizer'))
        # Widget id label drawn on the resizer bar.
        s3 = c.create_text(x, y, text='widget_id', anchor=tk.NW,
                           fill='white', tags=self.id)
        # Embedded window item that will host the preview frame.
        s4 = c.create_window(x, y, anchor=tk.NW, tags=self.id)
        self.shapes = {
            'outline': s1,
            'resizer': s2,
            'text': s3,
            'window': s4
        }
        self.draw()
def erase(self):
self.canvas_window.destroy()
for key, sid in self.shapes.items():
self.canvas.delete(sid)
def draw(self):
c = self.canvas
x, y, x2, y2 = (self.x, self.y, self.x + self.w, self.y + self.h)
c.coords(self.shapes['outline'], x, y, x2, y2)
tbbox = c.bbox(self.shapes['text'])
tw, th = tbbox[2] - tbbox[0] + 10, tbbox[3] - tbbox[1] + 6
self.resizer_h = th
rx2 = self.x + self.w
ry2 = self.y + self.h + self.resizer_h
rx = rx2 - tw
ry = self.y + self.h
c.coords(self.shapes['resizer'], rx, ry, rx2, ry2)
tx = rx + 5
ty = ry + 3
c.coords(self.shapes['text'], tx, ty)
c.coords(self.shapes['window'], x, y)
def move_by(self, dx, dy):
self.x += dx
self.y += dy
self.draw()
def resize_to(self, w, h):
self.resize_by(w - self.w, h - self.h)
def resize_by(self, dw, dh):
new_w = self.w + dw
new_h = self.h + dh
changed = False
if new_w >= self.min_w:
self.w = new_w
changed = True
if new_h >= self.min_h:
self.h = new_h
changed = True
if changed:
self.draw()
self._resize_preview_window()
def _resize_preview_window(self):
if self.canvas_window:
self.canvas_window.configure(width=self.w, height=self.h)
def update(self, widget_id, xmlnode):
# delete current preview
# FIXME maybe do something to update preview without re-creating all ?
del self.builder
self.builder = None
self.canvas.itemconfigure(self.shapes['window'], window='')
if self.canvas_window:
self.canvas_window.destroy()
# Create preview
canvas_window = ttk.Frame(self.canvas)
canvas_window.rowconfigure(0, weight=1)
canvas_window.columnconfigure(0, weight=1)
self.canvas.itemconfigure(self.shapes['text'], text=widget_id)
self._preview_widget = \
self.create_preview_widget(canvas_window, widget_id, xmlnode)
self.canvas_window = canvas_window
self.canvas.itemconfigure(self.shapes['window'], window=canvas_window)
canvas_window.update_idletasks()
canvas_window.grid_propagate(0)
self.min_w = self._get_wreqwidth()
self.min_h = self._get_wreqheight()
self.w = self.min_w * 2
self.h = self.min_h * 2
self.resize_to(self.min_w, self.min_h)
def create_preview_widget(self, parent, widget_id, xmlnode):
self.builder = self._create_builder()
self.builder.add_from_xmlnode(xmlnode)
widget = self.builder.get_object(widget_id, parent)
return widget
def get_widget_by_id(self, widget_id):
return self.builder.get_object(widget_id)
def create_toplevel(self, widget_id, xmlnode):
# Create preview
builder = pygubu.Builder()
builder.add_from_xmlnode(xmlnode)
top = tk.Toplevel(self.canvas)
top.columnconfigure(0, weight=1)
top.rowconfigure(0, weight=1)
builder.get_object(widget_id, top)
return top
def _get_wreqwidth(self):
return self._preview_widget.winfo_reqwidth()
def _get_wreqheight(self):
return self._preview_widget.winfo_reqheight()
class DefaultMenuPreview(Preview):
    """Menu preview shown through a ttk.Menubutton placeholder."""

    def create_preview_widget(self, parent, widget_id, xmlnode):
        """Build the menu and hang it off a Menubutton placeholder."""
        self.builder = self._create_builder()
        self.builder.add_from_xmlnode(xmlnode)
        button = ttk.Menubutton(parent, text='Menu preview')
        button.grid()
        button.configure(menu=self.builder.get_object(widget_id, button))
        return button

    def create_toplevel(self, widget_id, xmlnode):
        """Attach the menu to a brand new Toplevel window."""
        builder = pygubu.Builder()
        builder.add_from_xmlnode(xmlnode)
        window = tk.Toplevel(self.canvas)
        window.columnconfigure(0, weight=1)
        window.rowconfigure(0, weight=1)
        window['menu'] = builder.get_object(widget_id, window)
        return window

    def resize_by(self, dw, hw):
        """Menu previews have a fixed size; ignore resize requests."""
        return
class OnCanvasMenuPreview(Preview):
    """Menu preview embedded directly on the canvas (selected on Linux).

    The menu size cannot be queried from Tk, so it is estimated from the
    configured fonts; see _calculate_menu_wh().
    """

    # Class-level cache of tk Font objects, keyed by font descriptor.
    fonts = {}

    def __init__(self, id_, canvas, x=0, y=0, rpaths=None):
        super(OnCanvasMenuPreview, self).__init__(id_, canvas, x, y, rpaths)
        self._menu = None
        self._cwidth = 0
        self._cheight = 0

    def _get_wreqwidth(self):
        # Use the estimated menu width instead of winfo_reqwidth().
        return self._cwidth

    def _get_wreqheight(self):
        # Use the estimated menu height instead of winfo_reqheight().
        return self._cheight

    def _get_font(self, font):
        """Return (and cache) a tk Font for a Tk font descriptor.

        `font` may be a plain name (str), a Tcl_Obj, or a tuple; the two
        latter are parsed with RE_FONT into family/size/modifiers.
        """
        fontname = family = 'TkMenuFont'
        size = 12
        modifiers = ''
        tclobject = False
        if font and isinstance(font, basestring):
            fontname = family = font
        elif isinstance(font, tk._tkinter.Tcl_Obj):
            fontname = family = str(font)
            tclobject = True
        elif isinstance(font, tuple):
            # NOTE(review): assumes the descriptor string is at index 4 of
            # the tuple -- confirm against the Tk font representation.
            fontname = str(font[4])
            tclobject = True
        if tclobject:
            s = RE_FONT.search(fontname)
            if s:
                g = s.groupdict()
                family = g['family'].replace('{', '').replace('}', '')
                size = g['size']
                modifiers = g['modifiers'] if g['modifiers'] else ''
        if fontname not in OnCanvasMenuPreview.fonts:
            weight = 'bold' if 'bold' in modifiers else 'normal'
            slant = 'italic' if 'italic' in modifiers else 'roman'
            underline = '1' if 'underline' in modifiers else '0'
            overstrike = '1' if 'overstrike' in modifiers else '0'
            kw = {'family': family, 'weight': weight, 'slant': slant,
                  'underline': underline, 'overstrike': overstrike}
            if size:
                kw['size'] = size
            # NOTE(review): relies on tkinter.font being importable as
            # tk.font -- confirm the module is imported elsewhere.
            OnCanvasMenuPreview.fonts[fontname] = tk.font.Font(**kw)
        return OnCanvasMenuPreview.fonts[fontname]

    def _calculate_menu_wh(self):
        """Estimate menu width and height from entry labels and fonts."""
        w = iw = 50
        h = ih = 0
        count = self._menu.index(tk.END) + 1
        # First calculate using the font parameters of the root menu:
        font = self._menu.cget('font')
        font = self._get_font(font)
        for i in range(0, count):
            mtype = self._menu.type(i)
            if mtype == 'tearoff':
                continue
            label = 'default'
            ifont = 'TkMenuFont'
            if mtype != 'separator':
                label = self._menu.entrycget(i, 'label')
                ifont = self._menu.entrycget(i, 'font')
            wpx = font.measure(label)
            hpx = font.metrics('linespace')
            w += wpx
            if hpx > h:
                h = hpx * 2
            # Calculate using font configured for each subitem
            ifont = self._get_font(ifont)
            wpx = ifont.measure(label)
            hpx = ifont.metrics('linespace')
            iw += wpx
            if hpx > ih:
                ih = hpx * 2
        # Then compare 2 sizes and use the greatest
        w = max(w, iw, 100)
        h = max(h, ih, 25)
        # Add a 25% margin to the estimation.
        self._cwidth = w + int(w * 0.25)
        self._cheight = h + int(h * 0.25)

    def create_preview_widget(self, parent, widget_id, xmlnode):
        """Embed a Toplevel carrying the menu into a container frame."""
        # container=True + use=<window id> embeds the Toplevel inside the
        # frame (Tk window embedding; this class is only selected on Linux).
        container = tk.Frame(parent, container=True, height=50)
        container.grid(sticky='nswe')
        container.rowconfigure(0, weight=1)
        container.columnconfigure(0, weight=1)
        self._top = top = tk.Toplevel(parent, use=container.winfo_id())
        top.maxsize(2048, 50)
        top.resizable(width=True, height=False)
        top.update()
        self.builder = self._create_builder()
        self.builder.add_from_xmlnode(xmlnode)
        self._menu = widget = self.builder.get_object(widget_id, top)
        top.configure(menu=widget)
        self._calculate_menu_wh()
        return parent

    def create_toplevel(self, widget_id, xmlnode):
        """Attach the menu to a new standalone Toplevel window."""
        # Create preview
        builder = pygubu.Builder()
        builder.add_from_xmlnode(xmlnode)
        top = tk.Toplevel(self.canvas)
        top.columnconfigure(0, weight=1)
        top.rowconfigure(0, weight=1)
        menu = builder.get_object(widget_id, top)
        top['menu'] = menu
        return top
# Pick the menu preview implementation for this platform: menus can be
# embedded on the canvas only on Linux/X11.
# Fix: use startswith() so Python 2 (where sys.platform == 'linux2')
# also selects the on-canvas implementation; this file keeps Python 2
# compatibility (Tkinter import fallback, basestring shim).
MenuPreview = DefaultMenuPreview
if sys.platform.startswith('linux'):
    MenuPreview = OnCanvasMenuPreview
class ToplevelPreview(Preview):
    """Preview for Toplevel widgets, rendered through a frame stand-in."""

    def create_preview_widget(self, parent, widget_id, xmlnode):
        """Build the toplevel as a ToplevelFramePreview gridded at (0, 0)."""
        xmlnode.set('class', 'pygubudesigner.ToplevelFramePreview')
        layout = ET.Element('layout')
        for prop_name, prop_value in (('row', '0'),
                                      ('column', '0'),
                                      ('sticky', 'nsew')):
            prop = ET.Element('property')
            prop.set('name', prop_name)
            prop.text = prop_value
            layout.append(prop)
        xmlnode.append(layout)
        self.builder = self._create_builder()
        self.builder.add_from_xmlnode(xmlnode)
        return self.builder.get_object(widget_id, parent)

    def create_toplevel(self, widget_id, xmlnode):
        """Build the real Toplevel for the standalone preview window."""
        builder = pygubu.Builder()
        builder.add_from_xmlnode(xmlnode)
        return builder.get_object(widget_id, self.canvas)
class DialogPreview(ToplevelPreview):
    """Preview for pygubu dialogs; runs the dialog when shown standalone."""

    def create_toplevel(self, widget_id, xmlnode):
        """Create the dialog toplevel and run it."""
        dialog = super(DialogPreview, self).create_toplevel(widget_id, xmlnode)
        dialog.run()
        return dialog
class PreviewHelper:
    """Manages every widget preview displayed on the designer canvas.

    Handles mouse interaction (resizing previews via their handles),
    vertical stacking of previews, selection indicators and standalone
    toplevel previews.
    """

    # Tags (and corner names) of the four selection indicators.
    indicators_tag = ('nw', 'ne', 'sw', 'se')

    def __init__(self, canvas):
        self.canvas = canvas
        self.previews = OrderedDict()
        self.padding = 20
        self.indicators = None
        self._sel_id = None
        self._sel_widget = None
        self.toplevel_previews = []
        self.resource_paths = []
        self._moving = False
        self._last_event = None
        self._objects_moving = None
        canvas.bind('<Button-1>', self.click_handler)
        canvas.bind('<ButtonRelease-1>', self.release_handler)
        canvas.bind('<Motion>', self.motion_handler)
        # <4>/<5> are the X11 mouse-wheel button events.
        canvas.bind('<4>', lambda event: canvas.yview('scroll', -1, 'units'))
        canvas.bind('<5>', lambda event: canvas.yview('scroll', 1, 'units'))
        self._create_indicators()

    def add_resource_path(self, path):
        """Register a resource path forwarded to newly created previews."""
        # Bug fix: this used to append to self._resource_paths, an attribute
        # never defined on this class (__init__ creates self.resource_paths,
        # which draw() passes to new previews), so it raised AttributeError.
        self.resource_paths.append(path)

    def motion_handler(self, event):
        """Update the cursor, or resize the preview being dragged."""
        if not self._moving:
            c = event.widget
            x = c.canvasx(event.x)
            y = c.canvasy(event.y)
            if self._over_resizer(x, y):
                c.configure(cursor='fleur')
            else:
                c.configure(cursor='')
        else:
            dx = event.x - self._last_event.x
            dy = event.y - self._last_event.y
            self._last_event = event
            if dx or dy:
                self.resize_preview(dx, dy)

    def click_handler(self, event):
        """Start a resize drag when a resizer handle is clicked."""
        c = event.widget
        x = c.canvasx(event.x)
        y = c.canvasy(event.y)
        if self._over_resizer(x, y):
            ids = c.find_overlapping(x, y, x, y)
            if ids:
                self._moving = True
                self._objects_moving = ids
                c.configure(cursor='fleur')
        self._last_event = event

    def release_handler(self, event):
        """Finish any ongoing resize drag."""
        self._objects_moving = None
        self._moving = False

    def _over_resizer(self, x, y):
        "Returns True if mouse is over a resizer"
        over_resizer = False
        c = self.canvas
        ids = c.find_overlapping(x, y, x, y)
        if ids:
            o = ids[0]
            tags = c.gettags(o)
            if 'resizer' in tags:
                over_resizer = True
        return over_resizer

    def resize_preview(self, dw, dh):
        "Resizes preview that is currently dragged"
        # Identify the preview through its 'preview_<id>' canvas tag.
        if self._objects_moving:
            id_ = self._objects_moving[0]
            tags = self.canvas.gettags(id_)
            for tag in tags:
                if tag.startswith('preview_'):
                    _, ident = tag.split('preview_')
                    preview = self.previews[ident]
                    preview.resize_by(dw, dh)
                    self.move_previews()
                    break
        self._update_cregion()

    def _update_cregion(self):
        """Grow the canvas scrollregion to include everything drawn."""
        bbox = self.canvas.bbox(tk.ALL)
        padd = 20
        if bbox is not None:
            region = (0, 0, bbox[2] + padd, bbox[3] + padd)
            self.canvas.configure(scrollregion=region)

    def move_previews(self):
        "Move previews after a resize event"
        # calculate new positions
        min_y = self._calc_preview_ypos()
        for idx, (key, p) in enumerate(self.previews.items()):
            new_dy = min_y[idx] - p.y
            self.previews[key].move_by(0, new_dy)
        self._update_cregion()
        self.show_selected(self._sel_id, self._sel_widget)

    def _calc_preview_ypos(self):
        "Calculates the previews positions on canvas"
        y = 10
        min_y = [y]
        for k, p in self.previews.items():
            y += p.height() + self.padding
            min_y.append(y)
        return min_y

    def _get_slot(self):
        "Returns the next coordinates for a preview"
        x = y = 10
        for k, p in self.previews.items():
            y += p.height() + self.padding
        return x, y

    def draw(self, identifier, widget_id, xmlnode, wclass):
        """Create or refresh the preview identified by `identifier`."""
        preview_class = Preview
        if wclass == 'tk.Menu':
            preview_class = MenuPreview
        elif wclass == 'tk.Toplevel':
            preview_class = ToplevelPreview
        elif wclass == 'pygubu.builder.widgets.dialog':
            preview_class = DialogPreview
        if identifier not in self.previews:
            x, y = self._get_slot()
            self.previews[identifier] = preview \
                = preview_class(identifier, self.canvas, x, y,
                                self.resource_paths)
        else:
            preview = self.previews[identifier]
        preview.update(widget_id, xmlnode)
        self.reset_selected(identifier)
        self.move_previews()

    def _create_indicators(self):
        """Create the four corner indicator labels (hidden off-canvas)."""
        self.indicators = []
        anchors = {'nw': tk.SE, 'ne': tk.SW, 'sw': tk.NE, 'se': tk.NW}
        for sufix in self.indicators_tag:
            label = tk.Label(self.canvas,
                             image=StockImage.get('indicator_' + sufix))
            self.indicators.append(label)
            self.canvas.create_window(-10, -10, anchor=anchors[sufix],
                                      window=label, tags=sufix)

    def _calculate_indicator_coords(self, tag, widget):
        """Canvas coordinates of one corner indicator for `widget`."""
        x = y = 0
        wx = widget.winfo_rootx()
        wy = widget.winfo_rooty()
        ww = widget.winfo_width()
        wh = widget.winfo_height()
        cx = self.canvas.winfo_rootx()
        cy = self.canvas.winfo_rooty()
        if tag == 'nw':
            x = wx - cx
            y = wy - cy
        if tag == 'ne':
            x = (wx - cx) + ww
            y = (wy - cy)
        if tag == 'sw':
            x = (wx - cx)
            y = (wy - cy) + wh
        if tag == 'se':
            x = (wx - cx) + ww
            y = (wy - cy) + wh
        x, y = self.canvas.canvasx(x), self.canvas.canvasy(y)
        return (x, y)

    def show_selected(self, identifier, selected_id=None):
        """Show the selection indicators around `selected_id`, or hide them."""
        canvas = self.canvas
        if selected_id is None:
            for indicator in self.indicators_tag:
                canvas.itemconfigure(indicator, state=tk.HIDDEN)
        elif identifier in self.previews:
            for indicator in self.indicators_tag:
                canvas.itemconfigure(indicator, state=tk.NORMAL)
            preview = self.previews[identifier]
            canvas.update_idletasks()
            widget = preview.get_widget_by_id(selected_id)
            for indicatorw in self.indicators:
                try:
                    indicatorw.lift(widget)
                except tk.TclError:
                    pass
            for tag in self.indicators_tag:
                x, y = self._calculate_indicator_coords(tag, widget)
                ox, oy = canvas.coords(tag)
                canvas.move(tag, x - ox, y - oy)
        self._sel_id = identifier
        self._sel_widget = selected_id

    def delete(self, identifier):
        """Erase a preview and restack the remaining ones."""
        if identifier in self.previews:
            preview = self.previews[identifier]
            preview.erase()
            del self.previews[identifier]
            self.reset_selected(identifier)
            self.move_previews()

    def reset_selected(self, identifier):
        """Forget the current selection if it belongs to `identifier`."""
        if identifier == self._sel_id:
            self._sel_id = None
            self._sel_widget = None

    def remove_all(self):
        """Remove every preview from the canvas."""
        # Bug fix: delete() removes entries from self.previews, so iterate
        # over a snapshot -- mutating a dict while iterating it raises
        # RuntimeError on Python 3.
        for identifier in list(self.previews):
            self.delete(identifier)
        self.resource_paths = []

    def preview_in_toplevel(self, identifier, widget_id, xmlnode):
        """Open the given preview in its own toplevel window."""
        preview = self.previews[identifier]
        top = preview.create_toplevel(widget_id, xmlnode)
        self.toplevel_previews.append(top)

    def close_toplevel_previews(self):
        """Destroy every standalone toplevel preview window."""
        for top in self.toplevel_previews:
            top.destroy()
        self.toplevel_previews = []
| mhcrnl/pygubu | pygubudesigner/previewer.py | Python | gpl-3.0 | 20,513 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from . import Dataset
import numpy as np
class MiniBatches(Dataset):
    """
    Serve a wrapped dataset's subsets as mini-batches.
    """

    def __init__(self, dataset, batch_size=20, cache=True):
        self.origin = dataset
        self.size = batch_size
        self._cached_train_set = None
        self._cached_valid_set = None
        self._cached_test_set = None
        self.cache = cache

    def _yield_data(self, subset):
        """Yield batches of per-column numpy arrays from `subset`."""
        if type(subset) != list:
            subset = list(subset)
        for start in xrange(0, len(subset), self.size):
            batch = subset[start:start + self.size]
            yield map(np.array, list(zip(*batch)))

    def train_set(self):
        """Training mini-batches (cached as a list when caching is on)."""
        if self.cache and self._cached_train_set is not None:
            return self._cached_train_set
        batches = self._yield_data(self.origin.train_set())
        if batches is None:
            return None
        if not self.cache:
            return batches
        self._cached_train_set = list(batches)
        return self._cached_train_set

    def test_set(self):
        """Test mini-batches, or None when the origin has no test set."""
        if not self.origin.test_set():
            return None
        if self.cache and self._cached_test_set is not None:
            return self._cached_test_set
        batches = self._yield_data(self.origin.test_set())
        if batches is None:
            return None
        if not self.cache:
            return batches
        self._cached_test_set = list(batches)
        return self._cached_test_set

    def valid_set(self):
        """Validation mini-batches, or None when the origin has none."""
        if not self.origin.valid_set():
            return None
        if self.cache and self._cached_valid_set is not None:
            return self._cached_valid_set
        batches = self._yield_data(self.origin.valid_set())
        if batches is None:
            return None
        if not self.cache:
            return batches
        self._cached_valid_set = list(batches)
        return self._cached_valid_set

    def train_size(self):
        """Number of training mini-batches (integer division on Python 2)."""
        total = self.origin.train_size()
        if total is None:
            total = len(list(self.origin.train_set()))
        return total / self.size
# -*- coding: utf-8 -*- {{{
# vim: set fenc=utf-8 ft=python sw=4 ts=4 sts=4 et:
# Copyright (c) 2015, Battelle Memorial Institute
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation
# are those of the authors and should not be interpreted as representing
# official policies, either expressed or implied, of the FreeBSD
# Project.
#
# This material was prepared as an account of work sponsored by an
# agency of the United States Government. Neither the United States
# Government nor the United States Department of Energy, nor Battelle,
# nor any of their employees, nor any jurisdiction or organization that
# has cooperated in the development of these materials, makes any
# warranty, express or implied, or assumes any legal liability or
# responsibility for the accuracy, completeness, or usefulness or any
# information, apparatus, product, software, or process disclosed, or
# represents that its use would not infringe privately owned rights.
#
# Reference herein to any specific commercial product, process, or
# service by trade name, trademark, manufacturer, or otherwise does not
# necessarily constitute or imply its endorsement, recommendation, or
# favoring by the United States Government or any agency thereof, or
# Battelle Memorial Institute. The views and opinions of authors
# expressed herein do not necessarily state or reflect those of the
# United States Government or any agency thereof.
#
# PACIFIC NORTHWEST NATIONAL LABORATORY
# operated by BATTELLE for the UNITED STATES DEPARTMENT OF ENERGY
# under Contract DE-AC05-76RL01830
# }}}
from os import path
from setuptools import setup, find_packages
MAIN_MODULE = 'agent'

# Find the agent package that contains the main module.
packages = find_packages('.')
agent_package = ''
for package in packages:
    # Fix: reuse the already-computed package list instead of calling
    # find_packages() a second time (same default '.' argument).
    # Because there could be other packages such as tests.
    if path.isfile(package + '/' + MAIN_MODULE + '.py'):
        agent_package = package
if not agent_package:
    raise RuntimeError('None of the packages under {dir} contain the file '
                       '{main_module}'.format(main_module=MAIN_MODULE + '.py',
                                              dir=path.abspath('.')))

# Find the version number from the main module.
agent_module = agent_package + '.' + MAIN_MODULE
# NOTE(review): import level -1 (implicit relative imports) is Python 2
# only; on Python 3 this raises ValueError -- confirm before porting.
_temp = __import__(agent_module, globals(), locals(), ['__version__'], -1)
__version__ = _temp.__version__

# Setup
setup(
    name=agent_package + 'agent',
    version=__version__,
    install_requires=['volttron'],
    packages=packages,
    entry_points={
        'setuptools.installation': [
            'eggsecutable = ' + agent_module + ':main',
        ]
    }
)
| VOLTTRON/volttron-applications | pnnl/deprecated/TCMAgent/setup.py | Python | bsd-3-clause | 3,996 |
from udata.utils import safe_unicode
from udata.forms import Form, fields, validators
from udata.i18n import lazy_gettext as _
from .actions import list_backends
from .models import VALIDATION_STATES, VALIDATION_REFUSED
# Public API of this module (a tuple thanks to the implicit parentheses).
__all__ = 'HarvestSourceForm', 'HarvestSourceValidationForm'
class HarvestConfigField(fields.DictField):
    '''
    A DictField with extra validations on known configurations.

    Validates the optional `filters` (list of {key, value} dicts) and
    `features` (dict of booleans) entries against the specs declared by
    the selected backend.
    '''
    def get_backend(self, form):
        '''Return the backend instance selected on the form.'''
        return next(b for b in list_backends() if b.name == form.backend.data)

    def get_filter_specs(self, backend, key):
        '''Return the backend filter specs for `key`, or None if unknown.'''
        candidates = (f for f in backend.filters if f.key == key)
        return next(candidates, None)

    def get_feature_specs(self, backend, key):
        '''Return the backend feature specs for `key`, or None if unknown.'''
        candidates = (f for f in backend.features if f.key == key)
        return next(candidates, None)

    def pre_validate(self, form):
        '''Validate filters and features against the backend specs.

        Raises validators.ValidationError on the first invalid entry.
        '''
        if self.data:
            backend = self.get_backend(form)
            # Validate filters
            for f in (self.data.get('filters') or []):
                if not ('key' in f and 'value' in f):
                    msg = 'A field should have both key and value properties'
                    raise validators.ValidationError(msg)
                specs = self.get_filter_specs(backend, f['key'])
                if not specs:
                    msg = 'Unknown filter key "{0}" for "{1}" backend'
                    msg = msg.format(f['key'], backend.name)
                    raise validators.ValidationError(msg)
                if isinstance(f['value'], str):
                    f['value'] = safe_unicode(f['value'])  # Fix encoding error
                if not isinstance(f['value'], specs.type):
                    # Fix: error message previously read '... should of type'.
                    msg = '"{0}" filter should be of type "{1}"'
                    msg = msg.format(specs.key, specs.type.__name__)
                    raise validators.ValidationError(msg)
            # Validate features
            for key, value in (self.data.get('features') or {}).items():
                if not isinstance(value, bool):
                    msg = 'A feature should be a boolean'
                    raise validators.ValidationError(msg)
                if not self.get_feature_specs(backend, key):
                    msg = 'Unknown feature "{0}" for "{1}" backend'
                    msg = msg.format(key, backend.name)
                    raise validators.ValidationError(msg)
class HarvestSourceForm(Form):
    '''Form used to create or edit a harvest source.'''
    name = fields.StringField(_('Name'), [validators.DataRequired()])
    description = fields.MarkdownField(
        _('Description'),
        description=_('Some optional details about this harvester'))
    url = fields.URLField(_('URL'), [validators.DataRequired()])
    # Choices are computed lazily (callable) so that backends registered
    # after import time are included.
    backend = fields.SelectField(_('Backend'), choices=lambda: [
        (b.name, b.display_name) for b in list_backends()
    ])
    owner = fields.CurrentUserField()
    organization = fields.PublishAsField(_('Publish as'))
    active = fields.BooleanField()
    autoarchive = fields.BooleanField()
    # Backend-specific configuration, validated by HarvestConfigField.
    config = HarvestConfigField()
class HarvestSourceValidationForm(Form):
    '''Form used to accept or refuse a pending harvest source.'''
    state = fields.SelectField(choices=list(VALIDATION_STATES.items()))
    # A comment is mandatory only when the source is refused.
    comment = fields.StringField(_('Comment'),
                                 [validators.RequiredIfVal('state',
                                                           VALIDATION_REFUSED
                                                           )])
| etalab/udata | udata/harvest/forms.py | Python | agpl-3.0 | 3,389 |
class DiffRegion(object):
    """Value object describing a changed region of a source file."""

    def __init__(self, id_, sourceFilePath, lineSpan, charSpan, enclosingMethodDefId):
        """Store the region's attributes.

        lineSpan and charSpan are (start, end) pairs; enclosingMethodDefId
        links the region to the method definition that contains it.
        """
        self.id = id_
        self.enclosingMethodDefId = enclosingMethodDefId
        self.sourceFilePath = sourceFilePath
        self.lineSpan = lineSpan
        self.charSpan = charSpan

    def __str__(self):
        return 'diff #%d src:%s lines:%d,%d' % (
            self.id, self.sourceFilePath, self.lineSpan[0], self.lineSpan[1])
from ldotcommons.sqlalchemy import create_session, declarative
from sqlalchemy import Column, String, Integer
import json
import pickle
from sqlalchemy.orm import exc
# Sentinel: distinguishes "no default supplied" from an explicit None.
_UNDEF = object()
def keyvaluemodel_for_session(name, session, tablename=None):
    """Build a key-value model bound to the given session's engine.

    `tablename` optionally overrides the default table name (which is the
    lower-cased model name).
    """
    base = declarative.declarative_base()
    base.metadata.bind = session.get_bind()
    # Bug fix: `tablename` (a str or None) was passed directly as
    # keyvaluemodel()'s `extra_dict` argument, making dict.update() raise
    # (TypeError for None, ValueError for a plain string). Wrap it into
    # the extra class dict instead so it overrides '__tablename__'.
    extra = {'__tablename__': tablename} if tablename else {}
    return keyvaluemodel(name, base, extra)
def keyvaluemodel(name, base, extra_dict=None):
    """Create a key-value model class called `name` derived from `base`.

    extra_dict: optional extra class attributes (e.g. '__tablename__'),
    merged over the defaults.

    Raises TypeError if `name` is not a non-empty str.
    """
    if not (isinstance(name, str) and name != ''):
        raise TypeError('name must be a non-empty str')

    class_dict = {
        '__tablename__': name.lower()
    }
    # Bug fix: the default used to be a shared mutable dict (extra_dict={});
    # use the None sentinel idiom instead.
    if extra_dict:
        class_dict.update(extra_dict)

    return type(
        name,
        (_KeyValueItem, base),
        class_dict)
class _KeyValueItem:
    """Mixin providing the columns and (de)serialization for key-value rows.

    Concrete SQLAlchemy models are built from this mixin by
    keyvaluemodel(); values are stored as strings together with the name
    of the serializer used ('str', 'bool', 'int', 'float', 'json' or
    'pickle').
    """
    id = Column(Integer, primary_key=True)
    key = Column(String, name='key', nullable=False)
    # Serialized representation of the value.
    _value = Column(String, name='value')
    # Serializer name used for _value.
    _typ = Column(String(), name='type', default='str', nullable=False)
    _resolved = _UNDEF
    def __init__(self, key, value, typ=None):
        # `typ`, when given, forces the stored type tag.
        self.key = key
        self._typ, self._value = self._native_to_internal(value)
        if typ:
            self._typ = typ
    @property
    def value(self):
        # Deserialize on access. (sic: '_interal_to_native' typo is the
        # actual method name below.)
        return self._interal_to_native(self._typ, self._value)
    @value.setter
    def value(self, v):
        self._typ, self._value = self._native_to_internal(v)
    @staticmethod
    def _native_to_internal(value):
        # Return a (typ, serialized-str) pair for a native value.
        # bool is tested before int on purpose: bool is an int subclass.
        if isinstance(value, str):
            typ = 'str'
        elif isinstance(value, bool):
            typ = 'bool'
            value = '1' if value else '0'
        elif isinstance(value, int):
            typ = 'int'
            value = str(value)
        elif isinstance(value, float):
            typ = 'float'
            value = str(value)
        else:
            # Fall back to JSON, then pickle for non-JSON-able objects.
            try:
                value = json.dumps(value)
                typ = 'json'
            except TypeError:
                value = pickle.dumps(value)
                typ = 'pickle'
        return (typ, value)
    @staticmethod
    def _interal_to_native(typ, value):
        # Inverse of _native_to_internal; raises ValueError on unknown typ.
        if typ == 'bool':
            return (value != '0')
        elif typ == 'int':
            return int(value)
        elif typ == 'float':
            return float(value)
        elif typ == 'str':
            return str(value)
        elif typ == 'json':
            return json.loads(value)
        elif typ == 'pickle':
            return pickle.loads(value)
        raise ValueError((typ, value))
    def __repr__(self):
        return "<{classname} {key}={value}>".format(
            classname=self.__class__.__name__,
            key=self.key,
            value=self.value)
class KeyValueManager:
    """Dict-like facade over a key-value model persisted with SQLAlchemy."""

    def __init__(self, model, session=None):
        """Wrap `model`; create a session from its bound engine if needed.

        Raises TypeError when no session is given and the model's
        metadata is not bound to any engine.
        """
        if not session:
            engine = model.metadata.bind
            if not engine:
                msg = ("Model '{model}' is not bind to any engine an session "
                       "argument is None")
                msg = msg.format(model=repr(model))
                raise TypeError(msg)
            session = create_session(engine=model.metadata.bind)
        self._sess = session
        self._model = model

    @property
    def _query(self):
        return self._sess.query(self._model)

    def get(self, k, default=_UNDEF):
        """Return the value stored for key `k`.

        Returns `default` when given and the key is missing; otherwise
        raises KeyError.
        """
        try:
            item = self._query.filter(self._model.key == k).one()
        except exc.NoResultFound:
            if default is _UNDEF:
                raise KeyError(k)
            else:
                return default
        return item.value

    def set(self, k, v):
        """Insert or update the entry for key `k` and commit."""
        try:
            item = self._query.filter(self._model.key == k).one()
            item.value = v
        except exc.NoResultFound:
            item = self._model(key=k, value=v)
            self._sess.add(item)
        self._sess.commit()

    def reset(self, k):
        """Delete the entry for key `k`; a no-op when the key is missing."""
        try:
            item = self._query.filter(self._model.key == k).one()
        except exc.NoResultFound:
            # Bug fix: this used to catch KeyError (which .one() never
            # raises) and then fall through to delete the unbound `item`,
            # producing NoResultFound or NameError instead of a clean no-op.
            return
        self._sess.delete(item)
        self._sess.commit()

    def children(self, k):
        """Iterate the keys stored under the '<k>.' prefix."""
        return map(
            lambda x: x.key,
            self._query.filter(self._model.key.startswith(k + ".")))
| ldotlopez/ldotcommons | ldotcommons/keyvaluestore.py | Python | gpl-2.0 | 4,184 |
'''
Copyright (c) 2011-2015, Agora Games, LLC All rights reserved.
https://github.com/agoragames/haigha/blob/master/LICENSE.txt
'''
from chai import Chai
from haigha.channel import Channel
from haigha.classes import channel_class
from haigha.classes.protocol_class import ProtocolClass
from haigha.classes.channel_class import ChannelClass
from haigha.frames.method_frame import MethodFrame
from haigha.writer import Writer
class ChannelClassTest(Chai):
def setUp(self):
super(ChannelClassTest, self).setUp()
connection = mock()
ch = Channel(connection, 42, {})
connection._logger = mock()
self.klass = ChannelClass(ch)
def test_init(self):
expect(ProtocolClass.__init__).args('foo', a='b')
klass = ChannelClass.__new__(ChannelClass)
klass.__init__('foo', a='b')
assert_equals(
{
11: klass._recv_open_ok,
20: klass._recv_flow,
21: klass._recv_flow_ok,
40: klass._recv_close,
41: klass._recv_close_ok,
}, klass.dispatch_map)
assert_equals(None, klass._flow_control_cb)
def test_cleanup(self):
self.klass._cleanup()
assert_equals(None, self.klass._channel)
assert_equals(None, self.klass.dispatch_map)
def test_set_flow_cb(self):
assert_equals(None, self.klass._flow_control_cb)
self.klass.set_flow_cb('foo')
assert_equals('foo', self.klass._flow_control_cb)
def test_open(self):
writer = mock()
expect(mock(channel_class, 'Writer')).returns(writer)
expect(writer.write_shortstr).args('')
expect(mock(channel_class, 'MethodFrame')).args(
42, 20, 10, writer).returns('frame')
expect(self.klass.send_frame).args('frame')
expect(self.klass.channel.add_synchronous_cb).args(
self.klass._recv_open_ok)
self.klass.open()
def test_recv_open_ok(self):
expect(self.klass.channel._notify_open_listeners)
self.klass._recv_open_ok('methodframe')
def test_activate_when_not_active(self):
self.klass.channel._active = False
expect(self.klass._send_flow).args(True)
self.klass.activate()
def test_activate_when_active(self):
self.klass.channel._active = True
stub(self.klass._send_flow)
self.klass.activate()
def test_deactivate_when_not_active(self):
self.klass.channel._active = False
stub(self.klass._send_flow)
self.klass.deactivate()
def test_deactivate_when_active(self):
self.klass.channel._active = True
expect(self.klass._send_flow).args(False)
self.klass.deactivate()
def test_send_flow(self):
writer = mock()
expect(mock(channel_class, 'Writer')).returns(writer)
expect(writer.write_bit).args('active')
expect(mock(channel_class, 'MethodFrame')).args(
42, 20, 20, writer).returns('frame')
expect(self.klass.send_frame).args('frame')
expect(self.klass.channel.add_synchronous_cb).args(
self.klass._recv_flow_ok)
self.klass._send_flow('active')
def test_recv_flow_no_cb(self):
self.klass._flow_control_cb = None
rframe = mock()
writer = mock()
expect(rframe.args.read_bit).returns('active')
expect(mock(channel_class, 'Writer')).returns(writer)
expect(writer.write_bit).args('active')
expect(mock(channel_class, 'MethodFrame')).args(
42, 20, 21, writer).returns('frame')
expect(self.klass.send_frame).args('frame')
self.klass._recv_flow(rframe)
assert_equals('active', self.klass.channel._active)
def test_recv_flow_with_cb(self):
self.klass._flow_control_cb = mock()
rframe = mock()
writer = mock()
expect(rframe.args.read_bit).returns('active')
expect(mock(channel_class, 'Writer')).returns(writer)
expect(writer.write_bit).args('active')
expect(mock(channel_class, 'MethodFrame')).args(
42, 20, 21, writer).returns('frame')
expect(self.klass.send_frame).args('frame')
expect(self.klass._flow_control_cb)
self.klass._recv_flow(rframe)
def test_recv_flow_ok_no_cb(self):
self.klass._flow_control_cb = None
rframe = mock()
expect(rframe.args.read_bit).returns('active')
self.klass._recv_flow_ok(rframe)
assert_equals('active', self.klass.channel._active)
def test_recv_flow_ok_with_cb(self):
self.klass._flow_control_cb = mock()
rframe = mock()
expect(rframe.args.read_bit).returns('active')
expect(self.klass._flow_control_cb)
self.klass._recv_flow_ok(rframe)
assert_equals('active', self.klass.channel._active)
def test_close_when_not_closed(self):
writer = mock()
expect(mock(channel_class, 'Writer')).returns(writer)
expect(writer.write_short).args('rcode')
expect(writer.write_shortstr).args(('reason' * 60)[:255])
expect(writer.write_short).args('cid')
expect(writer.write_short).args('mid')
expect(mock(channel_class, 'MethodFrame')).args(
42, 20, 40, writer).returns('frame')
expect(self.klass.send_frame).args('frame')
expect(self.klass.channel.add_synchronous_cb).args(
self.klass._recv_close_ok)
self.klass.close('rcode', 'reason' * 60, 'cid', 'mid')
assert_true(self.klass.channel._closed)
assert_equals({
'reply_code': 'rcode',
'reply_text': 'reason' * 60,
'class_id': 'cid',
'method_id': 'mid',
}, self.klass.channel._close_info)
def test_close_when_closed(self):
self.klass.channel._closed = True
stub(self.klass.send_frame)
self.klass.close()
def test_close_when_channel_reference_cleared_in_recv_close_ok(self):
writer = mock()
expect(mock(channel_class, 'Writer')).returns(writer)
expect(writer.write_short).args('rcode')
expect(writer.write_shortstr).args('reason')
expect(writer.write_short).args('cid')
expect(writer.write_short).args('mid')
expect(mock(channel_class, 'MethodFrame')).args(
42, 20, 40, writer).returns('frame')
expect(self.klass.send_frame).args('frame')
expect(self.klass.channel.add_synchronous_cb).args(self.klass._recv_close_ok).side_effect(
setattr, self.klass, '_channel', None)
# assert nothing raised
self.klass.close('rcode', 'reason', 'cid', 'mid')
def test_close_when_error_sending_frame(self):
self.klass.channel._closed = False
writer = mock()
expect(mock(channel_class, 'Writer')).returns(writer)
expect(writer.write_short).args(0)
expect(writer.write_shortstr).args('')
expect(writer.write_short).args(0)
expect(writer.write_short).args(0)
expect(mock(channel_class, 'MethodFrame')).args(
42, 20, 40, writer).returns('frame')
expect(self.klass.send_frame).args(
'frame').raises(RuntimeError('fail'))
assert_raises(RuntimeError, self.klass.close)
assert_true(self.klass.channel._closed)
assert_equals({
'reply_code': 0,
'reply_text': '',
'class_id': 0,
'method_id': 0,
}, self.klass.channel._close_info)
def test_recv_close(self):
rframe = mock()
expect(rframe.args.read_short).returns('rcode')
expect(rframe.args.read_shortstr).returns('reason')
expect(rframe.args.read_short).returns('cid')
expect(rframe.args.read_short).returns('mid')
expect(mock(channel_class, 'MethodFrame')).args(
42, 20, 41).returns('frame')
expect(self.klass.channel._closed_cb).args(final_frame='frame')
assert_false(self.klass.channel._closed)
self.klass._recv_close(rframe)
assert_true(self.klass.channel._closed)
assert_equals({
'reply_code': 'rcode',
'reply_text': 'reason',
'class_id': 'cid',
'method_id': 'mid',
}, self.klass.channel._close_info)
    def test_recv_close_ok(self):
        """_recv_close_ok must mark the channel closed and invoke the
        channel's closed callback."""
        expect(self.klass.channel._closed_cb)
        self.klass.channel._closed = False
        self.klass._recv_close_ok('frame')
        assert_true(self.klass.channel._closed)
| lipixun/haigha | tests/unit/classes/channel_class_test.py | Python | bsd-3-clause | 8,531 |
# coding=utf-8
import time
import os
from selenium import webdriver
from selenium.webdriver.remote.webdriver import WebElement
from selenium.webdriver.common.by import By
UPLOAD_EXE_PATH = os.path.join(os.path.dirname(__file__), 'upload.exe')
def time_check(func):
    """Decorator that prints how long an element lookup took.

    Assumes the wrapped callable is invoked as ``(self, item, ...)`` so
    that ``args[1]`` is the locator key being searched for -- TODO confirm
    if applied to functions with a different signature.
    """
    from functools import wraps

    @wraps(func)  # preserve the wrapped function's name/docstring
    def wrapper(*args, **kwargs):
        t1 = time.time()
        res = func(*args, **kwargs)
        t2 = time.time()
        # args[1] is the element key (see find_element/find_elements)
        print(u'查找【%s】元素用时%.3f秒' % (args[1], t2 - t1))
        return res
    return wrapper
class BasePage(object):
    """Base page object.

    Lazily creates a single shared Chrome driver for all page objects and
    resolves elements through a ``locators`` mapping that subclasses are
    expected to define (key -> (By, value) tuple).
    """

    # Shared WebDriver instance, created on first instantiation.
    driver = None  # type:webdriver.Chrome

    def __new__(cls, *args, **kwargs):
        if not cls.driver:
            cls.driver = webdriver.Chrome()
        # object.__new__ accepts no extra arguments in Python 3; forwarding
        # *args/**kwargs here raised TypeError on instantiation.
        return object.__new__(cls)

    def element(self, item):
        """
        :rtype: WebElement
        """
        try:
            value = self.locators[item]
        except KeyError:
            raise Exception(u'没有定义【%s】元素的定位方式,请检查locators' % item)
        return self.find_element(item, value)

    def elements(self, item):
        """
        :rtype: list of WebElement
        """
        try:
            value = self.locators[item]
        except KeyError:
            raise Exception(u'没有定义【%s】元素的定位方式,请检查locators' % item)
        return self.find_elements(item, value)

    @staticmethod
    def upload(file_path):
        # Drive the external upload helper for native file dialogs.
        os.system('{exe_path} {file_path}'.format(exe_path=UPLOAD_EXE_PATH, file_path=file_path))

    @time_check
    def find_element(self, item, value):
        """Locate a single element; raise with the friendly key name on failure."""
        try:
            return self.driver.find_element(*value)
        except Exception:
            raise Exception(u'没有找到元素【%s】' % item)

    @time_check
    def find_elements(self, item, value):
        """Locate all matching elements; raise with the friendly key name on failure."""
        try:
            return self.driver.find_elements(*value)
        except Exception:
            raise Exception(u'没有找到元素【%s】' % item)

    @classmethod
    def quit(cls):
        """Shut down the shared driver, ignoring errors, and clear it so a
        new one is created on next use."""
        try:
            cls.driver.quit()
        except Exception:
            pass
        finally:
            cls.driver = None
| yuyu1987/pithy-test | pithy/app.py | Python | apache-2.0 | 2,051 |
# -*- coding: utf-8 -*-
def before_related(adminform):
    """Split ``adminform.fieldsets`` at the ``related_go_here`` placeholder
    and keep only the part before it for rendering.

    Both halves are stashed on the form (``fieldsets_before`` /
    ``fieldsets_after``) so :func:`after_related` can render the rest.
    If the placeholder is absent, the fieldsets are left untouched.
    """
    marker = ('related_go_here', {'fields': []})
    adminform.fieldsets_before = adminform.fieldsets
    adminform.fieldsets_after = []
    try:
        idx = adminform.fieldsets.index(marker)
    except ValueError:
        # No placeholder: everything renders in the "before" section.
        return adminform
    adminform.fieldsets_before = adminform.fieldsets[:idx]
    adminform.fieldsets_after = adminform.fieldsets[idx + 1:]
    adminform.fieldsets = adminform.fieldsets_before
    return adminform
def after_related(adminform):
    """Switch ``adminform.fieldsets`` to the part stashed after the
    ``related_go_here`` placeholder by :func:`before_related`.

    If a further placeholder exists in that remainder, split again so the
    pattern can nest.  If :func:`before_related` never ran (no
    ``fieldsets_after`` attribute), the form is returned unchanged.
    """
    marker = ('related_go_here', {'fields': []})
    try:
        adminform.fieldsets = adminform.fieldsets_after
    except AttributeError:
        # before_related never ran; nothing was stashed.
        return adminform
    try:
        idx = adminform.fieldsets.index(marker)
    except ValueError:
        # No nested placeholder: render the whole remainder.
        return adminform
    adminform.fieldsets_before = adminform.fieldsets[:idx]
    adminform.fieldsets_after = adminform.fieldsets[idx + 1:]
    adminform.fieldsets = adminform.fieldsets_after
    return adminform
| Depado/starmato-admin | starmato/admin/templatetags/_fieldset_related.py | Python | mit | 976 |
from typing import Iterator, Tuple

from pyspades.constants import (
    BLUE_FLAG, GREEN_FLAG,
    CTF_MODE,
    BLUE_BASE, GREEN_BASE,
)
from pyspades.entities import Flag, Base
from pyspades.protocol import BaseProtocol
class Team:
    """One side in a game (blue/green/spectator), owning its score, kills
    and -- in CTF mode -- its flag and base entities."""

    score = None
    flag = None
    base = None
    other = None  # the opposing Team; wired up externally
    protocol = None
    name = None
    kills = None

    def __init__(self, team_id: int, name: str, color: Tuple[int, int, int],
                 spectator: bool, protocol: BaseProtocol) -> None:
        self.id = team_id
        self.name = name
        self.protocol = protocol
        self.color = color
        self.spectator = spectator

    def get_players(self) -> Iterator:
        """Yield every connected player currently on this team.

        This is a generator; the previous ``-> None`` annotation was wrong.
        """
        for player in self.protocol.players.values():
            if player.team is self:
                yield player

    def count(self) -> int:
        """Return the number of players currently on this team."""
        return sum(1 for player in self.protocol.players.values()
                   if player.team is self)

    def initialize(self) -> None:
        """Reset score/kills and, in CTF mode, (re)spawn flag and base.

        Spectator teams carry no game state and are skipped.
        """
        if self.spectator:
            return
        self.score = 0
        self.kills = 0
        if self.protocol.game_mode == CTF_MODE:
            self.set_flag()
            self.set_base()

    def set_flag(self) -> Flag:
        """Create this team's flag entity if needed and (re)spawn it.

        The protocol's on_flag_spawn hook may override the spawn location.
        """
        entity_id = [BLUE_FLAG, GREEN_FLAG][self.id]
        if self.flag is None:
            self.flag = Flag(entity_id, self.protocol)
            self.flag.team = self
            self.protocol.entities.append(self.flag)
        location = self.get_entity_location(entity_id)
        returned = self.protocol.on_flag_spawn(location[0], location[1],
                                               location[2], self.flag,
                                               entity_id)
        if returned is not None:
            location = returned
        self.flag.set(*location)
        self.flag.player = None
        return self.flag

    def set_base(self) -> Base:
        """Create this team's base entity if needed and (re)spawn it.

        The protocol's on_base_spawn hook may override the spawn location.
        """
        entity_id = [BLUE_BASE, GREEN_BASE][self.id]
        if self.base is None:
            self.base = Base(entity_id, self.protocol)
            self.base.team = self
            self.protocol.entities.append(self.base)
        location = self.get_entity_location(entity_id)
        returned = self.protocol.on_base_spawn(location[0], location[1],
                                               location[2], self.base,
                                               entity_id)
        if returned is not None:
            location = returned
        self.base.set(*location)
        return self.base

    def get_entity_location(self, entity_id: int) -> Tuple[int, int, int]:
        """Pick a spawn location for the given entity (always on land)."""
        return self.get_random_location(True)

    def get_random_location(self, force_land: bool = False) -> Tuple[int, int, int]:
        """Return a random location inside this team's half of the map."""
        x_offset = self.id * 384
        return self.protocol.get_random_location(force_land, (
            x_offset, 128, 128 + x_offset, 384))

    def get_entities(self) -> Iterator:
        """Yield every map entity (flag/base) belonging to this team."""
        for item in self.protocol.entities:
            if item.team is self:
                yield item

    def __repr__(self):
        return "{}(id={}, name={}, color={}, spectator={}, protocol)".format(
            self.__class__.__name__, self.id,
            self.name, self.color, self.spectator
        )
| piqueserver/piqueserver | pyspades/team.py | Python | gpl-3.0 | 3,117 |
#!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.blocktools import create_block, create_coinbase, add_witness_commitment, WITNESS_COMMITMENT_HEADER
from test_framework.key import CECKey, CPubKey
import time
import random
from binascii import hexlify
# The versionbit bit used to signal activation of SegWit
VB_WITNESS_BIT = 1
# Versionbits signalling period length, in blocks (regtest)
VB_PERIOD = 144
# Signalling blocks required within a period for lock-in (regtest)
VB_ACTIVATION_THRESHOLD = 108
# BIP9 "top bits" prefix marking a versionbits-style block version
VB_TOP_BITS = 0x20000000
# Per-block sigop cost limit -- presumably mirrors consensus; confirm
MAX_SIGOP_COST = 80000
'''
SegWit p2p test.
'''
def get_virtual_size(witness_block):
    """Return the virtual size (vsize) of a witness block.

    vsize = ceil((3 * base_size + total_size) / 4), i.e. base + witness/4;
    the "+3" rounds the division up.
    """
    stripped_size = len(witness_block.serialize())
    full_size = len(witness_block.serialize(with_witness=True))
    return int((3 * stripped_size + full_size + 3) / 4)
# Note: we can reduce code by using SingleNodeConnCB (in master, not 0.12)
class TestNode(NodeConnCB):
    """P2P test peer: records the last message of each type received and
    provides polling helpers to synchronize with the node under test.

    All shared state is guarded by mininode_lock, matching the mininode
    message-delivery thread.
    """

    def __init__(self):
        NodeConnCB.__init__(self)
        self.connection = None
        self.ping_counter = 1          # nonce for sync_with_ping round-trips
        self.last_pong = msg_pong(0)
        self.sleep_time = 0.05         # polling interval for sync()
        self.getdataset = set()        # every inv hash the node requested

    def add_connection(self, conn):
        self.connection = conn

    # Wrapper for the NodeConn's send_message function
    def send_message(self, message):
        self.connection.send_message(message)

    def on_inv(self, conn, message):
        self.last_inv = message

    def on_block(self, conn, message):
        self.last_block = message.block
        self.last_block.calc_sha256()

    def on_getdata(self, conn, message):
        for inv in message.inv:
            self.getdataset.add(inv.hash)
        self.last_getdata = message

    def on_pong(self, conn, message):
        self.last_pong = message

    def on_reject(self, conn, message):
        self.last_reject = message
        #print message

    # Syncing helpers
    def sync(self, test_function, timeout=60):
        """Poll test_function (under mininode_lock) until it returns True
        or timeout (seconds) elapses, then raise AssertionError."""
        while timeout > 0:
            with mininode_lock:
                if test_function():
                    return
            time.sleep(self.sleep_time)
            timeout -= self.sleep_time
        raise AssertionError("Sync failed to complete")

    def sync_with_ping(self, timeout=60):
        """Round-trip a ping/pong to ensure prior messages were processed."""
        self.send_message(msg_ping(nonce=self.ping_counter))
        test_function = lambda: self.last_pong.nonce == self.ping_counter
        self.sync(test_function, timeout)
        self.ping_counter += 1
        return

    def wait_for_block(self, blockhash, timeout=60):
        test_function = lambda: self.last_block != None and self.last_block.sha256 == blockhash
        self.sync(test_function, timeout)
        return

    def wait_for_getdata(self, timeout=60):
        test_function = lambda: self.last_getdata != None
        self.sync(test_function, timeout)

    def wait_for_inv(self, expected_inv, timeout=60):
        # NOTE(review): this waits until last_inv *differs* from
        # expected_inv ("!="), which looks inverted -- confirm intent.
        test_function = lambda: self.last_inv != expected_inv
        self.sync(test_function, timeout)

    def announce_tx_and_wait_for_getdata(self, tx, timeout=60):
        """Send an inv for tx (type 1 = MSG_TX) and wait for a getdata."""
        with mininode_lock:
            self.last_getdata = None
        self.send_message(msg_inv(inv=[CInv(1, tx.sha256)]))
        self.wait_for_getdata(timeout)
        return

    def announce_block_and_wait_for_getdata(self, block, use_header, timeout=60):
        """Announce block via headers or inv (type 2 = MSG_BLOCK) and wait
        for the node's getdata request."""
        with mininode_lock:
            self.last_getdata = None
        if use_header:
            msg = msg_headers()
            msg.headers = [ CBlockHeader(block) ]
            self.send_message(msg)
        else:
            self.send_message(msg_inv(inv=[CInv(2, block.sha256)]))
        self.wait_for_getdata()
        return

    def announce_block(self, block, use_header):
        """Announce block via headers or inv without waiting for getdata."""
        with mininode_lock:
            self.last_getdata = None
        if use_header:
            msg = msg_headers()
            msg.headers = [ CBlockHeader(block) ]
            self.send_message(msg)
        else:
            self.send_message(msg_inv(inv=[CInv(2, block.sha256)]))

    def request_block(self, blockhash, inv_type, timeout=60):
        """Request a block by hash with the given inv type and return it."""
        with mininode_lock:
            self.last_block = None
        self.send_message(msg_getdata(inv=[CInv(inv_type, blockhash)]))
        self.wait_for_block(blockhash, timeout)
        return self.last_block

    def test_transaction_acceptance(self, tx, with_witness, accepted):
        """Relay tx (with or without witness serialization) and assert
        whether it entered the node's mempool."""
        tx_message = msg_tx(tx)
        if with_witness:
            tx_message = msg_witness_tx(tx)
        self.send_message(tx_message)
        self.sync_with_ping()
        assert_equal(tx.hash in self.connection.rpc.getrawmempool(), accepted)

    # Test whether a witness block had the correct effect on the tip
    def test_witness_block(self, block, accepted, with_witness=True):
        if with_witness:
            self.send_message(msg_witness_block(block))
        else:
            self.send_message(msg_block(block))
        self.sync_with_ping()
        assert_equal(self.connection.rpc.getbestblockhash() == block.hash, accepted)
# Used to keep track of anyone-can-spend outputs that we can use in the tests
class UTXO(object):
    """A spendable test output: txid (sha256), output index n, and value."""

    def __init__(self, sha256, n, nValue):
        # Record the outpoint components and the output's value.
        self.sha256, self.n, self.nValue = sha256, n, nValue
class SegWitTest(BitcoinTestFramework):
    def setup_chain(self):
        """Initialize a clean (empty) chain for 3 nodes."""
        initialize_chain_clean(self.options.tmpdir, 3)
    def add_options(self, parser):
        """Register --oldbinary for the upgrade-after-activation test."""
        parser.add_option("--oldbinary", dest="oldbinary",
                          default=None,
                          help="pre-segwit bitcoind binary for upgrade testing")
    def setup_network(self):
        """Start node0 (permissive) and node1 (standardness rules); if
        --oldbinary was given, also start a pre-segwit node2 to test
        upgrade after activation."""
        self.nodes = []
        self.nodes.append(start_node(0, self.options.tmpdir, ["-debug", "-logtimemicros=1", "-whitelist=127.0.0.1"]))
        # Start a node for testing IsStandard rules.
        self.nodes.append(start_node(1, self.options.tmpdir, ["-debug", "-logtimemicros=1", "-whitelist=127.0.0.1", "-acceptnonstdtxn=0"]))
        connect_nodes(self.nodes[0], 1)
        # If an old bitcoind is given, do the upgrade-after-activation test.
        self.test_upgrade = False
        if (self.options.oldbinary != None):
            self.nodes.append(start_node(2, self.options.tmpdir, ["-debug", "-whitelist=127.0.0.1"], binary=self.options.oldbinary))
            connect_nodes(self.nodes[0], 2)
            self.test_upgrade = True
    ''' Helpers '''
    # Build a block on top of node0's tip.
    def build_next_block(self, nVersion=4):
        """Return a solved-header-free block template on node0's tip with
        the requested block version (not yet solved)."""
        tip = self.nodes[0].getbestblockhash()
        height = self.nodes[0].getblockcount() + 1
        block_time = self.nodes[0].getblockheader(tip)["mediantime"] + 1
        block = create_block(int(tip, 16), create_coinbase(height), block_time)
        block.nVersion = nVersion
        block.rehash()
        return block
    # Adds list of transactions to block, adds witness commitment, then solves.
    def update_witness_block_with_transactions(self, block, tx_list, nonce=0):
        """Append tx_list to block, refresh the witness commitment with the
        given nonce, and solve the proof of work."""
        block.vtx.extend(tx_list)
        add_witness_commitment(block, nonce)
        block.solve()
        return
    ''' Individual tests '''
    def test_witness_services(self):
        """The node must advertise the NODE_WITNESS service bit."""
        print("\tVerifying NODE_WITNESS service bit")
        assert((self.test_node.connection.nServices & NODE_WITNESS) != 0)
    # See if sending a regular transaction works, and create a utxo
    # to use in later tests.
    def test_non_witness_transaction(self):
        """Sanity check: a plain transaction relays normally, and its
        output seeds self.utxo for the rest of the tests."""
        # Mine a block with an anyone-can-spend coinbase,
        # let it mature, then try to spend it.
        print("\tTesting non-witness transaction")
        block = self.build_next_block(nVersion=1)
        block.solve()
        self.test_node.send_message(msg_block(block))
        self.test_node.sync_with_ping() # make sure the block was processed
        txid = block.vtx[0].sha256

        self.nodes[0].generate(99) # let the block mature

        # Create a transaction that spends the coinbase
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(txid, 0), b""))
        tx.vout.append(CTxOut(49*100000000, CScript([OP_TRUE])))
        tx.calc_sha256()

        # Check that serializing it with or without witness is the same
        # This is a sanity check of our testing framework.
        assert_equal(msg_tx(tx).serialize(), msg_witness_tx(tx).serialize())

        self.test_node.send_message(msg_witness_tx(tx))
        self.test_node.sync_with_ping() # make sure the tx was processed
        assert(tx.hash in self.nodes[0].getrawmempool())
        # Save this transaction for later
        self.utxo.append(UTXO(tx.sha256, 0, 49*100000000))
        self.nodes[0].generate(1)
    # Verify that blocks with witnesses are rejected before activation.
    def test_unnecessary_witness_before_segwit_activation(self):
        """A witness-bearing block must be rejected pre-activation
        (anti-spam), but resending it without witness data must succeed."""
        print("\tTesting behavior of unnecessary witnesses")
        # For now, rely on earlier tests to have created at least one utxo for
        # us to use
        assert(len(self.utxo) > 0)
        assert(get_bip9_status(self.nodes[0], 'segwit')['status'] != 'active')

        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        tx.vout.append(CTxOut(self.utxo[0].nValue-1000, CScript([OP_TRUE])))
        tx.wit.vtxinwit.append(CTxinWitness())
        tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([CScriptNum(1)])]

        # Verify the hash with witness differs from the txid
        # (otherwise our testing framework must be broken!)
        tx.rehash()
        assert(tx.sha256 != tx.calc_sha256(with_witness=True))

        # Construct a segwit-signaling block that includes the transaction.
        block = self.build_next_block(nVersion=(VB_TOP_BITS|(1 << VB_WITNESS_BIT)))
        self.update_witness_block_with_transactions(block, [tx])
        # Sending witness data before activation is not allowed (anti-spam
        # rule).
        self.test_node.test_witness_block(block, accepted=False)
        # TODO: fix synchronization so we can test reject reason
        # Right now, bitcoind delays sending reject messages for blocks
        # until the future, making synchronization here difficult.
        #assert_equal(self.test_node.last_reject.reason, "unexpected-witness")

        # But it should not be permanently marked bad...
        # Resend without witness information.
        self.test_node.send_message(msg_block(block))
        self.test_node.sync_with_ping()
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)

        # Update our utxo list; we spent the first entry.
        self.utxo.pop(0)
        self.utxo.append(UTXO(tx.sha256, 0, tx.vout[0].nValue))
    # Mine enough blocks for segwit's vb state to be 'started'.
    def advance_to_segwit_started(self):
        """Mine to the end of the first versionbits period so segwit moves
        from 'defined' to 'started'."""
        height = self.nodes[0].getblockcount()
        # Will need to rewrite the tests here if we are past the first period
        assert(height < VB_PERIOD - 1)
        # Genesis block is 'defined'.
        assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'defined')
        # Advance to end of period, status should now be 'started'
        self.nodes[0].generate(VB_PERIOD-height-1)
        assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'started')
    # Mine enough blocks to lock in segwit, but don't activate.
    # TODO: we could verify that lockin only happens at the right threshold of
    # signalling blocks, rather than just at the right period boundary.
    def advance_to_segwit_lockin(self):
        """Mine through the signalling period so segwit moves from
        'started' to 'locked_in' exactly at the period boundary."""
        height = self.nodes[0].getblockcount()
        assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'started')
        # Advance to end of period, and verify lock-in happens at the end
        self.nodes[0].generate(VB_PERIOD-1)
        height = self.nodes[0].getblockcount()
        assert((height % VB_PERIOD) == VB_PERIOD - 2)
        assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'started')
        self.nodes[0].generate(1)
        assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'locked_in')
    # Mine enough blocks to activate segwit.
    # TODO: we could verify that activation only happens at the right threshold
    # of signalling blocks, rather than just at the right period boundary.
    def advance_to_segwit_active(self):
        """Mine through one more period so segwit moves from 'locked_in'
        to 'active' exactly at the period boundary."""
        assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'locked_in')
        height = self.nodes[0].getblockcount()
        self.nodes[0].generate(VB_PERIOD - (height%VB_PERIOD) - 2)
        assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'locked_in')
        self.nodes[0].generate(1)
        assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'active')
    # This test can only be run after segwit has activated
    def test_witness_commitments(self):
        """Post-activation rules for the coinbase witness commitment:
        valid commitments (any nonce, burned funds) are accepted, an
        incorrect commitment is rejected, and blocks with no witness
        transactions may omit the commitment entirely."""
        print("\tTesting witness commitments")

        # First try a correct witness commitment.
        block = self.build_next_block()
        add_witness_commitment(block)
        block.solve()

        # Test the test -- witness serialization should be different
        assert(msg_witness_block(block).serialize() != msg_block(block).serialize())

        # This empty block should be valid.
        self.test_node.test_witness_block(block, accepted=True)

        # Try to tweak the nonce
        block_2 = self.build_next_block()
        add_witness_commitment(block_2, nonce=28)
        block_2.solve()

        # The commitment should have changed!
        assert(block_2.vtx[0].vout[-1] != block.vtx[0].vout[-1])

        # This should also be valid.
        self.test_node.test_witness_block(block_2, accepted=True)

        # Now test commitments with actual transactions
        assert (len(self.utxo) > 0)
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))

        # Let's construct a witness program
        witness_program = CScript([OP_TRUE])
        witness_hash = sha256(witness_program)
        scriptPubKey = CScript([OP_0, witness_hash])
        tx.vout.append(CTxOut(self.utxo[0].nValue-1000, scriptPubKey))
        tx.rehash()

        # tx2 will spend tx1, and send back to a regular anyone-can-spend address
        tx2 = CTransaction()
        tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
        tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, witness_program))
        tx2.wit.vtxinwit.append(CTxinWitness())
        tx2.wit.vtxinwit[0].scriptWitness.stack = [witness_program]
        tx2.rehash()

        block_3 = self.build_next_block()
        self.update_witness_block_with_transactions(block_3, [tx, tx2], nonce=1)
        # Add an extra OP_RETURN output that matches the witness commitment template,
        # even though it has extra data after the incorrect commitment.
        # This block should fail.
        block_3.vtx[0].vout.append(CTxOut(0, CScript([OP_RETURN, WITNESS_COMMITMENT_HEADER + ser_uint256(2), 10])))
        block_3.vtx[0].rehash()
        block_3.hashMerkleRoot = block_3.calc_merkle_root()
        block_3.rehash()
        block_3.solve()

        self.test_node.test_witness_block(block_3, accepted=False)

        # Add a different commitment with different nonce, but in the
        # right location, and with some funds burned(!).
        # This should succeed (nValue shouldn't affect finding the
        # witness commitment).
        add_witness_commitment(block_3, nonce=0)
        block_3.vtx[0].vout[0].nValue -= 1
        block_3.vtx[0].vout[-1].nValue += 1
        block_3.vtx[0].rehash()
        block_3.hashMerkleRoot = block_3.calc_merkle_root()
        block_3.rehash()
        assert(len(block_3.vtx[0].vout) == 4) # 3 OP_returns
        block_3.solve()
        self.test_node.test_witness_block(block_3, accepted=True)

        # Finally test that a block with no witness transactions can
        # omit the commitment.
        block_4 = self.build_next_block()
        tx3 = CTransaction()
        tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), b""))
        tx3.vout.append(CTxOut(tx.vout[0].nValue-1000, witness_program))
        tx3.rehash()
        block_4.vtx.append(tx3)
        block_4.hashMerkleRoot = block_4.calc_merkle_root()
        block_4.solve()
        self.test_node.test_witness_block(block_4, with_witness=False, accepted=True)

        # Update available utxo's for use in later test.
        self.utxo.pop(0)
        self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
    def test_block_malleability(self):
        """Witness-only malleation (oversized coinbase witness, wrong
        witness nonce) must not cause a block hash to be permanently
        marked invalid -- the corrected block must still be acceptable."""
        print("\tTesting witness block malleability")

        # Make sure that a block that has too big a virtual size
        # because of a too-large coinbase witness is not permanently
        # marked bad.
        block = self.build_next_block()
        add_witness_commitment(block)
        block.solve()

        block.vtx[0].wit.vtxinwit[0].scriptWitness.stack.append(b'a'*5000000)
        assert(get_virtual_size(block) > MAX_BLOCK_SIZE)

        # We can't send over the p2p network, because this is too big to relay
        # TODO: repeat this test with a block that can be relayed
        self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True)))

        assert(self.nodes[0].getbestblockhash() != block.hash)

        block.vtx[0].wit.vtxinwit[0].scriptWitness.stack.pop()
        assert(get_virtual_size(block) < MAX_BLOCK_SIZE)
        self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True)))

        assert(self.nodes[0].getbestblockhash() == block.hash)

        # Now make sure that malleating the witness nonce doesn't
        # result in a block permanently marked bad.
        block = self.build_next_block()
        add_witness_commitment(block)
        block.solve()

        # Change the nonce -- should not cause the block to be permanently
        # failed
        block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ ser_uint256(1) ]
        self.test_node.test_witness_block(block, accepted=False)

        # Changing the witness nonce doesn't change the block hash
        block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ ser_uint256(0) ]
        self.test_node.test_witness_block(block, accepted=True)
    def test_witness_block_size(self):
        """Blocks are limited by virtual size: a block one weight unit
        over the limit is rejected; shrinking one witness byte makes it
        exactly MAX_BLOCK_SIZE and accepted."""
        print("\tTesting witness block size limit")
        # TODO: Test that non-witness carrying blocks can't exceed 1MB
        # Skipping this test for now; this is covered in p2p-fullblocktest.py

        # Test that witness-bearing blocks are limited at ceil(base + wit/4) <= 1MB.
        block = self.build_next_block()

        assert(len(self.utxo) > 0)

        # Create a P2WSH transaction.
        # The witness program will be a bunch of OP_2DROP's, followed by OP_TRUE.
        # This should give us plenty of room to tweak the spending tx's
        # virtual size.
        NUM_DROPS = 200 # 201 max ops per script!
        NUM_OUTPUTS = 50

        witness_program = CScript([OP_2DROP]*NUM_DROPS + [OP_TRUE])
        witness_hash = uint256_from_str(sha256(witness_program))
        scriptPubKey = CScript([OP_0, ser_uint256(witness_hash)])

        prevout = COutPoint(self.utxo[0].sha256, self.utxo[0].n)
        value = self.utxo[0].nValue

        parent_tx = CTransaction()
        parent_tx.vin.append(CTxIn(prevout, b""))
        child_value = int(value/NUM_OUTPUTS)
        for i in range(NUM_OUTPUTS):
            parent_tx.vout.append(CTxOut(child_value, scriptPubKey))
        parent_tx.vout[0].nValue -= 50000
        assert(parent_tx.vout[0].nValue > 0)
        parent_tx.rehash()

        child_tx = CTransaction()
        for i in range(NUM_OUTPUTS):
            child_tx.vin.append(CTxIn(COutPoint(parent_tx.sha256, i), b""))
        child_tx.vout = [CTxOut(value - 100000, CScript([OP_TRUE]))]
        for i in range(NUM_OUTPUTS):
            child_tx.wit.vtxinwit.append(CTxinWitness())
            child_tx.wit.vtxinwit[-1].scriptWitness.stack = [b'a'*195]*(2*NUM_DROPS) + [witness_program]
        child_tx.rehash()
        self.update_witness_block_with_transactions(block, [parent_tx, child_tx])

        vsize = get_virtual_size(block)
        # Pad witness stack items until the block is one weight unit too big.
        additional_bytes = (MAX_BLOCK_SIZE - vsize)*4
        i = 0
        while additional_bytes > 0:
            # Add some more bytes to each input until we hit MAX_BLOCK_SIZE+1
            extra_bytes = min(additional_bytes+1, 55)
            block.vtx[-1].wit.vtxinwit[int(i/(2*NUM_DROPS))].scriptWitness.stack[i%(2*NUM_DROPS)] = b'a'*(195+extra_bytes)
            additional_bytes -= extra_bytes
            i += 1

        block.vtx[0].vout.pop()  # Remove old commitment
        add_witness_commitment(block)
        block.solve()
        vsize = get_virtual_size(block)
        assert_equal(vsize, MAX_BLOCK_SIZE + 1)
        # Make sure that our test case would exceed the old max-network-message
        # limit
        assert(len(block.serialize(True)) > 2*1024*1024)

        self.test_node.test_witness_block(block, accepted=False)

        # Now resize the second transaction to make the block fit.
        cur_length = len(block.vtx[-1].wit.vtxinwit[0].scriptWitness.stack[0])
        block.vtx[-1].wit.vtxinwit[0].scriptWitness.stack[0] = b'a'*(cur_length-1)
        block.vtx[0].vout.pop()
        add_witness_commitment(block)
        block.solve()
        assert(get_virtual_size(block) == MAX_BLOCK_SIZE)

        self.test_node.test_witness_block(block, accepted=True)

        # Update available utxo's
        self.utxo.pop(0)
        self.utxo.append(UTXO(block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue))
    # submitblock will try to add the nonce automatically, so that mining
    # software doesn't need to worry about doing so itself.
    def test_submit_block(self):
        """submitblock fills in the default witness nonce (0) when it is
        omitted, but never fills in a missing commitment."""
        block = self.build_next_block()

        # Try using a custom nonce and then don't supply it.
        # This shouldn't possibly work.
        add_witness_commitment(block, nonce=1)
        block.vtx[0].wit = CTxWitness() # drop the nonce
        block.solve()
        self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True)))
        assert(self.nodes[0].getbestblockhash() != block.hash)

        # Now redo commitment with the standard nonce, but let bitcoind fill it in.
        add_witness_commitment(block, nonce=0)
        block.vtx[0].wit = CTxWitness()
        block.solve()
        self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True)))
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)

        # This time, add a tx with non-empty witness, but don't supply
        # the commitment.
        block_2 = self.build_next_block()

        add_witness_commitment(block_2)
        block_2.solve()

        # Drop commitment and nonce -- submitblock should not fill in.
        block_2.vtx[0].vout.pop()
        block_2.vtx[0].wit = CTxWitness()

        self.nodes[0].submitblock(bytes_to_hex_str(block_2.serialize(True)))
        # Tip should not advance!
        assert(self.nodes[0].getbestblockhash() != block_2.hash)
    # Consensus tests of extra witness data in a transaction.
    def test_extra_witness_data(self):
        """Extra witness stack items or extra scriptSig data on a witness
        input are consensus failures; extra scriptSig on a non-witness
        input is fine."""
        print("\tTesting extra witness data in tx")

        assert(len(self.utxo) > 0)

        block = self.build_next_block()

        witness_program = CScript([OP_DROP, OP_TRUE])
        witness_hash = sha256(witness_program)
        scriptPubKey = CScript([OP_0, witness_hash])

        # First try extra witness data on a tx that doesn't require a witness
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        tx.vout.append(CTxOut(self.utxo[0].nValue-2000, scriptPubKey))
        tx.vout.append(CTxOut(1000, CScript([OP_TRUE]))) # non-witness output
        tx.wit.vtxinwit.append(CTxinWitness())
        tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([])]
        tx.rehash()
        self.update_witness_block_with_transactions(block, [tx])

        # Extra witness data should not be allowed.
        self.test_node.test_witness_block(block, accepted=False)

        # Try extra signature data.  Ok if we're not spending a witness output.
        block.vtx[1].wit.vtxinwit = []
        block.vtx[1].vin[0].scriptSig = CScript([OP_0])
        block.vtx[1].rehash()
        add_witness_commitment(block)
        block.solve()

        self.test_node.test_witness_block(block, accepted=True)

        # Now try extra witness/signature data on an input that DOES require a
        # witness
        tx2 = CTransaction()
        tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b"")) # witness output
        tx2.vin.append(CTxIn(COutPoint(tx.sha256, 1), b"")) # non-witness
        tx2.vout.append(CTxOut(tx.vout[0].nValue, CScript([OP_TRUE])))
        tx2.wit.vtxinwit.extend([CTxinWitness(), CTxinWitness()])
        tx2.wit.vtxinwit[0].scriptWitness.stack = [ CScript([CScriptNum(1)]), CScript([CScriptNum(1)]), witness_program ]
        tx2.wit.vtxinwit[1].scriptWitness.stack = [ CScript([OP_TRUE]) ]

        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx2])

        # This has extra witness data, so it should fail.
        self.test_node.test_witness_block(block, accepted=False)

        # Now get rid of the extra witness, but add extra scriptSig data
        tx2.vin[0].scriptSig = CScript([OP_TRUE])
        tx2.vin[1].scriptSig = CScript([OP_TRUE])
        tx2.wit.vtxinwit[0].scriptWitness.stack.pop(0)
        tx2.wit.vtxinwit[1].scriptWitness.stack = []
        tx2.rehash()
        add_witness_commitment(block)
        block.solve()

        # This has extra signature data for a witness input, so it should fail.
        self.test_node.test_witness_block(block, accepted=False)

        # Now get rid of the extra scriptsig on the witness input, and verify
        # success (even with extra scriptsig data in the non-witness input)
        tx2.vin[0].scriptSig = b""
        tx2.rehash()
        add_witness_commitment(block)
        block.solve()

        self.test_node.test_witness_block(block, accepted=True)

        # Update utxo for later tests
        self.utxo.pop(0)
        self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
    def test_max_witness_push_length(self):
        ''' Should only allow up to 520 byte pushes in witness stack '''
        print("\tTesting maximum witness push size")
        MAX_SCRIPT_ELEMENT_SIZE = 520
        assert(len(self.utxo))

        block = self.build_next_block()

        witness_program = CScript([OP_DROP, OP_TRUE])
        witness_hash = sha256(witness_program)
        scriptPubKey = CScript([OP_0, witness_hash])

        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        tx.vout.append(CTxOut(self.utxo[0].nValue-1000, scriptPubKey))
        tx.rehash()

        tx2 = CTransaction()
        tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
        tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, CScript([OP_TRUE])))
        tx2.wit.vtxinwit.append(CTxinWitness())
        # First try a 521-byte stack element
        tx2.wit.vtxinwit[0].scriptWitness.stack = [ b'a'*(MAX_SCRIPT_ELEMENT_SIZE+1), witness_program ]
        tx2.rehash()

        self.update_witness_block_with_transactions(block, [tx, tx2])
        self.test_node.test_witness_block(block, accepted=False)

        # Now reduce the length of the stack element
        tx2.wit.vtxinwit[0].scriptWitness.stack[0] = b'a'*(MAX_SCRIPT_ELEMENT_SIZE)

        add_witness_commitment(block)
        block.solve()
        self.test_node.test_witness_block(block, accepted=True)

        # Update the utxo for later tests
        self.utxo.pop()
        self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
    def test_max_witness_program_length(self):
        """A witness program over 10000 bytes cannot be spent; exactly
        10000 bytes can."""
        # Can create witness outputs that are long, but can't be greater than
        # 10k bytes to successfully spend
        print("\tTesting maximum witness program length")
        assert(len(self.utxo))
        MAX_PROGRAM_LENGTH = 10000

        # This program is 19 max pushes (9937 bytes), then 64 more opcode-bytes.
        long_witness_program = CScript([b'a'*520]*19 + [OP_DROP]*63 + [OP_TRUE])
        assert(len(long_witness_program) == MAX_PROGRAM_LENGTH+1)
        long_witness_hash = sha256(long_witness_program)
        long_scriptPubKey = CScript([OP_0, long_witness_hash])

        block = self.build_next_block()

        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        tx.vout.append(CTxOut(self.utxo[0].nValue-1000, long_scriptPubKey))
        tx.rehash()

        tx2 = CTransaction()
        tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
        tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, CScript([OP_TRUE])))
        tx2.wit.vtxinwit.append(CTxinWitness())
        tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a']*44 + [long_witness_program]
        tx2.rehash()

        self.update_witness_block_with_transactions(block, [tx, tx2])

        self.test_node.test_witness_block(block, accepted=False)

        # Try again with one less byte in the witness program
        witness_program = CScript([b'a'*520]*19 + [OP_DROP]*62 + [OP_TRUE])
        assert(len(witness_program) == MAX_PROGRAM_LENGTH)
        witness_hash = sha256(witness_program)
        scriptPubKey = CScript([OP_0, witness_hash])

        tx.vout[0] = CTxOut(tx.vout[0].nValue, scriptPubKey)
        tx.rehash()
        tx2.vin[0].prevout.hash = tx.sha256
        tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a']*43 + [witness_program]
        tx2.rehash()
        block.vtx = [block.vtx[0]]
        self.update_witness_block_with_transactions(block, [tx, tx2])
        self.test_node.test_witness_block(block, accepted=True)

        self.utxo.pop()
        self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
    def test_witness_input_length(self):
        ''' Ensure that vin length must match vtxinwit length '''
        print("\tTesting witness input length")
        assert(len(self.utxo))

        witness_program = CScript([OP_DROP, OP_TRUE])
        witness_hash = sha256(witness_program)
        scriptPubKey = CScript([OP_0, witness_hash])

        # Create a transaction that splits our utxo into many outputs
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        nValue = self.utxo[0].nValue
        for i in range(10):
            tx.vout.append(CTxOut(int(nValue/10), scriptPubKey))
        tx.vout[0].nValue -= 1000
        assert(tx.vout[0].nValue >= 0)

        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx])
        self.test_node.test_witness_block(block, accepted=True)

        # Try various ways to spend tx that should all break.
        # This "broken" transaction serializer will not normalize
        # the length of vtxinwit.
        class BrokenCTransaction(CTransaction):
            def serialize_with_witness(self):
                flags = 0
                if not self.wit.is_null():
                    flags |= 1
                r = b""
                r += struct.pack("<i", self.nVersion)
                if flags:
                    dummy = []
                    r += ser_vector(dummy)
                    r += struct.pack("<B", flags)
                r += ser_vector(self.vin)
                r += ser_vector(self.vout)
                if flags & 1:
                    r += self.wit.serialize()
                r += struct.pack("<I", self.nLockTime)
                return r

        tx2 = BrokenCTransaction()
        for i in range(10):
            tx2.vin.append(CTxIn(COutPoint(tx.sha256, i), b""))
        tx2.vout.append(CTxOut(nValue-3000, CScript([OP_TRUE])))

        # First try using a too long vtxinwit
        for i in range(11):
            tx2.wit.vtxinwit.append(CTxinWitness())
            tx2.wit.vtxinwit[i].scriptWitness.stack = [b'a', witness_program]

        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx2])
        self.test_node.test_witness_block(block, accepted=False)

        # Now try using a too short vtxinwit
        tx2.wit.vtxinwit.pop()
        tx2.wit.vtxinwit.pop()

        block.vtx = [block.vtx[0]]
        self.update_witness_block_with_transactions(block, [tx2])
        self.test_node.test_witness_block(block, accepted=False)

        # Now make one of the intermediate witnesses be incorrect
        tx2.wit.vtxinwit.append(CTxinWitness())
        tx2.wit.vtxinwit[-1].scriptWitness.stack = [b'a', witness_program]
        tx2.wit.vtxinwit[5].scriptWitness.stack = [ witness_program ]

        block.vtx = [block.vtx[0]]
        self.update_witness_block_with_transactions(block, [tx2])
        self.test_node.test_witness_block(block, accepted=False)

        # Fix the broken witness and the block should be accepted.
        tx2.wit.vtxinwit[5].scriptWitness.stack = [b'a', witness_program]
        block.vtx = [block.vtx[0]]
        self.update_witness_block_with_transactions(block, [tx2])
        self.test_node.test_witness_block(block, accepted=True)

        self.utxo.pop()
        self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
def test_witness_tx_relay_before_segwit_activation(self):
    """Relay/getdata behavior for witness transactions pre-activation.

    Verifies that a premature-witness tx is rejected without being added
    to the recently-rejected filter, and that peers without NODE_WITNESS
    receive plain (type-1) tx getdata.
    """
    print("\tTesting relay of witness transactions")
    # Generate a transaction that doesn't require a witness, but send it
    # with a witness. Should be rejected for premature-witness, but should
    # not be added to recently rejected list.
    assert(len(self.utxo))
    tx = CTransaction()
    tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
    tx.vout.append(CTxOut(self.utxo[0].nValue-1000, CScript([OP_TRUE])))
    tx.wit.vtxinwit.append(CTxinWitness())
    tx.wit.vtxinwit[0].scriptWitness.stack = [ b'a' ]
    tx.rehash()

    tx_hash = tx.sha256
    tx_value = tx.vout[0].nValue

    # Verify that if a peer doesn't set nServices to include NODE_WITNESS,
    # the getdata is just for the non-witness portion.
    self.old_node.announce_tx_and_wait_for_getdata(tx)
    assert(self.old_node.last_getdata.inv[0].type == 1)

    # Since we haven't delivered the tx yet, inv'ing the same tx from
    # a witness transaction ought not result in a getdata.
    try:
        self.test_node.announce_tx_and_wait_for_getdata(tx, timeout=2)
        print("Error: duplicate tx getdata!")
        assert(False)
    except AssertionError:
        # Expected: the wait for getdata should time out.
        pass

    # Delivering this transaction with witness should fail (no matter who
    # it's from)
    assert_equal(len(self.nodes[0].getrawmempool()), 0)
    assert_equal(len(self.nodes[1].getrawmempool()), 0)
    self.old_node.test_transaction_acceptance(tx, with_witness=True, accepted=False)
    self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=False)

    # But eliminating the witness should fix it
    self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True)

    # Verify that inv's to test_node come with getdata's for non-witness tx's
    # Just tweak the transaction, announce it, and verify we get a getdata
    # for a normal tx
    tx.vout[0].scriptPubKey = CScript([OP_TRUE, OP_TRUE])
    tx.rehash()
    self.test_node.announce_tx_and_wait_for_getdata(tx)
    assert(self.test_node.last_getdata.inv[0].type == 1)

    # Cleanup: mine the first transaction and update utxo
    self.nodes[0].generate(1)
    assert_equal(len(self.nodes[0].getrawmempool()), 0)

    self.utxo.pop(0)
    self.utxo.append(UTXO(tx_hash, 0, tx_value))
# After segwit activates, verify that mempool:
# - rejects transactions with unnecessary/extra witnesses
# - accepts transactions with valid witnesses
# and that witness transactions are relayed to non-upgraded peers.
def test_tx_relay_after_segwit_activation(self):
    """Check mempool witness policy and relay once segwit is active."""
    print("\tTesting relay of witness transactions")
    # Generate a transaction that doesn't require a witness, but send it
    # with a witness. Should be rejected because we can't use a witness
    # when spending a non-witness output.
    assert(len(self.utxo))
    tx = CTransaction()
    tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
    tx.vout.append(CTxOut(self.utxo[0].nValue-1000, CScript([OP_TRUE])))
    tx.wit.vtxinwit.append(CTxinWitness())
    tx.wit.vtxinwit[0].scriptWitness.stack = [ b'a' ]
    tx.rehash()

    tx_hash = tx.sha256
    tx_value = tx.vout[0].nValue

    # Verify that unnecessary witnesses are rejected.
    self.test_node.announce_tx_and_wait_for_getdata(tx)
    assert_equal(len(self.nodes[0].getrawmempool()), 0)
    self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=False)

    # Verify that removing the witness succeeds.
    # Re-announcing won't result in a getdata for ~2.5 minutes, so just
    # deliver the modified transaction.
    self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True)

    # Now try to add extra witness data to a valid witness tx.
    witness_program = CScript([OP_TRUE])
    witness_hash = sha256(witness_program)
    scriptPubKey = CScript([OP_0, witness_hash])
    tx2 = CTransaction()
    tx2.vin.append(CTxIn(COutPoint(tx_hash, 0), b""))
    tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, scriptPubKey))
    tx2.rehash()

    # tx3 spends tx2's witness output with one stack item too many.
    tx3 = CTransaction()
    tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), b""))
    tx3.vout.append(CTxOut(tx2.vout[0].nValue-1000, CScript([OP_TRUE])))
    tx3.wit.vtxinwit.append(CTxinWitness())
    tx3.wit.vtxinwit[0].scriptWitness.stack = [CScript([CScriptNum(1)]), witness_program ]
    tx3.rehash()
    self.test_node.test_transaction_acceptance(tx2, with_witness=True, accepted=True)
    self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=False)

    # Get rid of the extra witness, and verify acceptance.
    tx3.wit.vtxinwit[0].scriptWitness.stack = [ witness_program ]
    # Also check that old_node gets a tx announcement, even though this is
    # a witness transaction.
    self.old_node.wait_for_inv(CInv(1, tx2.sha256)) # wait until tx2 was inv'ed
    self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=True)
    self.old_node.wait_for_inv(CInv(1, tx3.sha256))

    # Test that getrawtransaction returns correct witness information
    # hash, size, vsize
    raw_tx = self.nodes[0].getrawtransaction(tx3.hash, 1)
    assert_equal(int(raw_tx["hash"], 16), tx3.calc_sha256(True))
    assert_equal(raw_tx["size"], len(tx3.serialize_with_witness()))
    # vsize = ceil(weight / 4), where weight = 3*stripped_size + total_size.
    vsize = (len(tx3.serialize_with_witness()) + 3*len(tx3.serialize_without_witness()) + 3) / 4
    assert_equal(raw_tx["vsize"], vsize)
    assert_equal(len(raw_tx["vin"][0]["txinwitness"]), 1)
    assert_equal(raw_tx["vin"][0]["txinwitness"][0], hexlify(witness_program).decode('ascii'))
    assert(vsize != raw_tx["size"])

    # Cleanup: mine the transactions and update utxo for next test
    self.nodes[0].generate(1)
    assert_equal(len(self.nodes[0].getrawmempool()), 0)

    self.utxo.pop(0)
    self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
# Test that block requests to NODE_WITNESS peer are with MSG_WITNESS_FLAG
# This is true regardless of segwit activation.
# Also test that we don't ask for blocks from unupgraded peers
def test_block_relay(self, segwit_activated):
    """Exercise block getdata/relay behavior for witness-capable peers.

    segwit_activated -- whether segwit rules are active; controls the
    expected getdata type and the pre/post-activation block checks.
    """
    print("\tTesting block relay")

    blocktype = 2|MSG_WITNESS_FLAG if segwit_activated else 2

    # test_node has set NODE_WITNESS, so all getdata requests should be for
    # witness blocks.
    # Test announcing a block via inv results in a getdata, and that
    # announcing a version 4 or random VB block with a header results in a getdata
    block1 = self.build_next_block()
    block1.solve()

    self.test_node.announce_block_and_wait_for_getdata(block1, use_header=False)
    assert(self.test_node.last_getdata.inv[0].type == blocktype)
    self.test_node.test_witness_block(block1, True)

    block2 = self.build_next_block(nVersion=4)
    block2.solve()

    self.test_node.announce_block_and_wait_for_getdata(block2, use_header=True)
    assert(self.test_node.last_getdata.inv[0].type == blocktype)
    self.test_node.test_witness_block(block2, True)

    block3 = self.build_next_block(nVersion=(VB_TOP_BITS | (1<<15)))
    block3.solve()
    self.test_node.announce_block_and_wait_for_getdata(block3, use_header=True)
    assert(self.test_node.last_getdata.inv[0].type == blocktype)
    self.test_node.test_witness_block(block3, True)

    # Check that we can getdata for witness blocks or regular blocks,
    # and the right thing happens.
    if not segwit_activated:
        # Before activation, we should be able to request old blocks with
        # or without witness, and they should be the same.
        chain_height = self.nodes[0].getblockcount()
        # Pick 10 random blocks on main chain, and verify that getdata's
        # for MSG_BLOCK, MSG_WITNESS_BLOCK, and rpc getblock() are equal.
        all_heights = list(range(chain_height+1))
        random.shuffle(all_heights)
        all_heights = all_heights[0:10]
        for height in all_heights:
            block_hash = self.nodes[0].getblockhash(height)
            rpc_block = self.nodes[0].getblock(block_hash, False)
            block_hash = int(block_hash, 16)
            block = self.test_node.request_block(block_hash, 2)
            wit_block = self.test_node.request_block(block_hash, 2|MSG_WITNESS_FLAG)
            assert_equal(block.serialize(True), wit_block.serialize(True))
            assert_equal(block.serialize(), hex_str_to_bytes(rpc_block))
    else:
        # After activation, witness blocks and non-witness blocks should
        # be different. Verify rpc getblock() returns witness blocks, while
        # getdata respects the requested type.
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [])
        # This gives us a witness commitment.
        assert(len(block.vtx[0].wit.vtxinwit) == 1)
        assert(len(block.vtx[0].wit.vtxinwit[0].scriptWitness.stack) == 1)
        self.test_node.test_witness_block(block, accepted=True)
        # Now try to retrieve it...
        rpc_block = self.nodes[0].getblock(block.hash, False)
        non_wit_block = self.test_node.request_block(block.sha256, 2)
        wit_block = self.test_node.request_block(block.sha256, 2|MSG_WITNESS_FLAG)
        assert_equal(wit_block.serialize(True), hex_str_to_bytes(rpc_block))
        assert_equal(wit_block.serialize(False), non_wit_block.serialize())
        assert_equal(wit_block.serialize(True), block.serialize(True))

        # Test size, vsize, cost
        rpc_details = self.nodes[0].getblock(block.hash, True)
        assert_equal(rpc_details["size"], len(block.serialize(True)))
        assert_equal(rpc_details["strippedsize"], len(block.serialize(False)))
        cost = 3*len(block.serialize(False)) + len(block.serialize(True))
        assert_equal(rpc_details["cost"], cost)

    # Upgraded node should not ask for blocks from unupgraded
    block4 = self.build_next_block(nVersion=4)
    block4.solve()
    self.old_node.getdataset = set()
    # Blocks can be requested via direct-fetch (immediately upon processing the announcement)
    # or via parallel download (with an indeterminate delay from processing the announcement)
    # so to test that a block is NOT requested, we could guess a time period to sleep for,
    # and then check. We can avoid the sleep() by taking advantage of transaction getdata's
    # being processed after block getdata's, and announce a transaction as well,
    # and then check to see if that particular getdata has been received.
    self.old_node.announce_block(block4, use_header=False)
    self.old_node.announce_tx_and_wait_for_getdata(block4.vtx[0])
    assert(block4.sha256 not in self.old_node.getdataset)
# Verify that future segwit upgraded transactions are non-standard,
# but valid in blocks. Can run this before and after segwit activation.
def test_segwit_versions(self):
    """Standardness/consensus checks for witness versions 0 through 16."""
    print("\tTesting standardness/consensus for segwit versions (0-16)")
    assert(len(self.utxo))
    NUM_TESTS = 17 # will test OP_0, OP_1, ..., OP_16

    if (len(self.utxo) < NUM_TESTS):
        # Not enough utxos yet: split the first one into NUM_TESTS outputs.
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        split_value = (self.utxo[0].nValue - 4000) // NUM_TESTS
        for i in range(NUM_TESTS):
            tx.vout.append(CTxOut(split_value, CScript([OP_TRUE])))
        tx.rehash()
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx])
        self.test_node.test_witness_block(block, accepted=True)
        self.utxo.pop(0)
        for i in range(NUM_TESTS):
            self.utxo.append(UTXO(tx.sha256, i, split_value))

    sync_blocks(self.nodes)
    temp_utxo = []
    tx = CTransaction()
    count = 0
    witness_program = CScript([OP_TRUE])
    witness_hash = sha256(witness_program)
    assert_equal(len(self.nodes[1].getrawmempool()), 0)
    for version in list(range(OP_1, OP_16+1)) + [OP_0]:
        count += 1
        # First try to spend to a future version segwit scriptPubKey.
        scriptPubKey = CScript([CScriptOp(version), witness_hash])
        tx.vin = [CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")]
        tx.vout = [CTxOut(self.utxo[0].nValue-1000, scriptPubKey)]
        tx.rehash()
        # std_node enforces standardness (fRequireStandard) and rejects;
        # test_node accepts.
        self.std_node.test_transaction_acceptance(tx, with_witness=True, accepted=False)
        self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=True)
        self.utxo.pop(0)
        temp_utxo.append(UTXO(tx.sha256, 0, tx.vout[0].nValue))

    self.nodes[0].generate(1) # Mine all the transactions
    sync_blocks(self.nodes)
    assert(len(self.nodes[0].getrawmempool()) == 0)

    # Finally, verify that version 0 -> version 1 transactions
    # are non-standard
    scriptPubKey = CScript([CScriptOp(OP_1), witness_hash])
    tx2 = CTransaction()
    tx2.vin = [CTxIn(COutPoint(tx.sha256, 0), b"")]
    tx2.vout = [CTxOut(tx.vout[0].nValue-1000, scriptPubKey)]
    tx2.wit.vtxinwit.append(CTxinWitness())
    tx2.wit.vtxinwit[0].scriptWitness.stack = [ witness_program ]
    tx2.rehash()
    # Gets accepted to test_node, because standardness of outputs isn't
    # checked with fRequireStandard
    self.test_node.test_transaction_acceptance(tx2, with_witness=True, accepted=True)
    self.std_node.test_transaction_acceptance(tx2, with_witness=True, accepted=False)
    temp_utxo.pop() # last entry in temp_utxo was the output we just spent
    temp_utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))

    # Spend everything in temp_utxo back to an OP_TRUE output.
    tx3 = CTransaction()
    total_value = 0
    for i in temp_utxo:
        tx3.vin.append(CTxIn(COutPoint(i.sha256, i.n), b""))
        tx3.wit.vtxinwit.append(CTxinWitness())
        total_value += i.nValue
        tx3.wit.vtxinwit[-1].scriptWitness.stack = [witness_program]
    tx3.vout.append(CTxOut(total_value - 1000, CScript([OP_TRUE])))
    tx3.rehash()
    # Spending a higher version witness output is not allowed by policy,
    # even with fRequireStandard=false.
    self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=False)
    self.test_node.sync_with_ping()
    with mininode_lock:
        assert(b"reserved for soft-fork upgrades" in self.test_node.last_reject.reason)

    # Building a block with the transaction must be valid, however.
    block = self.build_next_block()
    self.update_witness_block_with_transactions(block, [tx2, tx3])
    self.test_node.test_witness_block(block, accepted=True)
    sync_blocks(self.nodes)

    # Add utxo to our list
    self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
def test_premature_coinbase_witness_spend(self):
    """Verify coinbase witness outputs obey the coinbase maturity rule."""
    print("\tTesting premature coinbase witness spend")
    block = self.build_next_block()
    # Change the output of the block to be a witness output.
    witness_program = CScript([OP_TRUE])
    witness_hash = sha256(witness_program)
    scriptPubKey = CScript([OP_0, witness_hash])
    block.vtx[0].vout[0].scriptPubKey = scriptPubKey
    # This next line will rehash the coinbase and update the merkle
    # root, and solve.
    self.update_witness_block_with_transactions(block, [])
    self.test_node.test_witness_block(block, accepted=True)

    # Spend the coinbase witness output.
    spend_tx = CTransaction()
    spend_tx.vin = [CTxIn(COutPoint(block.vtx[0].sha256, 0), b"")]
    spend_tx.vout = [CTxOut(block.vtx[0].vout[0].nValue, witness_program)]
    spend_tx.wit.vtxinwit.append(CTxinWitness())
    spend_tx.wit.vtxinwit[0].scriptWitness.stack = [ witness_program ]
    spend_tx.rehash()

    # Now test a premature spend.
    self.nodes[0].generate(98)
    sync_blocks(self.nodes)
    block2 = self.build_next_block()
    self.update_witness_block_with_transactions(block2, [spend_tx])
    self.test_node.test_witness_block(block2, accepted=False)

    # Advancing one more block should allow the spend.
    self.nodes[0].generate(1)
    block2 = self.build_next_block()
    self.update_witness_block_with_transactions(block2, [spend_tx])
    self.test_node.test_witness_block(block2, accepted=True)
    sync_blocks(self.nodes)
def test_signature_version_1(self):
    """Exercise the segwit (BIP143) signature-hash scheme.

    Covers all hashtype combinations, wrong input amounts, random
    multi-input/multi-output spends, and witness-v0 P2PKH.
    """
    print("\tTesting segwit signature hash version 1")
    key = CECKey()
    key.set_secretbytes(b"9")
    pubkey = CPubKey(key.get_pubkey())

    witness_program = CScript([pubkey, CScriptOp(OP_CHECKSIG)])
    witness_hash = sha256(witness_program)
    scriptPubKey = CScript([OP_0, witness_hash])

    # First create a witness output for use in the tests.
    assert(len(self.utxo))
    tx = CTransaction()
    tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
    tx.vout.append(CTxOut(self.utxo[0].nValue-1000, scriptPubKey))
    tx.rehash()

    self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=True)
    # Mine this transaction in preparation for following tests.
    block = self.build_next_block()
    self.update_witness_block_with_transactions(block, [tx])
    self.test_node.test_witness_block(block, accepted=True)
    sync_blocks(self.nodes)
    self.utxo.pop(0)

    # Add signature for a P2PK witness program.
    def sign_P2PK_witness_input(script, txTo, inIdx, hashtype, value, key):
        # Compute the BIP143 sighash for the given input/amount, sign it,
        # and install [signature, script] as that input's witness stack.
        tx_hash = SegwitVersion1SignatureHash(script, txTo, inIdx, hashtype, value)
        signature = key.sign(tx_hash) + chr(hashtype).encode('latin-1')
        txTo.wit.vtxinwit[inIdx].scriptWitness.stack = [signature, script]
        txTo.rehash()

    # Test each hashtype
    prev_utxo = UTXO(tx.sha256, 0, tx.vout[0].nValue)
    for sigflag in [ 0, SIGHASH_ANYONECANPAY ]:
        for hashtype in [SIGHASH_ALL, SIGHASH_NONE, SIGHASH_SINGLE]:
            hashtype |= sigflag
            block = self.build_next_block()
            tx = CTransaction()
            tx.vin.append(CTxIn(COutPoint(prev_utxo.sha256, prev_utxo.n), b""))
            tx.vout.append(CTxOut(prev_utxo.nValue - 1000, scriptPubKey))
            tx.wit.vtxinwit.append(CTxinWitness())
            # Too-large input value
            sign_P2PK_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue+1, key)
            self.update_witness_block_with_transactions(block, [tx])
            self.test_node.test_witness_block(block, accepted=False)

            # Too-small input value
            sign_P2PK_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue-1, key)
            block.vtx.pop() # remove last tx
            self.update_witness_block_with_transactions(block, [tx])
            self.test_node.test_witness_block(block, accepted=False)

            # Now try correct value
            sign_P2PK_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue, key)
            block.vtx.pop()
            self.update_witness_block_with_transactions(block, [tx])
            self.test_node.test_witness_block(block, accepted=True)

            prev_utxo = UTXO(tx.sha256, 0, tx.vout[0].nValue)

    # Test combinations of signature hashes.
    # Split the utxo into a lot of outputs.
    # Randomly choose up to 10 to spend, sign with different hashtypes, and
    # output to a random number of outputs.  Repeat NUM_TESTS times.
    # Ensure that we've tested a situation where we use SIGHASH_SINGLE with
    # an input index > number of outputs.
    NUM_TESTS = 500
    temp_utxos = []
    tx = CTransaction()
    tx.vin.append(CTxIn(COutPoint(prev_utxo.sha256, prev_utxo.n), b""))
    split_value = prev_utxo.nValue // NUM_TESTS
    for i in range(NUM_TESTS):
        tx.vout.append(CTxOut(split_value, scriptPubKey))
    tx.wit.vtxinwit.append(CTxinWitness())
    sign_P2PK_witness_input(witness_program, tx, 0, SIGHASH_ALL, prev_utxo.nValue, key)
    for i in range(NUM_TESTS):
        temp_utxos.append(UTXO(tx.sha256, i, split_value))

    block = self.build_next_block()
    self.update_witness_block_with_transactions(block, [tx])
    self.test_node.test_witness_block(block, accepted=True)

    block = self.build_next_block()
    used_sighash_single_out_of_bounds = False
    for i in range(NUM_TESTS):
        # Choose random number of inputs to use.
        num_inputs = random.randint(1, 10)
        # Create a slight bias for producing more utxos
        num_outputs = random.randint(1, 11)
        random.shuffle(temp_utxos)
        assert(len(temp_utxos) > num_inputs)
        tx = CTransaction()
        total_value = 0
        for i in range(num_inputs):
            tx.vin.append(CTxIn(COutPoint(temp_utxos[i].sha256, temp_utxos[i].n), b""))
            tx.wit.vtxinwit.append(CTxinWitness())
            total_value += temp_utxos[i].nValue
        split_value = total_value // num_outputs
        for i in range(num_outputs):
            tx.vout.append(CTxOut(split_value, scriptPubKey))
        for i in range(num_inputs):
            # Now try to sign each input, using a random hashtype.
            anyonecanpay = 0
            if random.randint(0, 1):
                anyonecanpay = SIGHASH_ANYONECANPAY
            hashtype = random.randint(1, 3) | anyonecanpay
            sign_P2PK_witness_input(witness_program, tx, i, hashtype, temp_utxos[i].nValue, key)
            if (hashtype == SIGHASH_SINGLE and i >= num_outputs):
                used_sighash_single_out_of_bounds = True
        tx.rehash()
        for i in range(num_outputs):
            temp_utxos.append(UTXO(tx.sha256, i, split_value))
        # Drop the utxos we just consumed.
        temp_utxos = temp_utxos[num_inputs:]
        block.vtx.append(tx)

        # Test the block periodically, if we're close to maxblocksize
        if (get_virtual_size(block) > MAX_BLOCK_SIZE - 1000):
            self.update_witness_block_with_transactions(block, [])
            self.test_node.test_witness_block(block, accepted=True)
            block = self.build_next_block()

    if (not used_sighash_single_out_of_bounds):
        print("WARNING: this test run didn't attempt SIGHASH_SINGLE with out-of-bounds index value")
    # Test the transactions we've added to the block
    if (len(block.vtx) > 1):
        self.update_witness_block_with_transactions(block, [])
        self.test_node.test_witness_block(block, accepted=True)

    # Now test witness version 0 P2PKH transactions
    pubkeyhash = hash160(pubkey)
    scriptPKH = CScript([OP_0, pubkeyhash])
    tx = CTransaction()
    tx.vin.append(CTxIn(COutPoint(temp_utxos[0].sha256, temp_utxos[0].n), b""))
    tx.vout.append(CTxOut(temp_utxos[0].nValue, scriptPKH))
    tx.wit.vtxinwit.append(CTxinWitness())
    sign_P2PK_witness_input(witness_program, tx, 0, SIGHASH_ALL, temp_utxos[0].nValue, key)
    tx2 = CTransaction()
    tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
    tx2.vout.append(CTxOut(tx.vout[0].nValue, CScript([OP_TRUE])))

    script = CScript([CScriptOp(OP_DUP), CScriptOp(OP_HASH160), pubkeyhash, CScriptOp(OP_EQUALVERIFY), CScriptOp(OP_CHECKSIG)])
    sig_hash = SegwitVersion1SignatureHash(script, tx2, 0, SIGHASH_ALL, tx.vout[0].nValue)
    signature = key.sign(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL

    # Check that we can't have a scriptSig
    tx2.vin[0].scriptSig = CScript([signature, pubkey])
    block = self.build_next_block()
    self.update_witness_block_with_transactions(block, [tx, tx2])
    self.test_node.test_witness_block(block, accepted=False)

    # Move the signature to the witness.
    block.vtx.pop()
    tx2.wit.vtxinwit.append(CTxinWitness())
    tx2.wit.vtxinwit[0].scriptWitness.stack = [signature, pubkey]
    tx2.vin[0].scriptSig = b""
    tx2.rehash()

    self.update_witness_block_with_transactions(block, [tx2])
    self.test_node.test_witness_block(block, accepted=True)

    temp_utxos.pop(0)

    # Update self.utxos for later tests. Just spend everything in
    # temp_utxos to a corresponding entry in self.utxos
    tx = CTransaction()
    index = 0
    for i in temp_utxos:
        # Just spend to our usual anyone-can-spend output
        # Use SIGHASH_SINGLE|SIGHASH_ANYONECANPAY so we can build up
        # the signatures as we go.
        tx.vin.append(CTxIn(COutPoint(i.sha256, i.n), b""))
        tx.vout.append(CTxOut(i.nValue, CScript([OP_TRUE])))
        tx.wit.vtxinwit.append(CTxinWitness())
        sign_P2PK_witness_input(witness_program, tx, index, SIGHASH_SINGLE|SIGHASH_ANYONECANPAY, i.nValue, key)
        index += 1
    block = self.build_next_block()
    self.update_witness_block_with_transactions(block, [tx])
    self.test_node.test_witness_block(block, accepted=True)

    for i in range(len(tx.vout)):
        self.utxo.append(UTXO(tx.sha256, i, tx.vout[i].nValue))
# Test P2SH wrapped witness programs.
def test_p2sh_witness(self, segwit_activated):
    """Check spending of P2SH-wrapped witness programs pre/post activation."""
    print("\tTesting P2SH witness transactions")
    assert(len(self.utxo))

    # Prepare the p2sh-wrapped witness output
    witness_program = CScript([OP_DROP, OP_TRUE])
    witness_hash = sha256(witness_program)
    p2wsh_pubkey = CScript([OP_0, witness_hash])
    p2sh_witness_hash = hash160(p2wsh_pubkey)
    scriptPubKey = CScript([OP_HASH160, p2sh_witness_hash, OP_EQUAL])
    scriptSig = CScript([p2wsh_pubkey]) # a push of the redeem script

    # Fund the P2SH output
    tx = CTransaction()
    tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
    tx.vout.append(CTxOut(self.utxo[0].nValue-1000, scriptPubKey))
    tx.rehash()

    # Verify mempool acceptance and block validity
    self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True)
    block = self.build_next_block()
    self.update_witness_block_with_transactions(block, [tx])
    self.test_node.test_witness_block(block, accepted=True, with_witness=segwit_activated)
    sync_blocks(self.nodes)

    # Now test attempts to spend the output.
    spend_tx = CTransaction()
    spend_tx.vin.append(CTxIn(COutPoint(tx.sha256, 0), scriptSig))
    spend_tx.vout.append(CTxOut(tx.vout[0].nValue-1000, CScript([OP_TRUE])))
    spend_tx.rehash()

    # This transaction should not be accepted into the mempool pre- or
    # post-segwit. Mempool acceptance will use SCRIPT_VERIFY_WITNESS which
    # will require a witness to spend a witness program regardless of
    # segwit activation.  Note that older bitcoind's that are not
    # segwit-aware would also reject this for failing CLEANSTACK.
    self.test_node.test_transaction_acceptance(spend_tx, with_witness=False, accepted=False)

    # Try to put the witness script in the scriptSig, should also fail.
    spend_tx.vin[0].scriptSig = CScript([p2wsh_pubkey, b'a'])
    spend_tx.rehash()
    self.test_node.test_transaction_acceptance(spend_tx, with_witness=False, accepted=False)

    # Now put the witness script in the witness, should succeed after
    # segwit activates.
    spend_tx.vin[0].scriptSig = scriptSig
    spend_tx.rehash()
    spend_tx.wit.vtxinwit.append(CTxinWitness())
    spend_tx.wit.vtxinwit[0].scriptWitness.stack = [ b'a', witness_program ]

    # Verify mempool acceptance
    self.test_node.test_transaction_acceptance(spend_tx, with_witness=True, accepted=segwit_activated)
    block = self.build_next_block()
    self.update_witness_block_with_transactions(block, [spend_tx])

    # If we're before activation, then sending this without witnesses
    # should be valid.  If we're after activation, then sending this with
    # witnesses should be valid.
    if segwit_activated:
        self.test_node.test_witness_block(block, accepted=True)
    else:
        self.test_node.test_witness_block(block, accepted=True, with_witness=False)

    # Update self.utxo
    self.utxo.pop(0)
    self.utxo.append(UTXO(spend_tx.sha256, 0, spend_tx.vout[0].nValue))
# Test the behavior of starting up a segwit-aware node after the softfork
# has activated.  As segwit requires different block data than pre-segwit
# nodes would have stored, this requires special handling.
# To enable this test, pass --oldbinary=<path-to-pre-segwit-bitcoind> to
# the test.
def test_upgrade_after_activation(self, node, node_id):
    """Restart node_id with the new binary post-activation and verify its
    chain (every block hash and block) matches node0's."""
    print("\tTesting software upgrade after softfork activation")

    assert(node_id != 0) # node0 is assumed to be a segwit-active bitcoind

    # Bring all nodes into sync before restarting.
    sync_blocks(self.nodes)

    # Restart with the new binary
    stop_node(node, node_id)
    self.nodes[node_id] = start_node(node_id, self.options.tmpdir, ["-debug"])
    connect_nodes(self.nodes[0], node_id)
    sync_blocks(self.nodes)

    # Make sure that this peer thinks segwit has activated.
    assert(get_bip9_status(node, 'segwit')['status'] == "active")

    # Walk from tip to genesis, comparing hashes and full blocks to node0.
    for height in range(node.getblockcount(), -1, -1):
        hash_at_height = node.getblockhash(height)
        assert_equal(hash_at_height, self.nodes[0].getblockhash(height))
        assert_equal(self.nodes[0].getblock(hash_at_height), node.getblock(hash_at_height))
def test_witness_sigops(self):
    '''Ensure sigop counting is correct inside witnesses.'''
    print("\tTesting sigops limit")

    assert(len(self.utxo))

    # Keep this under MAX_OPS_PER_SCRIPT (201)
    witness_program = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKMULTISIG]*5 + [OP_CHECKSIG]*193 + [OP_ENDIF])
    witness_hash = sha256(witness_program)
    scriptPubKey = CScript([OP_0, witness_hash])

    # Each CHECKMULTISIG counts as 20 sigops; each CHECKSIG as 1.
    sigops_per_script = 20*5 + 193*1
    # We'll produce 2 extra outputs, one with a program that would take us
    # over max sig ops, and one with a program that would exactly reach max
    # sig ops
    outputs = (MAX_SIGOP_COST // sigops_per_script) + 2
    extra_sigops_available = MAX_SIGOP_COST % sigops_per_script

    # We chose the number of checkmultisigs/checksigs to make this work:
    assert(extra_sigops_available < 100) # steer clear of MAX_OPS_PER_SCRIPT

    # This script, when spent with the first
    # N(=MAX_SIGOP_COST//sigops_per_script) outputs of our transaction,
    # would push us just over the block sigop limit.
    witness_program_toomany = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKSIG]*(extra_sigops_available + 1) + [OP_ENDIF])
    witness_hash_toomany = sha256(witness_program_toomany)
    scriptPubKey_toomany = CScript([OP_0, witness_hash_toomany])

    # If we spend this script instead, we would exactly reach our sigop
    # limit (for witness sigops).
    witness_program_justright = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKSIG]*(extra_sigops_available) + [OP_ENDIF])
    witness_hash_justright = sha256(witness_program_justright)
    scriptPubKey_justright = CScript([OP_0, witness_hash_justright])

    # First split our available utxo into a bunch of outputs
    split_value = self.utxo[0].nValue // outputs
    tx = CTransaction()
    tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
    for i in range(outputs):
        tx.vout.append(CTxOut(split_value, scriptPubKey))
    tx.vout[-2].scriptPubKey = scriptPubKey_toomany
    tx.vout[-1].scriptPubKey = scriptPubKey_justright
    tx.rehash()

    block_1 = self.build_next_block()
    self.update_witness_block_with_transactions(block_1, [tx])
    self.test_node.test_witness_block(block_1, accepted=True)

    tx2 = CTransaction()
    # If we try to spend the first n-1 outputs from tx, that should be
    # too many sigops.
    total_value = 0
    for i in range(outputs-1):
        tx2.vin.append(CTxIn(COutPoint(tx.sha256, i), b""))
        tx2.wit.vtxinwit.append(CTxinWitness())
        tx2.wit.vtxinwit[-1].scriptWitness.stack = [ witness_program ]
        total_value += tx.vout[i].nValue
    # The last appended input spends the "toomany" output instead.
    tx2.wit.vtxinwit[-1].scriptWitness.stack = [ witness_program_toomany ]
    tx2.vout.append(CTxOut(total_value, CScript([OP_TRUE])))
    tx2.rehash()

    block_2 = self.build_next_block()
    self.update_witness_block_with_transactions(block_2, [tx2])
    self.test_node.test_witness_block(block_2, accepted=False)

    # Try dropping the last input in tx2, and add an output that has
    # too many sigops (contributing to legacy sigop count).
    checksig_count = (extra_sigops_available // 4) + 1
    scriptPubKey_checksigs = CScript([OP_CHECKSIG]*checksig_count)
    tx2.vout.append(CTxOut(0, scriptPubKey_checksigs))
    tx2.vin.pop()
    tx2.wit.vtxinwit.pop()
    tx2.vout[0].nValue -= tx.vout[-2].nValue
    tx2.rehash()

    block_3 = self.build_next_block()
    self.update_witness_block_with_transactions(block_3, [tx2])
    self.test_node.test_witness_block(block_3, accepted=False)

    # If we drop the last checksig in this output, the tx should succeed.
    block_4 = self.build_next_block()
    tx2.vout[-1].scriptPubKey = CScript([OP_CHECKSIG]*(checksig_count-1))
    tx2.rehash()
    self.update_witness_block_with_transactions(block_4, [tx2])
    self.test_node.test_witness_block(block_4, accepted=True)

    # Reset the tip back down for the next test
    sync_blocks(self.nodes)
    for x in self.nodes:
        x.invalidateblock(block_4.hash)

    # Try replacing the last input of tx2 to be spending the last
    # output of tx
    block_5 = self.build_next_block()
    tx2.vout.pop()
    tx2.vin.append(CTxIn(COutPoint(tx.sha256, outputs-1), b""))
    tx2.wit.vtxinwit.append(CTxinWitness())
    tx2.wit.vtxinwit[-1].scriptWitness.stack = [ witness_program_justright ]
    tx2.rehash()
    self.update_witness_block_with_transactions(block_5, [tx2])
    self.test_node.test_witness_block(block_5, accepted=True)

    # TODO: test p2sh sigop counting
def test_getblocktemplate_before_lockin(self):
    """Check getblocktemplate's segwit versionbit handling pre-lockin:
    unset by default, set when the segwit rule is requested."""
    print("\tTesting getblocktemplate setting of segwit versionbit (before lockin)")

    node = self.nodes[0]
    segwit_bit = 1 << VB_WITNESS_BIT

    # Default template: the segwit version bit must not be signaled.
    version = (node.getblocktemplate())['version']
    assert_equal(version & segwit_bit, 0)

    # Workaround:
    # Can either change the tip, or change the mempool and wait 5 seconds
    # to trigger a recomputation of getblocktemplate.
    node.sendtoaddress(node.getnewaddress(), 1)
    # Using mocktime lets us avoid sleep()
    node.setmocktime(int(time.time())+10)

    # Template with the segwit rule requested: bit must be signaled.
    version = node.getblocktemplate({"rules" : ["segwit"]})['version']
    assert(version & segwit_bit != 0)

    node.setmocktime(0) # undo mocktime
def run_test(self):
    """Top-level driver: wires up the p2p connections, then walks the segwit
    deployment through started -> locked_in -> active, running the relevant
    sub-tests at each stage. Statement order matters throughout: connections
    must exist before NetworkThread starts, and each test assumes the chain
    state left by the previous one."""
    # Setup the p2p connections and start up the network thread.
    self.test_node = TestNode() # sets NODE_WITNESS|NODE_NETWORK
    self.old_node = TestNode() # only NODE_NETWORK
    self.std_node = TestNode() # for testing node1 (fRequireStandard=true)
    self.p2p_connections = [self.test_node, self.old_node]
    self.connections = []
    # test_node and old_node both talk to node0; std_node talks to node1.
    self.connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.test_node, services=NODE_NETWORK|NODE_WITNESS))
    self.connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.old_node, services=NODE_NETWORK))
    self.connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1], self.std_node, services=NODE_NETWORK|NODE_WITNESS))
    self.test_node.add_connection(self.connections[0])
    self.old_node.add_connection(self.connections[1])
    self.std_node.add_connection(self.connections[2])
    NetworkThread().start() # Start up network handling in another thread
    # Keep a place to store utxo's that can be used in later tests
    self.utxo = []
    # Test logic begins here
    self.test_node.wait_for_verack()
    print("\nStarting tests before segwit lock in:")
    self.test_witness_services() # Verifies NODE_WITNESS
    self.test_non_witness_transaction() # non-witness tx's are accepted
    self.test_unnecessary_witness_before_segwit_activation()
    self.test_block_relay(segwit_activated=False)
    # Advance to segwit being 'started'
    self.advance_to_segwit_started()
    self.test_getblocktemplate_before_lockin()
    sync_blocks(self.nodes)
    # At lockin, nothing should change.
    print("\nTesting behavior post lockin, pre-activation")
    self.advance_to_segwit_lockin()
    # Retest unnecessary witnesses
    self.test_unnecessary_witness_before_segwit_activation()
    self.test_witness_tx_relay_before_segwit_activation()
    self.test_block_relay(segwit_activated=False)
    self.test_p2sh_witness(segwit_activated=False)
    sync_blocks(self.nodes)
    # Now activate segwit
    print("\nTesting behavior after segwit activation")
    self.advance_to_segwit_active()
    sync_blocks(self.nodes)
    # Test P2SH witness handling again
    self.test_p2sh_witness(segwit_activated=True)
    self.test_witness_commitments()
    self.test_block_malleability()
    self.test_witness_block_size()
    self.test_submit_block()
    self.test_extra_witness_data()
    self.test_max_witness_push_length()
    self.test_max_witness_program_length()
    self.test_witness_input_length()
    self.test_block_relay(segwit_activated=True)
    self.test_tx_relay_after_segwit_activation()
    self.test_segwit_versions()
    self.test_premature_coinbase_witness_spend()
    self.test_signature_version_1()
    sync_blocks(self.nodes)
    if self.test_upgrade:
        # Optional: exercise upgrading node2 after activation (needs --oldbinary).
        self.test_upgrade_after_activation(self.nodes[2], 2)
    else:
        print("\tSkipping upgrade-after-activation test (use --oldbinary to enable)")
    self.test_witness_sigops()
# Script entry point: run the full segwit p2p test suite.
if __name__ == '__main__':
    SegWitTest().main()
| Diapolo/bitcoin | qa/rpc-tests/p2p-segwit.py | Python | mit | 73,280 |
import cgi
import datetime
import urllib
import webapp2
import logging
import os
import json
import random
# GAE
from google.appengine.ext import ndb
from google.appengine.api import users
# 3rd party
import jinja2
import twitter
import keys
# Templating
# Jinja2 environment loading templates from the directory containing this module.
jinja_environment = jinja2.Environment(loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
# Models
class SocialAccount(ndb.Model):
    """A social media account whose follower/like/view count is tracked."""
    account_username = ndb.StringProperty()
    account_type = ndb.StringProperty(choices=['facebook', 'twitter', 'youtube', 'googleplus'])
    grouping = ndb.StringProperty() # earthhour, wwf
    count = ndb.IntegerProperty(default=1)

    def display_name(self):
        """Name to show in the UI; the bare 'WWF' handle is the international account."""
        return 'WWF (International)' if self.account_username == 'WWF' else self.account_username
class SocialAccountRecord(ndb.Model):
    """For tracking stats over time: one snapshot per account per day."""
    account_username = ndb.StringProperty()
    account_type = ndb.StringProperty(choices=['facebook', 'twitter', 'youtube', 'googleplus'])
    count = ndb.IntegerProperty()  # follower/like/view count observed that day
    created_on = ndb.DateProperty()
    # "<date><type><username>" string used to find/overwrite the day's record quickly
    unique_day_data_mash = ndb.StringProperty()
    updated = ndb.DateTimeProperty(auto_now=True)  # last-write timestamp
class TotalCount(ndb.Model):
    """A utility model: in-memory accumulator of per-network totals (never persisted
    by the visible code; note there is no googleplus field)."""
    facebook = ndb.IntegerProperty(default=0)
    twitter = ndb.IntegerProperty(default=0)
    youtube = ndb.IntegerProperty(default=0)
# Handlers
class MainPage(webapp2.RequestHandler):
    """Public dashboard: tracked accounts sorted by count plus per-grouping totals."""
    def get(self):
        accounts = get_all_social_accounts_by_count()
        context = {
            'all_social_accounts': accounts,
            'totals_wwf': get_totals(accounts, 'wwf'),
            'totals_earthhour': get_totals(accounts, 'earthhour')
        }
        # Make the thousands-separator filter available to the template.
        jinja_environment.filters['format_number'] = format_number
        # Write out Page
        template = jinja_environment.get_template('index.html')
        self.response.out.write(template.render(context))
class AdminPage(webapp2.RequestHandler):
    """Admin dashboard for managing the list of tracked accounts."""
    def get(self):
        # Write out Page
        template = jinja_environment.get_template('admin.html')
        self.response.out.write(template.render({
            'all_social_accounts': get_all_social_accounts(),
            'logout_url': users.create_logout_url("/")
        }))
class AddAccount(webapp2.RequestHandler):
    """POST handler: register a new account to track and fetch its first count."""
    def post(self):
        username = self.request.get('account_username')
        # Guard clause: an account name is mandatory; the query string doubles
        # as a status message for the admin page.
        if not username:
            self.redirect('/admin/?account-name-is-required')
            return
        account = SocialAccount(
            parent=get_app_key(),
            account_username=username,
            account_type=self.request.get('account_type'),
            grouping=self.request.get('grouping'))
        account.put()
        get_latest_count(account)  # seed an initial count immediately
        self.redirect('/admin/?done')
class DeleteAccount(webapp2.RequestHandler):
    """GET handler: delete a tracked account by its numeric datastore id.

    Redirects to /admin/ with a status query string: 'deleted' on success,
    'invalid' when the id is missing or does not match an entity.
    """
    def get(self, account_id=None):
        if not account_id:
            self.redirect('/admin/?invalid')
            return
        account_to_delete = SocialAccount.get_by_id(int(account_id), parent=get_app_key())
        if account_to_delete is None:
            # Bug fix: get_by_id returns None for unknown ids; the old code
            # raised AttributeError on .key here instead of reporting it.
            self.redirect('/admin/?invalid')
            return
        account_to_delete.key.delete()
        self.redirect('/admin/?deleted')
class RefreshStats(webapp2.RequestHandler):
    """Admin-triggered refresh of every tracked account's count."""
    def get(self):
        refresh_stats()
        self.redirect('/admin/?refreshed')
class CronRefreshStats(webapp2.RequestHandler):
    """Cron-triggered refresh; same as RefreshStats but with no redirect."""
    def get(self):
        refresh_stats()
# Functions
def refresh_stats():
    """Fetch the latest count for every tracked account and persist snapshots.

    Collects the per-day SocialAccountRecord entities returned by
    get_latest_count() and writes them in a single ndb.put_multi() batch.
    """
    records = []
    social_accounts = get_all_social_accounts()
    # Shuffle so that the same accounts are not always the ones that miss out
    # when an upstream API rate limit is hit part-way through the run.
    random.shuffle(social_accounts)
    # Bug fix: build the twitter client once instead of once per account; the
    # original also had an if/else whose two branches were byte-identical
    # (a dead remnant of an unfinished 100-call twitter rate cap), so every
    # account was fetched unconditionally — behavior preserved here.
    twitter_api = get_twitter_api()
    for social_account in social_accounts:
        record = get_latest_count(social_account, twitter_api)
        if record:
            records.append(record)
    ndb.put_multi(records)
def get_totals(all_social_accounts, grouping='wwf'):
    """Sum the counts of accounts belonging to *grouping*, bucketed by network.

    Returns a TotalCount accumulator (googleplus accounts are not totalled,
    matching the model's fields).
    """
    totals = TotalCount()
    for account in all_social_accounts:
        if account.grouping != grouping:
            continue
        if account.account_type == 'facebook':
            totals.facebook += account.count
        elif account.account_type == 'twitter':
            totals.twitter += account.count
        elif account.account_type == 'youtube':
            totals.youtube += account.count
    return totals
def record_latest_count(social_account, count):
    """Store *count* as today's snapshot for *social_account*.

    Creates or overwrites the single SocialAccountRecord for today (found via
    the unique_day_data_mash lookup string), updates the SocialAccount's own
    count, and returns the record WITHOUT putting it — the caller batches
    records with ndb.put_multi. Returns None for falsy or non-positive counts.
    """
    record = None
    if count and count > 0:  # merged the redundant nested ifs
        today = datetime.date.today()
        # One record per (day, type, username): this mash is the lookup key.
        datamash_for_quick_lookup = str(today) + social_account.account_type + social_account.account_username
        # only keep the latest record for any day
        q = SocialAccountRecord.query(SocialAccountRecord.unique_day_data_mash == datamash_for_quick_lookup)
        # Bug fix: removed a stray argument-less q.filter() call (a no-op).
        record = q.get()
        if not record:
            record = SocialAccountRecord() # create a new one
        record.account_username = social_account.account_username
        record.account_type = social_account.account_type
        record.count = count
        record.created_on = today
        record.unique_day_data_mash = datamash_for_quick_lookup
        # record.put() deliberately omitted: bubble up and put_multi
        social_account.count = count
        social_account.put()
    return record
def get_latest_count(social_account, api=None):
    """Dispatch to the fetcher for this account's network.

    *api* is only used for twitter accounts (a pre-built client to reuse).
    Returns the SocialAccountRecord from the fetcher, or None for
    unrecognized account types.
    """
    fetchers = {
        'facebook': lambda: get_latest_facebook_count(social_account),
        'youtube': lambda: get_latest_youtube_count(social_account),
        'twitter': lambda: get_latest_twitter_count(social_account, api),
    }
    fetch = fetchers.get(social_account.account_type)
    return fetch() if fetch else None
def get_latest_facebook_count(social_account):
    """Fetch the page's like count from the Facebook Graph API and record it.

    Returns the SocialAccountRecord for today, or None when the fetch fails
    or yields no likes.
    """
    api_url = "http://graph.facebook.com/" + social_account.account_username
    page_likes = None
    try:
        payload = json.loads(urllib.urlopen(api_url).read())
        if payload:
            page_likes = int(payload['likes'])
    except Exception as e:
        # Best effort: log and fall through with page_likes = None.
        logging.error("Error fetching facebook API")
        logging.error(e)
    return record_latest_count(social_account, page_likes) if page_likes else None
def get_latest_youtube_count(social_account):
    """Fetch the channel's total upload views from the YouTube GData API and record it.

    Returns the SocialAccountRecord for today, or None when the fetch fails
    or yields no views.
    """
    api_url = "https://gdata.youtube.com/feeds/api/users/" + social_account.account_username + "?alt=json"
    video_views = None
    try:
        j = json.loads(urllib.urlopen(api_url).read())
        if j:
            video_views = int(j['entry']['yt$statistics']['totalUploadViews'])
    except Exception as e:
        # Bug fix: the log message wrongly said "facebook" (copy-paste error).
        logging.error("Error fetching youtube API")
        logging.error(e)
    record = None
    if video_views:
        record = record_latest_count(social_account, video_views)
    return record
def get_latest_twitter_count(social_account, api):
    """Fetch the account's follower count via the twitter API and record it.

    *api* may be None, in which case a fresh client is built. Returns the
    SocialAccountRecord for today, or None when the fetch fails or yields
    no followers.
    """
    if not api:
        api = get_twitter_api()  # fall back to a fresh client
    followers = None
    try:
        twitter_user = api.GetUser(screen_name=social_account.account_username)
        followers = twitter_user.followers_count if twitter_user else None
    except Exception as e:
        # Best effort: log and fall through with followers = None.
        logging.error("Error fetching twitter API")
        logging.error(e)
    if not followers:
        return None
    return record_latest_count(social_account, followers)
# Utility
def get_app_key():
    """Constructs a fixed Datastore key for the app for strong consistency for all admins."""
    # All SocialAccount entities are parented under this one key so that
    # ancestor queries return strongly consistent results.
    return ndb.Key('AppFixedKey', 'HappyPanda') # This is hacky, but works for our needs
def get_all_social_accounts():
    """Return every tracked account, sorted alphabetically by username."""
    query = SocialAccount.query(ancestor=get_app_key()).order(SocialAccount.account_username)
    return list(query.iter())
def get_all_social_accounts_by_count():
    """Return every tracked account, sorted by count, largest first."""
    query = SocialAccount.query(ancestor=get_app_key()).order(-SocialAccount.count)
    return list(query.iter())
def format_number(value):
    """Jinja filter: render a number with comma thousands separators."""
    return format(value, ",")
def get_twitter_api():
    """Build an authenticated twitter API client with caching disabled."""
    credentials = {
        'consumer_key': keys.TWITTER_CONSUMER_KEY,
        'consumer_secret': keys.TWITTER_CONSUMER_SECRET,
        'access_token_key': keys.TWITTER_ACCESS_TOKEN_KEY,
        'access_token_secret': keys.TWITTER_ACCESS_TOKEN_SECRET,
    }
    return twitter.Api(cache=None, **credentials)
# URL routing table for the WSGI application.
app = webapp2.WSGIApplication([
    ('/', MainPage),                      # public stats dashboard
    ('/admin/?', AdminPage),              # admin UI
    ('/admin/add', AddAccount),           # add an account (POST)
    ('/admin/delete/(\d+)', DeleteAccount),  # delete by numeric id
    ('/admin/refresh', RefreshStats),     # manual refresh
    ('/cron/refresh', CronRefreshStats)   # scheduled refresh
    ], debug=True)
| adamlofting/socialstatstracker | main.py | Python | apache-2.0 | 9,394 |
Subsets and Splits