repo_name (string, 5-92 chars) | path (string, 4-232 chars) | copies (19 classes) | size (string, 4-7 chars) | content (string, 721-1.04M chars) | license (15 classes) | hash (int64) | line_mean (float64, 6.51-99.9) | line_max (int64, 15-997) | alpha_frac (float64, 0.25-0.97) | autogenerated (bool)
---|---|---|---|---|---|---|---|---|---|---
jcpeterson/avoidr | player.py | 1 | 3915 |
import pygame, os
class Player:
    # takes the screen width/height to calculate the player's starting position (center of screen);
    # the player's color is set separately via updateColor(), which inverts the background color
def __init__(self, screenWidth, screenHeight):
self.posRangeX = screenWidth
self.posRangeY = screenHeight
self.posX = screenWidth/2
self.posY = screenHeight/2
self.speed = 10
self.sizeMax = 80
        self.sizeMin = 42  # was 40
# might as well start out at the minimum size
self.size = self.sizeMin
self.state = 'growing'
# make the player color white; it doesn't really matter how it starts
self.color = (255,255,255)
self.isJumping = False
self.goingUp = True
self.killed = False
self.exploding = False
self.rect = pygame.Rect((self.posX,self.posY),(self.size,self.size))
# load the jump sound
self.jumpSound = pygame.mixer.Sound(os.path.join('audio','jump.wav'))
def updateSize(self):
# # player size changes
# if self.state == 'growing' and self.size >= self.sizeMin:
# self.size += 1
# if self.size >= self.sizeMax:
# self.state = 'shrinking'
# if self.state == 'shrinking' and self.size <= self.sizeMax:
# self.size -= 1
# if self.size <= self.sizeMin:
# self.state = 'growing'
if self.isJumping:
self.speed = 3
            # player size changes when jumping
if self.goingUp:
self.size += 1
if self.size == self.sizeMax:
self.goingUp = False
if not self.goingUp:
self.size -= 1
if self.size == self.sizeMin:
self.isJumping = False
self.goingUp = True
self.speed = 10
self.rect.width = self.size
self.rect.height = self.size
# sounds/animations for this have not been implemented yet
if self.killed:
self.exploding = True
def updatePos(self, keys):
# left border collision detection
if (self.posX != 0 + self.sizeMax) and (self.posX > 0 + self.sizeMax + 5):
# player movement input
if keys[pygame.K_LEFT]:
self.posX = self.posX - self.speed
# right border collision detection
if (self.posX != self.posRangeX - self.sizeMax) and (self.posX < self.posRangeX - (self.sizeMax + 5)):
# player movement input
if keys[pygame.K_RIGHT]:
self.posX = self.posX + self.speed
# vertical border collision detection
if (self.posY != 0 + self.sizeMax) and (self.posY > 0 + self.sizeMax + 5):
# player movement input
if keys[pygame.K_UP]:
self.posY = self.posY - self.speed
# vertical border collision detection
if (self.posY != self.posRangeY - self.sizeMax) and (self.posY < self.posRangeY - (self.sizeMax + 5)):
# player movement input
if keys[pygame.K_DOWN]:
self.posY = self.posY + self.speed
# MOVE THIS OVER TO THE SIZE FUNCTION SOON!!!
if keys[pygame.K_SPACE]:
if not self.isJumping:
# play the jump sound
self.jumpSound.play()
self.isJumping = True
self.rect.x = self.posX
self.rect.y = self.posY
# use something like this for input handling later
# for e in pygame.event.get():
# if e.type == QUIT: raise SystemExit, "QUIT"
# if e.type == KEYDOWN and e.key == K_ESCAPE:
# raise SystemExit, "ESCAPE"
# pressed = pygame.key.get_pressed()
# up, left, right = [pressed[key] for key in (K_UP, K_LEFT, K_RIGHT)]
def updateColor(self,gameBgColor):
# update the player color with the inverted current background color
        self.color = (255-gameBgColor[0],255-gameBgColor[1],255-gameBgColor[2])
| gpl-3.0 | -8,821,727,489,908,777,000 | 33.052174 | 108 | 0.591315 | false |
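A minimal driver loop for the Player class above (editor's sketch, not part of the original file; it assumes pygame is installed, that the display and mixer can be initialised, and that 'audio/jump.wav' exists relative to the working directory, since Player.__init__ loads that sound):

# --- illustrative usage sketch for player.py (assumptions noted above) ---
import pygame

pygame.init()
pygame.mixer.init()
screen = pygame.display.set_mode((800, 600))
clock = pygame.time.Clock()
player = Player(800, 600)          # screen width/height, as __init__ expects
bg_color = (30, 30, 30)            # example background, not from the game

running = True
while running:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            running = False
    keys = pygame.key.get_pressed()
    player.updatePos(keys)         # arrow-key movement + space to jump
    player.updateSize()            # jump/size animation
    player.updateColor(bg_color)   # inverted background colour
    screen.fill(bg_color)
    pygame.draw.rect(screen, player.color, player.rect)
    pygame.display.flip()
    clock.tick(60)
pygame.quit()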
staranjeet/fjord | fjord/redirector/tests/test_dummy.py | 1 | 1413 |
from fjord.base.tests import reverse, TestCase
from fjord.redirector import get_redirectors
from fjord.redirector.base import build_redirect_url
from fjord.redirector.providers.dummy import DummyRedirector
from fjord.redirector.tests import RedirectorTestMixin
class DummyRedirectorLoadingTestCase(RedirectorTestMixin, TestCase):
redirectors = []
def test_didnt_load(self):
dummy_providers = [
prov for prov in get_redirectors()
if isinstance(prov, DummyRedirector)
]
assert len(dummy_providers) == 0
class DummyRedirectorTestCase(RedirectorTestMixin, TestCase):
redirectors = [
'fjord.redirector.providers.dummy.DummyRedirector'
]
def test_load(self):
dummy_redirectors = [
prov for prov in get_redirectors()
if isinstance(prov, DummyRedirector)
]
assert len(dummy_redirectors) == 1
def test_handle_redirect(self):
resp = self.client.get(build_redirect_url('dummy:ou812'))
assert resp.status_code == 302
assert resp['Location'] == 'http://example.com/ou812'
def test_nothing_handled_it_404(self):
resp = self.client.get(build_redirect_url('notdummy:ou812'))
assert resp.status_code == 404
def test_no_redirect_specified_404(self):
resp = self.client.get(reverse('redirect-view'))
assert resp.status_code == 404
| bsd-3-clause | 70,996,547,842,896,990 | 32.642857 | 68 | 0.678698 | false |
lucidfrontier45/scikit-learn | examples/manifold/plot_manifold_sphere.py | 1 | 4572 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
=============================================
Manifold Learning methods on a severed sphere
=============================================
An application of the different :ref:`manifold` techniques
on a spherical data-set. Here one can see the use of
dimensionality reduction in order to gain some intuition
regarding the Manifold learning methods. Regarding the dataset,
the poles are cut from the sphere, as well as a thin slice down its
side. This enables the manifold learning techniques to
'spread it open' whilst projecting it onto two dimensions.
For a similar example, where the methods are applied to the
S-curve dataset, see :ref:`example_manifold_plot_compare_methods.py`
Note that the purpose of the :ref:`MDS <multidimensional_scaling>` is
to find a low-dimensional representation of the data (here 2D) in
which the distances respect well the distances in the original
high-dimensional space, unlike other manifold-learning algorithms,
it does not seek an isotropic representation of the data in
the low-dimensional space. Here the manifold problem matches fairly
well that of representing a flat map of the Earth, as with
`map projection <http://en.wikipedia.org/wiki/Map_projection>`_
"""
# Author: Jaques Grobler <[email protected]>
# License: BSD
print __doc__
from time import time
import numpy as np
import pylab as pl
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.ticker import NullFormatter
from sklearn import manifold
from sklearn.utils import check_random_state
# Next line to silence pyflakes.
Axes3D
# Variables for manifold learning.
n_neighbors = 10
n_samples = 1000
# Create our sphere.
random_state = check_random_state(0)
p = random_state.rand(n_samples) * (2 * np.pi - 0.55)
t = random_state.rand(n_samples) * np.pi
# Sever the poles from the sphere.
indices = ((t < (np.pi - (np.pi / 8))) & (t > ((np.pi / 8))))
colors = p[indices]
x, y, z = np.sin(t[indices]) * np.cos(p[indices]), \
np.sin(t[indices]) * np.sin(p[indices]), \
np.cos(t[indices])
# Plot our dataset.
fig = pl.figure(figsize=(15, 8))
pl.suptitle("Manifold Learning with %i points, %i neighbors"
            % (n_samples, n_neighbors), fontsize=14)
ax = fig.add_subplot(241, projection='3d')
ax.scatter(x, y, z, c=p[indices], cmap=pl.cm.rainbow)
try:
# compatibility matplotlib < 1.0
ax.view_init(40, -10)
except:
pass
sphere_data = np.array([x, y, z]).T
# Perform Locally Linear Embedding Manifold learning
methods = ['standard', 'ltsa', 'hessian', 'modified']
labels = ['LLE', 'LTSA', 'Hessian LLE', 'Modified LLE']
for i, method in enumerate(methods):
t0 = time()
trans_data = manifold\
.LocallyLinearEmbedding(n_neighbors, 2,
method=method).fit_transform(sphere_data).T
t1 = time()
print "%s: %.2g sec" % (methods[i], t1 - t0)
ax = fig.add_subplot(242 + i)
pl.scatter(trans_data[0], trans_data[1], c=colors, cmap=pl.cm.rainbow)
pl.title("%s (%.2g sec)" % (labels[i], t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
pl.axis('tight')
# Perform Isomap Manifold learning.
t0 = time()
trans_data = manifold.Isomap(n_neighbors, n_components=2)\
.fit_transform(sphere_data).T
t1 = time()
print "%s: %.2g sec" % ('ISO', t1 - t0)
ax = fig.add_subplot(246)
pl.scatter(trans_data[0], trans_data[1], c=colors, cmap=pl.cm.rainbow)
pl.title("%s (%.2g sec)" % ('Isomap', t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
pl.axis('tight')
# Perform Multi-dimensional scaling.
t0 = time()
mds = manifold.MDS(2, max_iter=100, n_init=1)
trans_data = mds.fit_transform(sphere_data).T
t1 = time()
print "MDS: %.2g sec" % (t1 - t0)
ax = fig.add_subplot(247)
pl.scatter(trans_data[0], trans_data[1], c=colors, cmap=pl.cm.rainbow)
pl.title("MDS (%.2g sec)" % (t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
pl.axis('tight')
# Perform Spectral Embedding.
t0 = time()
se = manifold.SpectralEmbedding(n_components=2,
n_neighbors=n_neighbors)
trans_data = se.fit_transform(sphere_data).T
t1 = time()
print "Spectral Embedding: %.2g sec" % (t1 - t0)
ax = fig.add_subplot(248)
pl.scatter(trans_data[0], trans_data[1], c=colors, cmap=pl.cm.rainbow)
pl.title("Spectral Embedding (%.2g sec)" % (t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
pl.axis('tight')
pl.show()
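The severed-sphere sampling at the top of this script is its only data-generation step; factored out, it looks like the helper below (editor's sketch using only NumPy, not part of the original example; the cut widths mirror the pi/8 pole trim and the 0.55-radian slice used above):

# --- illustrative helper: reusable severed-sphere sampler ---
def severed_sphere(n_samples=1000, pole_cut=np.pi / 8, slice_cut=0.55, seed=0):
    """Sample a unit sphere with both poles and a thin azimuthal slice removed."""
    rng = np.random.RandomState(seed)
    p = rng.rand(n_samples) * (2 * np.pi - slice_cut)   # azimuth, slice removed
    t = rng.rand(n_samples) * np.pi                     # inclination
    keep = (t > pole_cut) & (t < np.pi - pole_cut)      # drop both poles
    xs = np.sin(t[keep]) * np.cos(p[keep])
    ys = np.sin(t[keep]) * np.sin(p[keep])
    zs = np.cos(t[keep])
    return np.column_stack([xs, ys, zs]), p[keep]       # points and colour key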
| bsd-3-clause | -6,762,097,957,016,355,000 | 31.425532 | 75 | 0.680446 | false |
iulian787/spack | var/spack/repos/builtin/packages/linktest/package.py | 2 | 1299 |
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *  # Spack package API: MakefilePackage, version, depends_on, etc.


class Linktest(MakefilePackage):
"""Performance tool to generate communication matrix using
parallel ping-pong benchmark"""
homepage = "https://www.fz-juelich.de/ias/jsc/EN/Expertise/Support/Software/LinkTest/_node.html"
url = "http://apps.fz-juelich.de/jsc/linktest/download.php?version=1.2p1"
maintainers = ['pramodk']
version('1.2p1', sha256='981b96da1d5bf214507b8e219a36e8d0183d8bd5c10539b26f660b2c83e5269d', extension='tar.gz')
depends_on('mpi')
depends_on('sionlib')
def edit(self, spec, prefix):
with working_dir('src'):
makefile = FileFilter('Makefile')
makefile.filter('= gcc', '= cc')
makefile.filter('mpicc', spec['mpi'].mpicc)
makefile.filter('#SIONLIB_INST=.*',
'SIONLIB_INST=%s' % spec['sionlib'].prefix)
def build(self, spec, prefix):
with working_dir('src'):
make()
def install(self, spec, prefix):
mkdir(prefix.bin)
install('src/mpilinktest', prefix.bin)
install('src/pingponganalysis', prefix.bin)
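Editor's note: once this recipe sits in a Spack repository, it is exercised through the normal Spack CLI; the `^openmpi` spec below is just one example of pinning the MPI provider, not something the recipe requires.

# spack install linktest            # build with the default MPI provider
# spack install linktest ^openmpi   # example: pin the MPI implementation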
| lgpl-2.1 | -1,271,115,290,490,346,200 | 35.083333 | 115 | 0.647421 | false |
nomad-vino/SPSE-1 | Module 5/5.4.py | 1 | 1362 |
#!/usr/bin/python
print " __ "
print " |__|____ ___ __ "
print " | \__ \\\\ \/ / "
print " | |/ __ \\\\ / "
print " /\__| (____ /\_/ "
print " \______| \/ "
print " "
print 'Module 5'
print 'Exploitation Techniques'
print 'Part 4'
print
"""
Playing with processes in IDB
"""
import immlib
imm = immlib.Debugger()
# # # # Main application
DESC = 'Playing with processes'
def main(args):
# open closed process
#exe = 'E:\\Module 6\\Server-Strcpy.exe'
#imm.openProcess(exe)
# attach to running process - not the one opened in immunity
# -> !script_name PID
#imm.Attach(int(args[0])) #PID
#imm.restartProcess()
# find all modules in running process
modules_table = imm.createTable('Module Information', ['Name', 'Base', 'Entry', 'Size', 'Version'])
# get list of modules
module_dict = imm.getAllModules()
# fill table
for entity in module_dict.values() :
# Libs.debugtypes => Module
modules_table.add(0, [
entity.getName(),
'%08X'%entity.getBaseAddress(),
'%08X'%entity.getEntry(),
'%08X'%entity.getSize(),
entity.getVersion()
])
# print the state of registers in logs
imm.log(str(imm.getRegs()))
return 'Done'
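Editor's note on running this script: Immunity Debugger loads PyCommands from its PyCommands folder and invokes them with a leading '!'; the file name below is illustrative, not from the original course material.

# copy to <Immunity Debugger>\PyCommands\procinfo.py, then in the debugger's
# command bar run:
#   !procinfo          # list the modules of the currently debugged process
#   !procinfo 1234     # PID argument, used only if the Attach() call above
#                      # is uncommented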
| gpl-3.0 | 3,683,522,595,518,480,000 | 22.084746 | 100 | 0.526432 | false |
mancoast/CPythonPyc_test | fail/313_test_bigmem.py | 1 | 41534 |
from test import support
from test.support import bigmemtest, _1G, _2G, _4G, precisionbigmemtest
import unittest
import operator
import sys
import functools
# Bigmem testing houserules:
#
# - Try not to allocate too many large objects. It's okay to rely on
# refcounting semantics, but don't forget that 's = create_largestring()'
# doesn't release the old 's' (if it exists) until well after its new
# value has been created. Use 'del s' before the create_largestring call.
#
# - Do *not* compare large objects using assertEqual or similar. It's a
# lengthy operation and the errormessage will be utterly useless due to
# its size. To make sure whether a result has the right contents, better
# to use the strip or count methods, or compare meaningful slices.
#
# - Don't forget to test for large indices, offsets and results and such,
# in addition to large sizes.
#
# - When repeating an object (say, a substring, or a small list) to create
# a large object, make the subobject of a length that is not a power of
# 2. That way, int-wrapping problems are more easily detected.
#
# - While the bigmemtest decorator speaks of 'minsize', all tests will
# actually be called with a much smaller number too, in the normal
# test run (5Kb currently.) This is so the tests themselves get frequent
# testing. Consequently, always make all large allocations based on the
# passed-in 'size', and don't rely on the size being very large. Also,
# memuse-per-size should remain sane (less than a few thousand); if your
# test uses more, adjust 'size' upward, instead.
# BEWARE: it seems that one failing test can yield other subsequent tests to
# fail as well. I do not know whether it is due to memory fragmentation
# issues, or other specifics of the platform malloc() routine.
character_size = 4 if sys.maxunicode > 0xFFFF else 2
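The house rules above talk about 'minsize' and 'memuse' without showing the machinery; roughly, the decorator computes the size argument handed to each test like the simplified sketch below (editor's illustration only; the real logic lives in test.support and additionally handles dry runs and verbose reporting):

# --- simplified model of the bigmemtest decorator (not the real code) ---
def _sketch_bigmemtest(minsize, memuse):
    def decorator(test_func):
        def wrapper(self):
            # the harness derives a size from the configured memory limit
            # (support.set_memlimit) divided by bytes-per-item, and falls
            # back to a small smoke-test size when no limit is set
            memlimit = getattr(wrapper, 'memlimit', 0)
            if memlimit:
                size = min(memlimit // memuse, _2G * 2)
            else:
                size = 5 * 1024
            return test_func(self, size)
        wrapper.minsize, wrapper.memuse = minsize, memuse
        return wrapper
    return decorator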
class BaseStrTest:
@bigmemtest(minsize=_2G, memuse=2)
def test_capitalize(self, size):
_ = self.from_latin1
SUBSTR = self.from_latin1(' abc def ghi')
s = _('-') * size + SUBSTR
caps = s.capitalize()
self.assertEqual(caps[-len(SUBSTR):],
SUBSTR.capitalize())
self.assertEqual(caps.lstrip(_('-')), SUBSTR)
@bigmemtest(minsize=_2G + 10, memuse=1)
def test_center(self, size):
SUBSTR = self.from_latin1(' abc def ghi')
s = SUBSTR.center(size)
self.assertEqual(len(s), size)
lpadsize = rpadsize = (len(s) - len(SUBSTR)) // 2
if len(s) % 2:
lpadsize += 1
self.assertEqual(s[lpadsize:-rpadsize], SUBSTR)
self.assertEqual(s.strip(), SUBSTR.strip())
@bigmemtest(minsize=_2G, memuse=2)
def test_count(self, size):
_ = self.from_latin1
SUBSTR = _(' abc def ghi')
s = _('.') * size + SUBSTR
self.assertEqual(s.count(_('.')), size)
s += _('.')
self.assertEqual(s.count(_('.')), size + 1)
self.assertEqual(s.count(_(' ')), 3)
self.assertEqual(s.count(_('i')), 1)
self.assertEqual(s.count(_('j')), 0)
@bigmemtest(minsize=_2G, memuse=2)
def test_endswith(self, size):
_ = self.from_latin1
SUBSTR = _(' abc def ghi')
s = _('-') * size + SUBSTR
self.assertTrue(s.endswith(SUBSTR))
self.assertTrue(s.endswith(s))
s2 = _('...') + s
self.assertTrue(s2.endswith(s))
self.assertFalse(s.endswith(_('a') + SUBSTR))
self.assertFalse(SUBSTR.endswith(s))
@bigmemtest(minsize=_2G + 10, memuse=2)
def test_expandtabs(self, size):
_ = self.from_latin1
s = _('-') * size
tabsize = 8
self.assertEqual(s.expandtabs(), s)
del s
slen, remainder = divmod(size, tabsize)
s = _(' \t') * slen
s = s.expandtabs(tabsize)
self.assertEqual(len(s), size - remainder)
self.assertEqual(len(s.strip(_(' '))), 0)
@bigmemtest(minsize=_2G, memuse=2)
def test_find(self, size):
_ = self.from_latin1
SUBSTR = _(' abc def ghi')
sublen = len(SUBSTR)
s = _('').join([SUBSTR, _('-') * size, SUBSTR])
self.assertEqual(s.find(_(' ')), 0)
self.assertEqual(s.find(SUBSTR), 0)
self.assertEqual(s.find(_(' '), sublen), sublen + size)
self.assertEqual(s.find(SUBSTR, len(SUBSTR)), sublen + size)
self.assertEqual(s.find(_('i')), SUBSTR.find(_('i')))
self.assertEqual(s.find(_('i'), sublen),
sublen + size + SUBSTR.find(_('i')))
self.assertEqual(s.find(_('i'), size),
sublen + size + SUBSTR.find(_('i')))
self.assertEqual(s.find(_('j')), -1)
@bigmemtest(minsize=_2G, memuse=2)
def test_index(self, size):
_ = self.from_latin1
SUBSTR = _(' abc def ghi')
sublen = len(SUBSTR)
s = _('').join([SUBSTR, _('-') * size, SUBSTR])
self.assertEqual(s.index(_(' ')), 0)
self.assertEqual(s.index(SUBSTR), 0)
self.assertEqual(s.index(_(' '), sublen), sublen + size)
self.assertEqual(s.index(SUBSTR, sublen), sublen + size)
self.assertEqual(s.index(_('i')), SUBSTR.index(_('i')))
self.assertEqual(s.index(_('i'), sublen),
sublen + size + SUBSTR.index(_('i')))
self.assertEqual(s.index(_('i'), size),
sublen + size + SUBSTR.index(_('i')))
self.assertRaises(ValueError, s.index, _('j'))
@bigmemtest(minsize=_2G, memuse=2)
def test_isalnum(self, size):
_ = self.from_latin1
SUBSTR = _('123456')
s = _('a') * size + SUBSTR
self.assertTrue(s.isalnum())
s += _('.')
self.assertFalse(s.isalnum())
@bigmemtest(minsize=_2G, memuse=2)
def test_isalpha(self, size):
_ = self.from_latin1
SUBSTR = _('zzzzzzz')
s = _('a') * size + SUBSTR
self.assertTrue(s.isalpha())
s += _('.')
self.assertFalse(s.isalpha())
@bigmemtest(minsize=_2G, memuse=2)
def test_isdigit(self, size):
_ = self.from_latin1
SUBSTR = _('123456')
s = _('9') * size + SUBSTR
self.assertTrue(s.isdigit())
s += _('z')
self.assertFalse(s.isdigit())
@bigmemtest(minsize=_2G, memuse=2)
def test_islower(self, size):
_ = self.from_latin1
chars = _(''.join(
chr(c) for c in range(255) if not chr(c).isupper()))
repeats = size // len(chars) + 2
s = chars * repeats
self.assertTrue(s.islower())
s += _('A')
self.assertFalse(s.islower())
@bigmemtest(minsize=_2G, memuse=2)
def test_isspace(self, size):
_ = self.from_latin1
whitespace = _(' \f\n\r\t\v')
repeats = size // len(whitespace) + 2
s = whitespace * repeats
self.assertTrue(s.isspace())
s += _('j')
self.assertFalse(s.isspace())
@bigmemtest(minsize=_2G, memuse=2)
def test_istitle(self, size):
_ = self.from_latin1
SUBSTR = _('123456')
s = _('').join([_('A'), _('a') * size, SUBSTR])
self.assertTrue(s.istitle())
s += _('A')
self.assertTrue(s.istitle())
s += _('aA')
self.assertFalse(s.istitle())
@bigmemtest(minsize=_2G, memuse=2)
def test_isupper(self, size):
_ = self.from_latin1
chars = _(''.join(
chr(c) for c in range(255) if not chr(c).islower()))
repeats = size // len(chars) + 2
s = chars * repeats
self.assertTrue(s.isupper())
s += _('a')
self.assertFalse(s.isupper())
@bigmemtest(minsize=_2G, memuse=2)
def test_join(self, size):
_ = self.from_latin1
s = _('A') * size
x = s.join([_('aaaaa'), _('bbbbb')])
self.assertEqual(x.count(_('a')), 5)
self.assertEqual(x.count(_('b')), 5)
self.assertTrue(x.startswith(_('aaaaaA')))
self.assertTrue(x.endswith(_('Abbbbb')))
@bigmemtest(minsize=_2G + 10, memuse=1)
def test_ljust(self, size):
_ = self.from_latin1
SUBSTR = _(' abc def ghi')
s = SUBSTR.ljust(size)
self.assertTrue(s.startswith(SUBSTR + _(' ')))
self.assertEqual(len(s), size)
self.assertEqual(s.strip(), SUBSTR.strip())
@bigmemtest(minsize=_2G + 10, memuse=2)
def test_lower(self, size):
_ = self.from_latin1
s = _('A') * size
s = s.lower()
self.assertEqual(len(s), size)
self.assertEqual(s.count(_('a')), size)
@bigmemtest(minsize=_2G + 10, memuse=1)
def test_lstrip(self, size):
_ = self.from_latin1
SUBSTR = _('abc def ghi')
s = SUBSTR.rjust(size)
self.assertEqual(len(s), size)
self.assertEqual(s.lstrip(), SUBSTR.lstrip())
del s
s = SUBSTR.ljust(size)
self.assertEqual(len(s), size)
# Type-specific optimization
if isinstance(s, (str, bytes)):
stripped = s.lstrip()
self.assertTrue(stripped is s)
@bigmemtest(minsize=_2G + 10, memuse=2)
def test_replace(self, size):
_ = self.from_latin1
replacement = _('a')
s = _(' ') * size
s = s.replace(_(' '), replacement)
self.assertEqual(len(s), size)
self.assertEqual(s.count(replacement), size)
s = s.replace(replacement, _(' '), size - 4)
self.assertEqual(len(s), size)
self.assertEqual(s.count(replacement), 4)
self.assertEqual(s[-10:], _(' aaaa'))
@bigmemtest(minsize=_2G, memuse=2)
def test_rfind(self, size):
_ = self.from_latin1
SUBSTR = _(' abc def ghi')
sublen = len(SUBSTR)
s = _('').join([SUBSTR, _('-') * size, SUBSTR])
self.assertEqual(s.rfind(_(' ')), sublen + size + SUBSTR.rfind(_(' ')))
self.assertEqual(s.rfind(SUBSTR), sublen + size)
self.assertEqual(s.rfind(_(' '), 0, size), SUBSTR.rfind(_(' ')))
self.assertEqual(s.rfind(SUBSTR, 0, sublen + size), 0)
self.assertEqual(s.rfind(_('i')), sublen + size + SUBSTR.rfind(_('i')))
self.assertEqual(s.rfind(_('i'), 0, sublen), SUBSTR.rfind(_('i')))
self.assertEqual(s.rfind(_('i'), 0, sublen + size),
SUBSTR.rfind(_('i')))
self.assertEqual(s.rfind(_('j')), -1)
@bigmemtest(minsize=_2G, memuse=2)
def test_rindex(self, size):
_ = self.from_latin1
SUBSTR = _(' abc def ghi')
sublen = len(SUBSTR)
s = _('').join([SUBSTR, _('-') * size, SUBSTR])
self.assertEqual(s.rindex(_(' ')),
sublen + size + SUBSTR.rindex(_(' ')))
self.assertEqual(s.rindex(SUBSTR), sublen + size)
self.assertEqual(s.rindex(_(' '), 0, sublen + size - 1),
SUBSTR.rindex(_(' ')))
self.assertEqual(s.rindex(SUBSTR, 0, sublen + size), 0)
self.assertEqual(s.rindex(_('i')),
sublen + size + SUBSTR.rindex(_('i')))
self.assertEqual(s.rindex(_('i'), 0, sublen), SUBSTR.rindex(_('i')))
self.assertEqual(s.rindex(_('i'), 0, sublen + size),
SUBSTR.rindex(_('i')))
self.assertRaises(ValueError, s.rindex, _('j'))
@bigmemtest(minsize=_2G + 10, memuse=1)
def test_rjust(self, size):
_ = self.from_latin1
SUBSTR = _(' abc def ghi')
s = SUBSTR.ljust(size)
self.assertTrue(s.startswith(SUBSTR + _(' ')))
self.assertEqual(len(s), size)
self.assertEqual(s.strip(), SUBSTR.strip())
@bigmemtest(minsize=_2G + 10, memuse=1)
def test_rstrip(self, size):
_ = self.from_latin1
SUBSTR = _(' abc def ghi')
s = SUBSTR.ljust(size)
self.assertEqual(len(s), size)
self.assertEqual(s.rstrip(), SUBSTR.rstrip())
del s
s = SUBSTR.rjust(size)
self.assertEqual(len(s), size)
# Type-specific optimization
if isinstance(s, (str, bytes)):
stripped = s.rstrip()
self.assertTrue(stripped is s)
# The test takes about size bytes to build a string, and then about
# sqrt(size) substrings of sqrt(size) in size and a list to
# hold sqrt(size) items. It's close but just over 2x size.
@bigmemtest(minsize=_2G, memuse=2.1)
def test_split_small(self, size):
_ = self.from_latin1
# Crudely calculate an estimate so that the result of s.split won't
# take up an inordinate amount of memory
chunksize = int(size ** 0.5 + 2)
SUBSTR = _('a') + _(' ') * chunksize
s = SUBSTR * chunksize
l = s.split()
self.assertEqual(len(l), chunksize)
expected = _('a')
for item in l:
self.assertEqual(item, expected)
del l
l = s.split(_('a'))
self.assertEqual(len(l), chunksize + 1)
expected = _(' ') * chunksize
for item in filter(None, l):
self.assertEqual(item, expected)
# Allocates a string of twice size (and briefly two) and a list of
# size. Because of internal affairs, the s.split() call produces a
# list of size times the same one-character string, so we only
# suffer for the list size. (Otherwise, it'd cost another 48 times
# size in bytes!) Nevertheless, a list of size takes
# 8*size bytes.
@bigmemtest(minsize=_2G + 5, memuse=10)
def test_split_large(self, size):
_ = self.from_latin1
s = _(' a') * size + _(' ')
l = s.split()
self.assertEqual(len(l), size)
self.assertEqual(set(l), set([_('a')]))
del l
l = s.split(_('a'))
self.assertEqual(len(l), size + 1)
self.assertEqual(set(l), set([_(' ')]))
@bigmemtest(minsize=_2G, memuse=2.1)
def test_splitlines(self, size):
_ = self.from_latin1
# Crudely calculate an estimate so that the result of s.split won't
# take up an inordinate amount of memory
chunksize = int(size ** 0.5 + 2) // 2
SUBSTR = _(' ') * chunksize + _('\n') + _(' ') * chunksize + _('\r\n')
s = SUBSTR * chunksize
l = s.splitlines()
self.assertEqual(len(l), chunksize * 2)
expected = _(' ') * chunksize
for item in l:
self.assertEqual(item, expected)
@bigmemtest(minsize=_2G, memuse=2)
def test_startswith(self, size):
_ = self.from_latin1
SUBSTR = _(' abc def ghi')
s = _('-') * size + SUBSTR
self.assertTrue(s.startswith(s))
self.assertTrue(s.startswith(_('-') * size))
self.assertFalse(s.startswith(SUBSTR))
@bigmemtest(minsize=_2G, memuse=1)
def test_strip(self, size):
_ = self.from_latin1
SUBSTR = _(' abc def ghi ')
s = SUBSTR.rjust(size)
self.assertEqual(len(s), size)
self.assertEqual(s.strip(), SUBSTR.strip())
del s
s = SUBSTR.ljust(size)
self.assertEqual(len(s), size)
self.assertEqual(s.strip(), SUBSTR.strip())
@bigmemtest(minsize=_2G, memuse=2)
def test_swapcase(self, size):
_ = self.from_latin1
SUBSTR = _("aBcDeFG12.'\xa9\x00")
sublen = len(SUBSTR)
repeats = size // sublen + 2
s = SUBSTR * repeats
s = s.swapcase()
self.assertEqual(len(s), sublen * repeats)
self.assertEqual(s[:sublen * 3], SUBSTR.swapcase() * 3)
self.assertEqual(s[-sublen * 3:], SUBSTR.swapcase() * 3)
@bigmemtest(minsize=_2G, memuse=2)
def test_title(self, size):
_ = self.from_latin1
SUBSTR = _('SpaaHAaaAaham')
s = SUBSTR * (size // len(SUBSTR) + 2)
s = s.title()
self.assertTrue(s.startswith((SUBSTR * 3).title()))
self.assertTrue(s.endswith(SUBSTR.lower() * 3))
@bigmemtest(minsize=_2G, memuse=2)
def test_translate(self, size):
_ = self.from_latin1
SUBSTR = _('aZz.z.Aaz.')
if isinstance(SUBSTR, str):
trans = {
ord(_('.')): _('-'),
ord(_('a')): _('!'),
ord(_('Z')): _('$'),
}
else:
trans = bytes.maketrans(b'.aZ', b'-!$')
sublen = len(SUBSTR)
repeats = size // sublen + 2
s = SUBSTR * repeats
s = s.translate(trans)
self.assertEqual(len(s), repeats * sublen)
self.assertEqual(s[:sublen], SUBSTR.translate(trans))
self.assertEqual(s[-sublen:], SUBSTR.translate(trans))
self.assertEqual(s.count(_('.')), 0)
self.assertEqual(s.count(_('!')), repeats * 2)
self.assertEqual(s.count(_('z')), repeats * 3)
@bigmemtest(minsize=_2G + 5, memuse=2)
def test_upper(self, size):
_ = self.from_latin1
s = _('a') * size
s = s.upper()
self.assertEqual(len(s), size)
self.assertEqual(s.count(_('A')), size)
@bigmemtest(minsize=_2G + 20, memuse=1)
def test_zfill(self, size):
_ = self.from_latin1
SUBSTR = _('-568324723598234')
s = SUBSTR.zfill(size)
self.assertTrue(s.endswith(_('0') + SUBSTR[1:]))
self.assertTrue(s.startswith(_('-0')))
self.assertEqual(len(s), size)
self.assertEqual(s.count(_('0')), size - len(SUBSTR))
# This test is meaningful even with size < 2G, as long as the
# doubled string is > 2G (but it tests more if both are > 2G :)
@bigmemtest(minsize=_1G + 2, memuse=3)
def test_concat(self, size):
_ = self.from_latin1
s = _('.') * size
self.assertEqual(len(s), size)
s = s + s
self.assertEqual(len(s), size * 2)
self.assertEqual(s.count(_('.')), size * 2)
# This test is meaningful even with size < 2G, as long as the
# repeated string is > 2G (but it tests more if both are > 2G :)
@bigmemtest(minsize=_1G + 2, memuse=3)
def test_repeat(self, size):
_ = self.from_latin1
s = _('.') * size
self.assertEqual(len(s), size)
s = s * 2
self.assertEqual(len(s), size * 2)
self.assertEqual(s.count(_('.')), size * 2)
@bigmemtest(minsize=_2G + 20, memuse=2)
def test_slice_and_getitem(self, size):
_ = self.from_latin1
SUBSTR = _('0123456789')
sublen = len(SUBSTR)
s = SUBSTR * (size // sublen)
stepsize = len(s) // 100
stepsize = stepsize - (stepsize % sublen)
for i in range(0, len(s) - stepsize, stepsize):
self.assertEqual(s[i], SUBSTR[0])
self.assertEqual(s[i:i + sublen], SUBSTR)
self.assertEqual(s[i:i + sublen:2], SUBSTR[::2])
if i > 0:
self.assertEqual(s[i + sublen - 1:i - 1:-3],
SUBSTR[sublen::-3])
# Make sure we do some slicing and indexing near the end of the
# string, too.
self.assertEqual(s[len(s) - 1], SUBSTR[-1])
self.assertEqual(s[-1], SUBSTR[-1])
self.assertEqual(s[len(s) - 10], SUBSTR[0])
self.assertEqual(s[-sublen], SUBSTR[0])
self.assertEqual(s[len(s):], _(''))
self.assertEqual(s[len(s) - 1:], SUBSTR[-1:])
self.assertEqual(s[-1:], SUBSTR[-1:])
self.assertEqual(s[len(s) - sublen:], SUBSTR)
self.assertEqual(s[-sublen:], SUBSTR)
self.assertEqual(len(s[:]), len(s))
self.assertEqual(len(s[:len(s) - 5]), len(s) - 5)
self.assertEqual(len(s[5:-5]), len(s) - 10)
self.assertRaises(IndexError, operator.getitem, s, len(s))
self.assertRaises(IndexError, operator.getitem, s, len(s) + 1)
self.assertRaises(IndexError, operator.getitem, s, len(s) + 1<<31)
@bigmemtest(minsize=_2G, memuse=2)
def test_contains(self, size):
_ = self.from_latin1
SUBSTR = _('0123456789')
edge = _('-') * (size // 2)
s = _('').join([edge, SUBSTR, edge])
del edge
self.assertTrue(SUBSTR in s)
self.assertFalse(SUBSTR * 2 in s)
self.assertTrue(_('-') in s)
self.assertFalse(_('a') in s)
s += _('a')
self.assertTrue(_('a') in s)
@bigmemtest(minsize=_2G + 10, memuse=2)
def test_compare(self, size):
_ = self.from_latin1
s1 = _('-') * size
s2 = _('-') * size
self.assertEqual(s1, s2)
del s2
s2 = s1 + _('a')
self.assertFalse(s1 == s2)
del s2
s2 = _('.') * size
self.assertFalse(s1 == s2)
@bigmemtest(minsize=_2G + 10, memuse=1)
def test_hash(self, size):
# Not sure if we can do any meaningful tests here... Even if we
# start relying on the exact algorithm used, the result will be
# different depending on the size of the C 'long int'. Even this
# test is dodgy (there's no *guarantee* that the two things should
# have a different hash, even if they, in the current
# implementation, almost always do.)
_ = self.from_latin1
s = _('\x00') * size
h1 = hash(s)
del s
s = _('\x00') * (size + 1)
self.assertFalse(h1 == hash(s))
class StrTest(unittest.TestCase, BaseStrTest):
def from_latin1(self, s):
return s
def basic_encode_test(self, size, enc, c='.', expectedsize=None):
if expectedsize is None:
expectedsize = size
s = c * size
self.assertEqual(len(s.encode(enc)), expectedsize)
def setUp(self):
# HACK: adjust memory use of tests inherited from BaseStrTest
# according to character size.
self._adjusted = {}
for name in dir(BaseStrTest):
if not name.startswith('test_'):
continue
meth = getattr(type(self), name)
try:
memuse = meth.memuse
except AttributeError:
continue
meth.memuse = character_size * memuse
self._adjusted[name] = memuse
def tearDown(self):
for name, memuse in self._adjusted.items():
getattr(type(self), name).memuse = memuse
@bigmemtest(minsize=_2G + 2, memuse=character_size + 1)
def test_encode(self, size):
return self.basic_encode_test(size, 'utf-8')
@precisionbigmemtest(size=_4G // 6 + 2, memuse=character_size + 1)
def test_encode_raw_unicode_escape(self, size):
try:
return self.basic_encode_test(size, 'raw_unicode_escape')
except MemoryError:
pass # acceptable on 32-bit
@precisionbigmemtest(size=_4G // 5 + 70, memuse=character_size + 1)
def test_encode_utf7(self, size):
try:
return self.basic_encode_test(size, 'utf7')
except MemoryError:
pass # acceptable on 32-bit
@precisionbigmemtest(size=_4G // 4 + 5, memuse=character_size + 4)
def test_encode_utf32(self, size):
try:
return self.basic_encode_test(size, 'utf32', expectedsize=4*size+4)
except MemoryError:
pass # acceptable on 32-bit
@precisionbigmemtest(size=_2G - 1, memuse=character_size + 1)
def test_encode_ascii(self, size):
return self.basic_encode_test(size, 'ascii', c='A')
@precisionbigmemtest(size=_4G // 5, memuse=character_size * (6 + 1))
def test_unicode_repr_overflow(self, size):
try:
s = "\uAAAA"*size
r = repr(s)
except MemoryError:
pass # acceptable on 32-bit
else:
self.assertTrue(s == eval(r))
@bigmemtest(minsize=_2G + 10, memuse=character_size * 2)
def test_format(self, size):
s = '-' * size
sf = '%s' % (s,)
self.assertEqual(s, sf)
del sf
sf = '..%s..' % (s,)
self.assertEqual(len(sf), len(s) + 4)
self.assertTrue(sf.startswith('..-'))
self.assertTrue(sf.endswith('-..'))
del s, sf
size //= 2
edge = '-' * size
s = ''.join([edge, '%s', edge])
del edge
s = s % '...'
self.assertEqual(len(s), size * 2 + 3)
self.assertEqual(s.count('.'), 3)
self.assertEqual(s.count('-'), size * 2)
@bigmemtest(minsize=_2G + 10, memuse=character_size * 2)
def test_repr_small(self, size):
s = '-' * size
s = repr(s)
self.assertEqual(len(s), size + 2)
self.assertEqual(s[0], "'")
self.assertEqual(s[-1], "'")
self.assertEqual(s.count('-'), size)
del s
# repr() will create a string four times as large as this 'binary
# string', but we don't want to allocate much more than twice
# size in total. (We do extra testing in test_repr_large())
size = size // 5 * 2
s = '\x00' * size
s = repr(s)
self.assertEqual(len(s), size * 4 + 2)
self.assertEqual(s[0], "'")
self.assertEqual(s[-1], "'")
self.assertEqual(s.count('\\'), size)
self.assertEqual(s.count('0'), size * 2)
@bigmemtest(minsize=_2G + 10, memuse=character_size * 5)
def test_repr_large(self, size):
s = '\x00' * size
s = repr(s)
self.assertEqual(len(s), size * 4 + 2)
self.assertEqual(s[0], "'")
self.assertEqual(s[-1], "'")
self.assertEqual(s.count('\\'), size)
self.assertEqual(s.count('0'), size * 2)
@bigmemtest(minsize=2**32 / 5, memuse=character_size * 7)
def test_unicode_repr(self, size):
s = "\uAAAA" * size
for f in (repr, ascii):
r = f(s)
self.assertTrue(len(r) > size)
self.assertTrue(r.endswith(r"\uaaaa'"), r[-10:])
del r
# The character takes 4 bytes even in UCS-2 builds because it will
# be decomposed into surrogates.
@bigmemtest(minsize=2**32 / 5, memuse=4 + character_size * 9)
def test_unicode_repr_wide(self, size):
s = "\U0001AAAA" * size
for f in (repr, ascii):
r = f(s)
self.assertTrue(len(r) > size)
self.assertTrue(r.endswith(r"\U0001aaaa'"), r[-12:])
del r
class BytesTest(unittest.TestCase, BaseStrTest):
def from_latin1(self, s):
return s.encode("latin1")
@bigmemtest(minsize=_2G + 2, memuse=1 + character_size)
def test_decode(self, size):
s = self.from_latin1('.') * size
self.assertEqual(len(s.decode('utf-8')), size)
class BytearrayTest(unittest.TestCase, BaseStrTest):
def from_latin1(self, s):
return bytearray(s.encode("latin1"))
@bigmemtest(minsize=_2G + 2, memuse=1 + character_size)
def test_decode(self, size):
s = self.from_latin1('.') * size
self.assertEqual(len(s.decode('utf-8')), size)
test_hash = None
test_split_large = None
class TupleTest(unittest.TestCase):
# Tuples have a small, fixed-sized head and an array of pointers to
# data. Since we're testing 64-bit addressing, we can assume that the
# pointers are 8 bytes, and that thus that the tuples take up 8 bytes
# per size.
# As a side-effect of testing long tuples, these tests happen to test
# having more than 2<<31 references to any given object. Hence the
# use of different types of objects as contents in different tests.
@bigmemtest(minsize=_2G + 2, memuse=16)
def test_compare(self, size):
t1 = ('',) * size
t2 = ('',) * size
self.assertEqual(t1, t2)
del t2
t2 = ('',) * (size + 1)
self.assertFalse(t1 == t2)
del t2
t2 = (1,) * size
self.assertFalse(t1 == t2)
# Test concatenating into a single tuple of more than 2G in length,
# and concatenating a tuple of more than 2G in length separately, so
# the smaller test still gets run even if there isn't memory for the
# larger test (but we still let the tester know the larger test is
# skipped, in verbose mode.)
def basic_concat_test(self, size):
t = ((),) * size
self.assertEqual(len(t), size)
t = t + t
self.assertEqual(len(t), size * 2)
@bigmemtest(minsize=_2G // 2 + 2, memuse=24)
def test_concat_small(self, size):
return self.basic_concat_test(size)
@bigmemtest(minsize=_2G + 2, memuse=24)
def test_concat_large(self, size):
return self.basic_concat_test(size)
@bigmemtest(minsize=_2G // 5 + 10, memuse=8 * 5)
def test_contains(self, size):
t = (1, 2, 3, 4, 5) * size
self.assertEqual(len(t), size * 5)
self.assertTrue(5 in t)
self.assertFalse((1, 2, 3, 4, 5) in t)
self.assertFalse(0 in t)
@bigmemtest(minsize=_2G + 10, memuse=8)
def test_hash(self, size):
t1 = (0,) * size
h1 = hash(t1)
del t1
t2 = (0,) * (size + 1)
self.assertFalse(h1 == hash(t2))
@bigmemtest(minsize=_2G + 10, memuse=8)
def test_index_and_slice(self, size):
t = (None,) * size
self.assertEqual(len(t), size)
self.assertEqual(t[-1], None)
self.assertEqual(t[5], None)
self.assertEqual(t[size - 1], None)
self.assertRaises(IndexError, operator.getitem, t, size)
self.assertEqual(t[:5], (None,) * 5)
self.assertEqual(t[-5:], (None,) * 5)
self.assertEqual(t[20:25], (None,) * 5)
self.assertEqual(t[-25:-20], (None,) * 5)
self.assertEqual(t[size - 5:], (None,) * 5)
self.assertEqual(t[size - 5:size], (None,) * 5)
self.assertEqual(t[size - 6:size - 2], (None,) * 4)
self.assertEqual(t[size:size], ())
self.assertEqual(t[size:size+5], ())
# Like test_concat, split in two.
def basic_test_repeat(self, size):
t = ('',) * size
self.assertEqual(len(t), size)
t = t * 2
self.assertEqual(len(t), size * 2)
@bigmemtest(minsize=_2G // 2 + 2, memuse=24)
def test_repeat_small(self, size):
return self.basic_test_repeat(size)
@bigmemtest(minsize=_2G + 2, memuse=24)
def test_repeat_large(self, size):
return self.basic_test_repeat(size)
@bigmemtest(minsize=_1G - 1, memuse=12)
def test_repeat_large_2(self, size):
return self.basic_test_repeat(size)
@precisionbigmemtest(size=_1G - 1, memuse=9)
def test_from_2G_generator(self, size):
try:
t = tuple(range(size))
except MemoryError:
pass # acceptable on 32-bit
else:
count = 0
for item in t:
self.assertEqual(item, count)
count += 1
self.assertEqual(count, size)
@precisionbigmemtest(size=_1G - 25, memuse=9)
def test_from_almost_2G_generator(self, size):
try:
t = tuple(range(size))
count = 0
for item in t:
self.assertEqual(item, count)
count += 1
self.assertEqual(count, size)
except MemoryError:
pass # acceptable, expected on 32-bit
# Like test_concat, split in two.
def basic_test_repr(self, size):
t = (0,) * size
s = repr(t)
# The repr of a tuple of 0's is exactly three times the tuple length.
self.assertEqual(len(s), size * 3)
self.assertEqual(s[:5], '(0, 0')
self.assertEqual(s[-5:], '0, 0)')
self.assertEqual(s.count('0'), size)
@bigmemtest(minsize=_2G // 3 + 2, memuse=8 + 3)
def test_repr_small(self, size):
return self.basic_test_repr(size)
@bigmemtest(minsize=_2G + 2, memuse=8 + 3)
def test_repr_large(self, size):
return self.basic_test_repr(size)
class ListTest(unittest.TestCase):
# Like tuples, lists have a small, fixed-sized head and an array of
# pointers to data, so 8 bytes per size. Also like tuples, we make the
# lists hold references to various objects to test their refcount
# limits.
@bigmemtest(minsize=_2G + 2, memuse=16)
def test_compare(self, size):
l1 = [''] * size
l2 = [''] * size
self.assertEqual(l1, l2)
del l2
l2 = [''] * (size + 1)
self.assertFalse(l1 == l2)
del l2
l2 = [2] * size
self.assertFalse(l1 == l2)
# Test concatenating into a single list of more than 2G in length,
# and concatenating a list of more than 2G in length separately, so
# the smaller test still gets run even if there isn't memory for the
# larger test (but we still let the tester know the larger test is
# skipped, in verbose mode.)
def basic_test_concat(self, size):
l = [[]] * size
self.assertEqual(len(l), size)
l = l + l
self.assertEqual(len(l), size * 2)
@bigmemtest(minsize=_2G // 2 + 2, memuse=24)
def test_concat_small(self, size):
return self.basic_test_concat(size)
@bigmemtest(minsize=_2G + 2, memuse=24)
def test_concat_large(self, size):
return self.basic_test_concat(size)
def basic_test_inplace_concat(self, size):
l = [sys.stdout] * size
l += l
self.assertEqual(len(l), size * 2)
self.assertTrue(l[0] is l[-1])
self.assertTrue(l[size - 1] is l[size + 1])
@bigmemtest(minsize=_2G // 2 + 2, memuse=24)
def test_inplace_concat_small(self, size):
return self.basic_test_inplace_concat(size)
@bigmemtest(minsize=_2G + 2, memuse=24)
def test_inplace_concat_large(self, size):
return self.basic_test_inplace_concat(size)
@bigmemtest(minsize=_2G // 5 + 10, memuse=8 * 5)
def test_contains(self, size):
l = [1, 2, 3, 4, 5] * size
self.assertEqual(len(l), size * 5)
self.assertTrue(5 in l)
self.assertFalse([1, 2, 3, 4, 5] in l)
self.assertFalse(0 in l)
@bigmemtest(minsize=_2G + 10, memuse=8)
def test_hash(self, size):
l = [0] * size
self.assertRaises(TypeError, hash, l)
@bigmemtest(minsize=_2G + 10, memuse=8)
def test_index_and_slice(self, size):
l = [None] * size
self.assertEqual(len(l), size)
self.assertEqual(l[-1], None)
self.assertEqual(l[5], None)
self.assertEqual(l[size - 1], None)
self.assertRaises(IndexError, operator.getitem, l, size)
self.assertEqual(l[:5], [None] * 5)
self.assertEqual(l[-5:], [None] * 5)
self.assertEqual(l[20:25], [None] * 5)
self.assertEqual(l[-25:-20], [None] * 5)
self.assertEqual(l[size - 5:], [None] * 5)
self.assertEqual(l[size - 5:size], [None] * 5)
self.assertEqual(l[size - 6:size - 2], [None] * 4)
self.assertEqual(l[size:size], [])
self.assertEqual(l[size:size+5], [])
l[size - 2] = 5
self.assertEqual(len(l), size)
self.assertEqual(l[-3:], [None, 5, None])
self.assertEqual(l.count(5), 1)
self.assertRaises(IndexError, operator.setitem, l, size, 6)
self.assertEqual(len(l), size)
l[size - 7:] = [1, 2, 3, 4, 5]
size -= 2
self.assertEqual(len(l), size)
self.assertEqual(l[-7:], [None, None, 1, 2, 3, 4, 5])
l[:7] = [1, 2, 3, 4, 5]
size -= 2
self.assertEqual(len(l), size)
self.assertEqual(l[:7], [1, 2, 3, 4, 5, None, None])
del l[size - 1]
size -= 1
self.assertEqual(len(l), size)
self.assertEqual(l[-1], 4)
del l[-2:]
size -= 2
self.assertEqual(len(l), size)
self.assertEqual(l[-1], 2)
del l[0]
size -= 1
self.assertEqual(len(l), size)
self.assertEqual(l[0], 2)
del l[:2]
size -= 2
self.assertEqual(len(l), size)
self.assertEqual(l[0], 4)
# Like test_concat, split in two.
def basic_test_repeat(self, size):
l = [] * size
self.assertFalse(l)
l = [''] * size
self.assertEqual(len(l), size)
l = l * 2
self.assertEqual(len(l), size * 2)
@bigmemtest(minsize=_2G // 2 + 2, memuse=24)
def test_repeat_small(self, size):
return self.basic_test_repeat(size)
@bigmemtest(minsize=_2G + 2, memuse=24)
def test_repeat_large(self, size):
return self.basic_test_repeat(size)
def basic_test_inplace_repeat(self, size):
l = ['']
l *= size
self.assertEqual(len(l), size)
self.assertTrue(l[0] is l[-1])
del l
l = [''] * size
l *= 2
self.assertEqual(len(l), size * 2)
self.assertTrue(l[size - 1] is l[-1])
@bigmemtest(minsize=_2G // 2 + 2, memuse=16)
def test_inplace_repeat_small(self, size):
return self.basic_test_inplace_repeat(size)
@bigmemtest(minsize=_2G + 2, memuse=16)
def test_inplace_repeat_large(self, size):
return self.basic_test_inplace_repeat(size)
def basic_test_repr(self, size):
l = [0] * size
s = repr(l)
# The repr of a list of 0's is exactly three times the list length.
self.assertEqual(len(s), size * 3)
self.assertEqual(s[:5], '[0, 0')
self.assertEqual(s[-5:], '0, 0]')
self.assertEqual(s.count('0'), size)
@bigmemtest(minsize=_2G // 3 + 2, memuse=8 + 3)
def test_repr_small(self, size):
return self.basic_test_repr(size)
@bigmemtest(minsize=_2G + 2, memuse=8 + 3)
def test_repr_large(self, size):
return self.basic_test_repr(size)
# list overallocates ~1/8th of the total size (on first expansion) so
# the single list.append call puts memuse at 9 bytes per size.
@bigmemtest(minsize=_2G, memuse=9)
def test_append(self, size):
l = [object()] * size
l.append(object())
self.assertEqual(len(l), size+1)
self.assertTrue(l[-3] is l[-2])
self.assertFalse(l[-2] is l[-1])
@bigmemtest(minsize=_2G // 5 + 2, memuse=8 * 5)
def test_count(self, size):
l = [1, 2, 3, 4, 5] * size
self.assertEqual(l.count(1), size)
self.assertEqual(l.count("1"), 0)
def basic_test_extend(self, size):
l = [object] * size
l.extend(l)
self.assertEqual(len(l), size * 2)
self.assertTrue(l[0] is l[-1])
self.assertTrue(l[size - 1] is l[size + 1])
@bigmemtest(minsize=_2G // 2 + 2, memuse=16)
def test_extend_small(self, size):
return self.basic_test_extend(size)
@bigmemtest(minsize=_2G + 2, memuse=16)
def test_extend_large(self, size):
return self.basic_test_extend(size)
@bigmemtest(minsize=_2G // 5 + 2, memuse=8 * 5)
def test_index(self, size):
l = [1, 2, 3, 4, 5] * size
size *= 5
self.assertEqual(l.index(1), 0)
self.assertEqual(l.index(5, size - 5), size - 1)
self.assertEqual(l.index(5, size - 5, size), size - 1)
self.assertRaises(ValueError, l.index, 1, size - 4, size)
self.assertRaises(ValueError, l.index, 6)
# This tests suffers from overallocation, just like test_append.
@bigmemtest(minsize=_2G + 10, memuse=9)
def test_insert(self, size):
l = [1.0] * size
l.insert(size - 1, "A")
size += 1
self.assertEqual(len(l), size)
self.assertEqual(l[-3:], [1.0, "A", 1.0])
l.insert(size + 1, "B")
size += 1
self.assertEqual(len(l), size)
self.assertEqual(l[-3:], ["A", 1.0, "B"])
l.insert(1, "C")
size += 1
self.assertEqual(len(l), size)
self.assertEqual(l[:3], [1.0, "C", 1.0])
self.assertEqual(l[size - 3:], ["A", 1.0, "B"])
@bigmemtest(minsize=_2G // 5 + 4, memuse=8 * 5)
def test_pop(self, size):
l = ["a", "b", "c", "d", "e"] * size
size *= 5
self.assertEqual(len(l), size)
item = l.pop()
size -= 1
self.assertEqual(len(l), size)
self.assertEqual(item, "e")
self.assertEqual(l[-2:], ["c", "d"])
item = l.pop(0)
size -= 1
self.assertEqual(len(l), size)
self.assertEqual(item, "a")
self.assertEqual(l[:2], ["b", "c"])
item = l.pop(size - 2)
size -= 1
self.assertEqual(len(l), size)
self.assertEqual(item, "c")
self.assertEqual(l[-2:], ["b", "d"])
@bigmemtest(minsize=_2G + 10, memuse=8)
def test_remove(self, size):
l = [10] * size
self.assertEqual(len(l), size)
l.remove(10)
size -= 1
self.assertEqual(len(l), size)
# Because of the earlier l.remove(), this append doesn't trigger
# a resize.
l.append(5)
size += 1
self.assertEqual(len(l), size)
self.assertEqual(l[-2:], [10, 5])
l.remove(5)
size -= 1
self.assertEqual(len(l), size)
self.assertEqual(l[-2:], [10, 10])
@bigmemtest(minsize=_2G // 5 + 2, memuse=8 * 5)
def test_reverse(self, size):
l = [1, 2, 3, 4, 5] * size
l.reverse()
self.assertEqual(len(l), size * 5)
self.assertEqual(l[-5:], [5, 4, 3, 2, 1])
self.assertEqual(l[:5], [5, 4, 3, 2, 1])
@bigmemtest(minsize=_2G // 5 + 2, memuse=8 * 5)
def test_sort(self, size):
l = [1, 2, 3, 4, 5] * size
l.sort()
self.assertEqual(len(l), size * 5)
self.assertEqual(l.count(1), size)
self.assertEqual(l[:10], [1] * 10)
self.assertEqual(l[-10:], [5] * 10)
def test_main():
support.run_unittest(StrTest, BytesTest, BytearrayTest,
TupleTest, ListTest)
if __name__ == '__main__':
if len(sys.argv) > 1:
support.set_memlimit(sys.argv[1])
test_main()
| gpl-3.0 | 2,800,543,251,407,446,000 | 34.590403 | 79 | 0.552608 | false |
dwhagar/snowboard | snowboard/connection.py | 1 | 6948 |
# This file is part of snowboard.
#
# snowboard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# snowboard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with snowboard. If not, see <http://www.gnu.org/licenses/>.
'''
Connection object, designed to be the only object to directly interface with
the server.
See https://github.com/dwhagar/snowboard/wiki/Class-Docs for documentation.
'''
import time
import socket
import ssl
import sys
from . import debug
from . import server
class Connection:
def __init__(self, srv):
self.host = srv.host
self.port = srv.port
self.__socket = None
self.__ssl = None
self.__connected = False
self.ssl = srv.ssl
self.sslVerify = True
self.retries = 3 # Numbers of times to retry a connection
self.delay = 1 # Delay between connection attempts
def connected(self):
'''Returns the state of the connection.'''
return self.__connected
def connect(self):
'''Connect to the configured server.'''
# Keep track of attempts.
attempt = 0
# Try until the connection succeeds or no more tries are left.
while (not self.__connected) and (attempt < self.retries):
# Attempt to establish a connection.
debug.message("Attempting connection to " + self.host + ":" + str(self.port) + ".")
try:
                socket.setdefaulttimeout(30)  # returns None; don't assign it to the socket
                self.__socket = socket.create_connection((self.host, self.port))
# Handle SSL
if self.ssl:
self.__context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
self.__context.options |= ssl.OP_NO_SSLv2
self.__context.options |= ssl.OP_NO_SSLv3
if self.sslVerify:
self.__context.verify_mode = ssl.CERT_REQUIRED
else:
self.__context.verify_mode = ssl.CERT_NONE
self.__ssl = self.__context.wrap_socket(self.__socket)
self.__ssl.setblocking(False)
# Handle not SSL
else:
self.__socket.setblocking(False)
self.__connected = True
# Assume connection errors are no big deal but do display an error.
except ConnectionAbortedError:
debug.error("Connection to " + self.host + " aborted by server.")
except ConnectionRefusedError:
debug.error("Connection to " + self.host + " refused by server.")
except TimeoutError:
debug.error("Connection to " + self.host + " timed out.")
except socket.gaierror:
debug.error("Failed to resolve " + self.host + ".")
except OSError as err:
debug.error("Failed to connect '" + err.errno + "' " + err.strerror + ".")
attempt += 1
time.sleep(self.delay)
return self.__connected
def disconnect(self):
'''Disconnect from the server.'''
debug.message("Disconnected from " + self.host + ":" + str(self.port) + ".")
        if self.ssl:  # check the instance flag, not the imported ssl module
if not self.__ssl is None:
self.__ssl.close()
self.__ssl = None
else:
if not self.__socket is None:
self.__socket.close()
self.__socket = None
self.__connected = False
def read(self):
'''Read a line of data from the server, if any.'''
# Only do something if we're connected.
if self.__connected:
done = False
received = ""
while not done:
try:
if self.ssl:
data = self.__ssl.recv(1)
else:
data = self.__socket.recv(1)
except (ssl.SSLWantReadError, BlockingIOError):
received = None
break
except OSError as err:
debug.error("Error #" + str(err.errno) + ": '" + err.strerror + "' disconnecting.")
data = False
# Process the data.
# socket.recv is supposed to return a False if the connection
# been broken.
if not data:
self.disconnect()
done = True
received = None
else:
text = data.decode('utf-8','replace')
if text == '\n':
done = True
else:
received += text
else:
received = None
# Remove the trailing carriage return character (cr/lf pair)
if not received is None:
received = received.strip('\r')
if len(received) > 0:
if received[0] == ':':
received = received[1:]
# Bug fix for Issue #18, do not return blank lines.
if received == "":
received = None
return received
def write(self, data):
'''Sends data to the server.'''
# Encode the data for the server.
data += '\n'
data = data.encode('utf-8')
# Prepare to keep track of what is being sent.
dataSent = 0
bufferSize = len(data)
if self.__connected:
# Loop to send the data.
while dataSent < bufferSize:
try:
if self.ssl:
sentNow = self.__ssl.send(data[dataSent:])
else:
sentNow = self.__socket.send(data[dataSent:])
except OSError as err:
debug.error("Error #" + str(err.errno) + ": '" + err.strerror + "' disconnecting.")
self.disconnect()
return False
# If nothing gets sent, we are disconnected from the server.
if sentNow == 0:
debug.error("Data could not be sent for an unknown reason, disconnecting.")
self.disconnect()
return False
# Keep track of the data.
dataSent += sentNow
else:
sent = False
# If sending completed, set the flag to true.
if dataSent == bufferSize:
sent = True
        return sent
| gpl-3.0 | 5,802,513,024,660,214,000 | 34.454082 | 103 | 0.517559 | false |
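A minimal session with the Connection class above might look like the sketch below (editor's illustration, not from the project; it fakes the server-config object that snowboard's Server class normally provides, and the host named is just an example network):

# --- illustrative usage sketch (stand-in config object, example host) ---
class _FakeServerConfig:
    host = 'irc.libera.chat'   # example only, not from the original code
    port = 6697
    ssl = True

conn = Connection(_FakeServerConfig())
if conn.connect():
    conn.write('NICK snowbot')
    conn.write('USER snowbot 0 * :snowboard sketch')
    line = conn.read()         # non-blocking: None until a full line arrives
    if line is not None:
        print(line)
    conn.disconnect()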
zarthur/restful-todo | tests/test_integration.py | 1 | 1726 |
import re
import threading
import unittest
from selenium import webdriver
from app import create_app, db
from models import Todo
class IntegrationTestCase(unittest.TestCase):
client = None
@classmethod
def setUpClass(cls):
# start Firefox
try:
cls.client = webdriver.Firefox()
        except Exception:
            pass  # browser unavailable; setUp() will skip the tests
if cls.client:
cls.app = create_app('testing')
cls.app_context = cls.app.app_context()
cls.app_context.push()
db.drop_all()
db.create_all()
todo = Todo(title='title1', body='body1')
db.session.add(todo)
db.session.commit()
threading.Thread(target=cls.app.run).start()
@classmethod
def tearDownClass(cls):
if cls.client:
cls.client.close()
db.drop_all()
db.session.remove()
cls.app_context.pop()
def setUp(self):
if not self.client:
self.skipTest('Web browser not available')
def tearDown(self):
pass
def test_home_page(self):
self.client.get('http://localhost:5000/')
self.assertTrue(re.search('RESTful', self.client.page_source))
def test_new_page(self):
self.client.get('http://localhost:5000/')
self.client.find_element_by_link_text('New Todo').click()
self.assertTrue('Back to list' in self.client.page_source)
self.client.find_element_by_name('title').send_keys('SelTitle')
self.client.find_element_by_name('body').send_keys('selenium body')
self.client.find_element_by_name('submit').click()
self.assertTrue(re.search('SelTitle', self.client.page_source))
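These assertions race the page load after the form submit; in slower environments the usual remedy is an explicit wait (editor's note; WebDriverWait, expected_conditions and By are standard Selenium APIs, and the condition shown is just one plausible choice):

# --- illustrative explicit-wait variant of the submit step ---
# from selenium.webdriver.support.ui import WebDriverWait
# from selenium.webdriver.support import expected_conditions as EC
# from selenium.webdriver.common.by import By
#
# self.client.find_element_by_name('submit').click()
# WebDriverWait(self.client, 10).until(
#     EC.presence_of_element_located((By.TAG_NAME, 'body')))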
| gpl-2.0 | -385,083,701,008,058,800 | 25.151515 | 75 | 0.593859 | false |
Southpaw-TACTIC/Team | src/python/Lib/site-packages/PySide/examples/demos/qtdemo/demotextitem.py | 1 | 2157 |
from PySide import QtCore, QtGui
from demoitem import DemoItem
class DemoTextItem(DemoItem):
STATIC_TEXT, DYNAMIC_TEXT = range(2)
def __init__(self, text, font, textColor, textWidth, scene=None,
parent=None, type=STATIC_TEXT, bgColor=QtGui.QColor()):
super(DemoTextItem, self).__init__(scene, parent)
self.type = type
self.text = text
self.font = font
self.textColor = textColor
self.bgColor = bgColor
self.textWidth = textWidth
self.noSubPixeling = True
def setText(self, text):
self.text = text
self.update()
def createImage(self, matrix):
if self.type == DemoTextItem.DYNAMIC_TEXT:
return None
sx = min(matrix.m11(), matrix.m22())
sy = max(matrix.m22(), sx)
textItem = QtGui.QGraphicsTextItem()
textItem.setHtml(self.text)
textItem.setTextWidth(self.textWidth)
textItem.setFont(self.font)
textItem.setDefaultTextColor(self.textColor)
textItem.document().setDocumentMargin(2)
w = textItem.boundingRect().width()
h = textItem.boundingRect().height()
image = QtGui.QImage(int(w * sx), int(h * sy),
QtGui.QImage.Format_ARGB32_Premultiplied)
image.fill(QtGui.QColor(0, 0, 0, 0).rgba())
painter = QtGui.QPainter(image)
painter.scale(sx, sy)
style = QtGui.QStyleOptionGraphicsItem()
textItem.paint(painter, style, None)
return image
def animationStarted(self, id=0):
self.noSubPixeling = False
def animationStopped(self, id=0):
self.noSubPixeling = True
def boundingRect(self):
if self.type == DemoTextItem.STATIC_TEXT:
return super(DemoTextItem, self).boundingRect()
# Sorry for using magic number.
return QtCore.QRectF(0, 0, 50, 20)
def paint(self, painter, option, widget):
if self.type == DemoTextItem.STATIC_TEXT:
super(DemoTextItem, self).paint(painter, option, widget)
return
painter.setPen(self.textColor)
painter.drawText(0, 0, self.text)
| epl-1.0 | -7,139,952,236,623,638,000 | 29.814286 | 68 | 0.618915 | false |
Jumpscale/core9 | JumpScale9/data/serializers/SerializerYAML.py | 1 | 3063 |
import yaml
from collections import OrderedDict
from js9 import j
from .SerializerBase import SerializerBase
testtoml="""
name = 'something'
multiline = '''
these are multiple lines
next line
'''
nr = 87
nr2 = 34.4
"""
# from .PrettyYAMLDumper import PrettyYaml
class SerializerYAML(SerializerBase):
def __init__(self):
SerializerBase.__init__(self)
def dumps(self, obj):
return yaml.dump(obj, default_flow_style=False, default_style='',indent=4,line_break="\n")
def loads(self, s):
# out=cStringIO.StringIO(s)
try:
return yaml.load(s)
except Exception as e:
error = "error:%s\n" % e
error += "\nyaml could not parse:\n%s\n" % s
raise j.exceptions.Input(message=error, level=1, source="", tags="", msgpub="")
def load(self, path):
try:
s = j.sal.fs.readFile(path)
except Exception as e:
error = "error:%s\n" % e
error += '\npath:%s\n' % path
raise j.exceptions.Input(message=error, level=1, source="", tags="", msgpub="")
try:
return yaml.load(s)
except Exception as e:
error = "error:%s\n" % e
error += "\nyaml could not parse:\n%s\n" % s
raise j.exceptions.Input(message=error, level=1, source="", tags="", msgpub="")
def ordered_load(self, stream, Loader=yaml.Loader, object_pairs_hook=OrderedDict):
"""
load a yaml stream and keep the order
"""
class OrderedLoader(Loader):
pass
def construct_mapping(loader, node):
loader.flatten_mapping(node)
return object_pairs_hook(loader.construct_pairs(node))
OrderedLoader.add_constructor(
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
construct_mapping)
return yaml.load(stream, OrderedLoader)
def ordered_dump(self, data, stream=None, Dumper=yaml.Dumper, **kwds):
"""
dump a yaml stream with keeping the order
"""
class OrderedDumper(Dumper):
pass
def _dict_representer(dumper, data):
return dumper.represent_mapping(
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
data.items())
OrderedDumper.add_representer(OrderedDict, _dict_representer)
return yaml.dump(data, stream, OrderedDumper, **kwds)
def test(self):
ddict=j.data.serializer.toml.loads(testtoml)
#TODO:*3 write some test
# from js9 import j
# from yaml import load, dump
# try:
# from yaml import CLoader as Loader, CDumper as Dumper
# except ImportError:
# from yaml import Loader, Dumper
# class YAMLTool:
# def decode(self,string):
# """
# decode yaml string to python object
# """
# return load(string)
# def encode(self,obj,width=120):
# """
# encode python (simple) objects to yaml
# """
# return dump(obj, width=width, default_flow_style=False)
#
| apache-2.0 | 5,639,020,786,048,849,000 | 28.171429 | 98 | 0.588965 | false |
rsalmaso/django-cms | cms/toolbar/items.py | 1 | 16937 | import json
from abc import ABCMeta
from collections import defaultdict
from django.template.loader import render_to_string
from django.utils.encoding import force_str
from django.utils.functional import Promise
from cms.constants import RIGHT, LEFT, REFRESH_PAGE, URL_CHANGE
class ItemSearchResult:
def __init__(self, item, index):
self.item = item
self.index = index
def __add__(self, other):
return ItemSearchResult(self.item, self.index + other)
def __sub__(self, other):
return ItemSearchResult(self.item, self.index - other)
def __int__(self):
return self.index
def may_be_lazy(thing):
if isinstance(thing, Promise):
return thing._proxy____args[0]
else:
return thing
class ToolbarAPIMixin(metaclass=ABCMeta):
REFRESH_PAGE = REFRESH_PAGE
URL_CHANGE = URL_CHANGE
LEFT = LEFT
RIGHT = RIGHT
def __init__(self):
self.items = []
self.menus = {}
self._memo = defaultdict(list)
def _memoize(self, item):
self._memo[item.__class__].append(item)
def _unmemoize(self, item):
self._memo[item.__class__].remove(item)
def _item_position(self, item):
return self.items.index(item)
def _add_item(self, item, position):
if position is not None:
self.items.insert(position, item)
else:
self.items.append(item)
def _remove_item(self, item):
if item in self.items:
self.items.remove(item)
else:
raise KeyError("Item %r not found" % item)
def get_item_count(self):
return len(self.items)
def add_item(self, item, position=None):
if not isinstance(item, BaseItem):
raise ValueError("Items must be subclasses of cms.toolbar.items.BaseItem, %r isn't" % item)
if isinstance(position, ItemSearchResult):
position = position.index
elif isinstance(position, BaseItem):
position = self._item_position(position)
        elif not (position is None or isinstance(position, int)):
raise ValueError("Position must be None, an integer, an item or an ItemSearchResult, got %r instead" % position)
self._add_item(item, position)
self._memoize(item)
return item
def find_items(self, item_type, **attributes):
results = []
attr_items = attributes.items()
notfound = object()
for candidate in self._memo[item_type]:
if all(may_be_lazy(getattr(candidate, key, notfound)) == value for key, value in attr_items):
results.append(ItemSearchResult(candidate, self._item_position(candidate)))
return results
def find_first(self, item_type, **attributes):
try:
return self.find_items(item_type, **attributes)[0]
except IndexError:
return None
#
# This will only work if it is used to determine the insert position for
# all items in the same menu.
#
def get_alphabetical_insert_position(self, new_menu_name, item_type,
default=0):
results = self.find_items(item_type)
# No items yet? Use the default value provided
if not len(results):
return default
last_position = 0
for result in sorted(results, key=lambda x: x.item.name):
if result.item.name > new_menu_name:
return result.index
if result.index > last_position:
last_position = result.index
else:
return last_position + 1
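    # Illustrative usage (added; menu names are hypothetical): keep sub-menus
    # alphabetical by computing the position before inserting, e.g.
    #
    #     pos = toolbar.get_alphabetical_insert_position('Pages', SubMenu)
    #     toolbar.get_or_create_menu('pages-menu', 'Pages', position=pos)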
def remove_item(self, item):
self._remove_item(item)
self._unmemoize(item)
def add_sideframe_item(self, name, url, active=False, disabled=False,
extra_classes=None, on_close=None, side=LEFT, position=None):
item = SideframeItem(name, url,
active=active,
disabled=disabled,
extra_classes=extra_classes,
on_close=on_close,
side=side,
)
self.add_item(item, position=position)
return item
def add_modal_item(self, name, url, active=False, disabled=False,
extra_classes=None, on_close=REFRESH_PAGE, side=LEFT, position=None):
item = ModalItem(name, url,
active=active,
disabled=disabled,
extra_classes=extra_classes,
on_close=on_close,
side=side,
)
self.add_item(item, position=position)
return item
def add_link_item(self, name, url, active=False, disabled=False,
extra_classes=None, side=LEFT, position=None):
item = LinkItem(name, url,
active=active,
disabled=disabled,
extra_classes=extra_classes,
side=side
)
self.add_item(item, position=position)
return item
def add_ajax_item(self, name, action, active=False, disabled=False,
extra_classes=None, data=None, question=None,
side=LEFT, position=None, on_success=None, method='POST'):
item = AjaxItem(name, action, self.csrf_token,
active=active,
disabled=disabled,
extra_classes=extra_classes,
data=data,
question=question,
side=side,
on_success=on_success,
method=method,
)
self.add_item(item, position=position)
return item
class BaseItem(metaclass=ABCMeta):
toolbar = None
template = None
def __init__(self, side=LEFT):
self.side = side
@property
def right(self):
return self.side is RIGHT
def render(self):
if self.toolbar:
template = self.toolbar.templates.get_cached_template(self.template)
return template.render(self.get_context())
# Backwards compatibility
return render_to_string(self.template, self.get_context())
def get_context(self):
return {}
class TemplateItem(BaseItem):
def __init__(self, template, extra_context=None, side=LEFT):
super().__init__(side)
self.template = template
self.extra_context = extra_context
def get_context(self):
if self.extra_context:
return self.extra_context
return {}
class SubMenu(ToolbarAPIMixin, BaseItem):
template = "cms/toolbar/items/menu.html"
sub_level = True
active = False
def __init__(self, name, csrf_token, disabled=False, side=LEFT):
ToolbarAPIMixin.__init__(self)
BaseItem.__init__(self, side)
self.name = name
self.disabled = disabled
self.csrf_token = csrf_token
def __repr__(self):
return '<Menu:%s>' % force_str(self.name)
def add_break(self, identifier=None, position=None):
item = Break(identifier)
self.add_item(item, position=position)
return item
def get_items(self):
items = self.items
for item in items:
item.toolbar = self.toolbar
if hasattr(item, 'disabled'):
item.disabled = self.disabled or item.disabled
return items
def get_context(self):
return {
'active': self.active,
'disabled': self.disabled,
'items': self.get_items(),
'title': self.name,
'sub_level': self.sub_level
}
class Menu(SubMenu):
sub_level = False
def get_or_create_menu(self, key, verbose_name, disabled=False, side=LEFT, position=None):
if key in self.menus:
return self.menus[key]
menu = SubMenu(verbose_name, self.csrf_token, disabled=disabled, side=side)
self.menus[key] = menu
self.add_item(menu, position=position)
return menu
class LinkItem(BaseItem):
template = "cms/toolbar/items/item_link.html"
def __init__(self, name, url, active=False, disabled=False, extra_classes=None, side=LEFT):
super().__init__(side)
self.name = name
self.url = url
self.active = active
self.disabled = disabled
self.extra_classes = extra_classes or []
def __repr__(self):
return '<LinkItem:%s>' % force_str(self.name)
def get_context(self):
return {
'url': self.url,
'name': self.name,
'active': self.active,
'disabled': self.disabled,
'extra_classes': self.extra_classes,
}
class FrameItem(BaseItem):
# Be sure to define the correct template
def __init__(self, name, url, active=False, disabled=False,
extra_classes=None, on_close=None, side=LEFT):
super().__init__(side)
self.name = "%s..." % force_str(name)
self.url = url
self.active = active
self.disabled = disabled
self.extra_classes = extra_classes or []
self.on_close = on_close
def __repr__(self):
# Should be overridden
return '<FrameItem:%s>' % force_str(self.name)
def get_context(self):
return {
'url': self.url,
'name': self.name,
'active': self.active,
'disabled': self.disabled,
'extra_classes': self.extra_classes,
'on_close': self.on_close,
}
class SideframeItem(FrameItem):
template = "cms/toolbar/items/item_sideframe.html"
def __repr__(self):
return '<SideframeItem:%s>' % force_str(self.name)
class ModalItem(FrameItem):
template = "cms/toolbar/items/item_modal.html"
def __repr__(self):
return '<ModalItem:%s>' % force_str(self.name)
class AjaxItem(BaseItem):
template = "cms/toolbar/items/item_ajax.html"
def __init__(self, name, action, csrf_token, data=None, active=False,
disabled=False, extra_classes=None,
question=None, side=LEFT, on_success=None, method='POST'):
super().__init__(side)
self.name = name
self.action = action
self.active = active
self.disabled = disabled
self.csrf_token = csrf_token
self.data = data or {}
self.extra_classes = extra_classes or []
self.question = question
self.on_success = on_success
self.method = method
def __repr__(self):
return '<AjaxItem:%s>' % force_str(self.name)
def get_context(self):
data = self.data.copy()
if self.method not in ('GET', 'HEAD', 'OPTIONS', 'TRACE'):
data['csrfmiddlewaretoken'] = self.csrf_token
return {
'action': self.action,
'name': self.name,
'active': self.active,
'disabled': self.disabled,
'extra_classes': self.extra_classes,
'data': json.dumps(data),
'question': self.question,
'on_success': self.on_success,
'method': self.method,
}
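    # Note (added): for state-changing methods the CSRF token is merged into
    # the serialized payload, e.g. data='{"pk": 1, "csrfmiddlewaretoken": "..."}'
    # for POST, while safe methods (GET/HEAD/OPTIONS/TRACE) omit it.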
class Break(BaseItem):
template = "cms/toolbar/items/break.html"
def __init__(self, identifier=None):
self.identifier = identifier
class BaseButton(metaclass=ABCMeta):
toolbar = None
template = None
def render(self):
if self.toolbar:
template = self.toolbar.templates.get_cached_template(self.template)
return template.render(self.get_context())
# Backwards compatibility
return render_to_string(self.template, self.get_context())
def get_context(self):
return {}
class Button(BaseButton):
template = "cms/toolbar/items/button.html"
def __init__(self, name, url, active=False, disabled=False,
extra_classes=None):
self.name = name
self.url = url
self.active = active
self.disabled = disabled
self.extra_classes = extra_classes or []
def __repr__(self):
return '<Button:%s>' % force_str(self.name)
def get_context(self):
return {
'name': self.name,
'url': self.url,
'active': self.active,
'disabled': self.disabled,
'extra_classes': self.extra_classes,
}
class ModalButton(Button):
template = "cms/toolbar/items/button_modal.html"
def __init__(self, name, url, active=False, disabled=False, extra_classes=None, on_close=None):
self.name = name
self.url = url
self.active = active
self.disabled = disabled
self.extra_classes = extra_classes or []
self.on_close = on_close
def __repr__(self):
return '<ModalButton:%s>' % force_str(self.name)
def get_context(self):
return {
'name': self.name,
'url': self.url,
'active': self.active,
'disabled': self.disabled,
'extra_classes': self.extra_classes,
'on_close': self.on_close,
}
class SideframeButton(ModalButton):
template = "cms/toolbar/items/button_sideframe.html"
def __repr__(self):
return '<SideframeButton:%s>' % force_str(self.name)
class ButtonList(BaseItem):
template = "cms/toolbar/items/button_list.html"
def __init__(self, identifier=None, extra_classes=None, side=LEFT):
super().__init__(side)
self.extra_classes = extra_classes or []
self.buttons = []
self.identifier = identifier
def __repr__(self):
return '<ButtonList:%s>' % self.identifier
def add_item(self, item):
if not isinstance(item, Button):
raise ValueError("Expected instance of cms.toolbar.items.Button, got %r instead" % item)
self.buttons.append(item)
def add_button(self, name, url, active=False, disabled=False,
extra_classes=None):
item = Button(name, url,
active=active,
disabled=disabled,
extra_classes=extra_classes
)
self.buttons.append(item)
return item
def add_modal_button(self, name, url, active=False, disabled=False, extra_classes=None, on_close=REFRESH_PAGE):
item = ModalButton(name, url,
active=active,
disabled=disabled,
extra_classes=extra_classes,
on_close=on_close,
)
self.buttons.append(item)
return item
def add_sideframe_button(self, name, url, active=False, disabled=False, extra_classes=None, on_close=None):
item = SideframeButton(name, url,
active=active,
disabled=disabled,
extra_classes=extra_classes,
on_close=on_close,
)
self.buttons.append(item)
return item
def get_buttons(self):
for button in self.buttons:
button.toolbar = self.toolbar
yield button
def get_context(self):
context = {
'buttons': list(self.get_buttons()),
'extra_classes': self.extra_classes
}
if self.toolbar:
context['cms_structure_on'] = self.toolbar.structure_mode_url_on
return context
class Dropdown(ButtonList):
template = "cms/toolbar/items/dropdown.html"
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.primary_button = None
def __repr__(self):
        return '<Dropdown:%s>' % self.identifier
def add_primary_button(self, button):
self.primary_button = button
def get_buttons(self):
for button in self.buttons:
button.toolbar = self.toolbar
button.is_in_dropdown = True
yield button
def get_context(self):
return {
'primary_button': self.primary_button,
'buttons': list(self.get_buttons()),
'extra_classes': self.extra_classes,
}
class DropdownToggleButton(BaseButton):
template = "cms/toolbar/items/dropdown_button.html"
has_no_action = True
def __init__(self, name, active=False, disabled=False,
extra_classes=None):
self.name = name
self.active = active
self.disabled = disabled
self.extra_classes = extra_classes or []
def __repr__(self):
return '<DropdownToggleButton:%s>' % force_str(self.name)
def get_context(self):
return {
'name': self.name,
'active': self.active,
'disabled': self.disabled,
'extra_classes': self.extra_classes,
}
| bsd-3-clause | 1,316,855,163,184,581,600 | 29.46223 | 124 | 0.56598 | false |
krissrex/python_projects | Projects/Oving10-itgk/main.py | 1 | 6889 | # -*- coding: utf-8 -*-
"""
Created on Sun Nov 9 00:06:24 2014
@author: kristian
"""
from skumleskogen import *
import time
################## OPTIONS ##################
debug_on = True
write_to_file = True
hukommelse = {}
sti_totalt = ["inn"]
noder_med_lås = set()
forrige_retning = []
file = None
try:
    del print  # drop any earlier shadowing so the builtin name is restored
except NameError:
    pass
_print = print
class Print_To_File(object):
def __init__(self, *text):
        _print(*text)
string = ""
for t in text:
string += str(t)
if file:
file.write("\n" + string)
if write_to_file:
print = Print_To_File
file = open("output.txt", mode="a")
class MovementException(Exception):
def __init__(self, error):
self.error = error
def __str__(self):
return str(self.error)
def start_solving():
print("Er inngang:", er_inngang())
nøkler = 0
while True:
debug()
husk_node()
if er_stank():
if gaa_tilbake():
sti_totalt.append("STANK! tilbake til " + str(nummer()))
                kom_fra_retning = forrige_retning.pop()
continue
if er_nokkel():
if plukk_opp():
nøkler += 1
sti_totalt.append("plukket nøkkel " + str(nøkler))
continue
if (not hukommelse[nummer()]["venstre"]) \
or kan_låse_opp(nummer(), nøkler, "venstre"):
try:
hukommelse[nummer()]["lås"][0] = False
hukommelse[nummer()]["superlås"][0] = False
besøk_node("venstre")
except MovementException as ex:
print(ex)
else:
forrige_retning.append("venstre")
sti_totalt.append("venstre " + str(nummer()))
continue
if (not hukommelse[nummer()]["høyre"]) \
or kan_låse_opp(nummer(), nøkler, "høyre"):
try:
hukommelse[nummer()]["lås"][1] = False
hukommelse[nummer()]["superlås"][1] = False
besøk_node("høyre")
except MovementException as ex:
print(ex)
else:
forrige_retning.append("høyre")
sti_totalt.append("høyre " + str(nummer()))
continue
if er_laas():
noder_med_lås.add(nummer())
if er_superlaas():
if nøkler >= 2:
utfall = laas_opp()
if utfall:
nøkler -= 2
sti_totalt.append("låste opp sl " + str(nøkler))
if nummer() in noder_med_lås:
noder_med_lås.remove(nummer())
continue
else:
noder_med_lås.add(nummer())
else:
if nøkler >= 1:
utfall = laas_opp()
if utfall:
nøkler -= 1
sti_totalt.append("låste opp s " + str(nøkler))
if nummer() in noder_med_lås:
noder_med_lås.remove(nummer())
continue
if er_utgang():
gaa_ut()
return
        # We are stuck. Some nodes must be locked.
har_lås = er_laas()
har_superlås = er_superlaas()
if har_lås and har_superlås:
            # The lock was not an ordinary lock, but a superlock.
har_lås = False
if barn_har_lås(nummer()):
har_lås = True
if barn_har_superlås(nummer()):
har_superlås = True
if gaa_tilbake():
sti_totalt.append("tilbake til " + str(nummer()))
            kom_fra_retning = forrige_retning.pop()
print("kom fra:", kom_fra_retning)
if har_lås:
print("har lås")
if kom_fra_retning == "venstre":
hukommelse[nummer()]["lås"][0] = True
else:
hukommelse[nummer()]["lås"][1] = True
if har_superlås:
print("har superlås")
if kom_fra_retning == "venstre":
hukommelse[nummer()]["superlås"][0] = True
else:
hukommelse[nummer()]["superlås"][1] = True
print(hukommelse[nummer()])
else:
print("KLARTE IKKE Å GÅ TILBAKE!!!")
return
def kan_låse_opp(n, nøkler, retning):
indeks = 0
if retning == "høyre":
indeks = 1
if hukommelse[n]["lås"][indeks] and (nøkler >= 1):
return True
if hukommelse[n]["superlås"][indeks] and (nøkler >= 2):
return True
return False
def barn_har_lås(n):
return hukommelse[n]["lås"][0] or hukommelse[n]["lås"][1]
def barn_har_superlås(n):
return hukommelse[n]["superlås"][0] or hukommelse[n]["superlås"][1]
def husk_node():
n = nummer()
if n not in hukommelse:
hukommelse[n] = {"venstre": False, "høyre": False,
"lås": [False, False], "superlås": [False, False]}
def besøk_node(retning):
n = nummer()
utfall = False
if retning == "venstre":
utfall = gaa_venstre()
elif retning == "høyre":
utfall = gaa_hoyre()
else:
print("Ugyldig retning oppgitt!", n, retning)
return
if utfall:
hukommelse[n][retning] = True
else:
if er_laas():
raise MovementException("Er låst")
else:
raise MovementException("Er blindvei")
def debug():
if debug_on:
print("/"*25 + "DEBUG:" + "/"*25)
print(("Nummer: {n}\n" +
"Type:\n " +
"i: {i}, l: {l}, sl: {sl}, st: {st}, nk: {nk}, v: {v}, u: {u}" +
"\nLabel: {la}")
.format(n=nummer(), i=er_inngang(), l=er_laas(),
sl=er_superlaas(), st=er_stank(), u=er_utgang(),
v=er_vanlig(), nk=er_nokkel(), la=label(nummer())))
def main():
# Initialisation.
def get_hours():
return time.asctime().split(' ')[4]
start_time = time.time()
print("Starting. Time:", get_hours())
# Start solving the maze.
try:
start_solving()
# In case of failure, e.g. a rabbit ate you.
except Exception as e:
print("Exception occured:")
print(e)
print("Exciting. Time:", get_hours())
# Done, do final actions.
finally:
print("\nRan for {0} seconds.".format(
abs(
round(start_time - time.time(), 4))))
print("Maze completed.")
print(sti_totalt)
if __name__ == "__main__":
main()
if file:
file.close()
| mit | -6,669,817,222,215,976,000 | 26.135458 | 79 | 0.474233 | false |
sajuptpm/neutron-ipam | neutron/services/firewall/drivers/linux/iptables_fwaas.py | 1 | 11774 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 Dell Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Rajesh Mohan, [email protected], DELL Inc.
from neutron.agent.linux import iptables_manager
from neutron.extensions import firewall as fw_ext
from neutron.openstack.common import log as logging
from neutron.services.firewall.drivers import fwaas_base
LOG = logging.getLogger(__name__)
FWAAS_DRIVER_NAME = 'Fwaas iptables driver'
FWAAS_CHAIN = 'fwaas'
FWAAS_DEFAULT_CHAIN = 'fwaas-default-policy'
INGRESS_DIRECTION = 'ingress'
EGRESS_DIRECTION = 'egress'
CHAIN_NAME_PREFIX = {INGRESS_DIRECTION: 'i',
EGRESS_DIRECTION: 'o'}
""" Firewall rules are applied on internal-interfaces of Neutron router.
The packets ingressing tenant's network will be on the output
direction on internal-interfaces.
"""
IPTABLES_DIR = {INGRESS_DIRECTION: '-o',
EGRESS_DIRECTION: '-i'}
IPV4 = 'ipv4'
IPV6 = 'ipv6'
IP_VER_TAG = {IPV4: 'v4',
IPV6: 'v6'}
class IptablesFwaasDriver(fwaas_base.FwaasDriverBase):
"""IPTables driver for Firewall As A Service."""
def __init__(self):
LOG.debug(_("Initializing fwaas iptables driver"))
def create_firewall(self, apply_list, firewall):
LOG.debug(_('Creating firewall %(fw_id)s for tenant %(tid)s)'),
{'fw_id': firewall['id'], 'tid': firewall['tenant_id']})
try:
if firewall['admin_state_up']:
self._setup_firewall(apply_list, firewall)
else:
self.apply_default_policy(apply_list, firewall)
except (LookupError, RuntimeError):
# catch known library exceptions and raise Fwaas generic exception
LOG.exception(_("Failed to create firewall: %s"), firewall['id'])
raise fw_ext.FirewallInternalDriverError(driver=FWAAS_DRIVER_NAME)
def delete_firewall(self, apply_list, firewall):
LOG.debug(_('Deleting firewall %(fw_id)s for tenant %(tid)s)'),
{'fw_id': firewall['id'], 'tid': firewall['tenant_id']})
fwid = firewall['id']
try:
for router_info in apply_list:
ipt_mgr = router_info.iptables_manager
self._remove_chains(fwid, ipt_mgr)
self._remove_default_chains(ipt_mgr)
# apply the changes immediately (no defer in firewall path)
ipt_mgr.defer_apply_off()
except (LookupError, RuntimeError):
# catch known library exceptions and raise Fwaas generic exception
LOG.exception(_("Failed to delete firewall: %s"), fwid)
raise fw_ext.FirewallInternalDriverError(driver=FWAAS_DRIVER_NAME)
def update_firewall(self, apply_list, firewall):
LOG.debug(_('Updating firewall %(fw_id)s for tenant %(tid)s)'),
{'fw_id': firewall['id'], 'tid': firewall['tenant_id']})
try:
if firewall['admin_state_up']:
self._setup_firewall(apply_list, firewall)
else:
self.apply_default_policy(apply_list, firewall)
except (LookupError, RuntimeError):
# catch known library exceptions and raise Fwaas generic exception
LOG.exception(_("Failed to update firewall: %s"), firewall['id'])
raise fw_ext.FirewallInternalDriverError(driver=FWAAS_DRIVER_NAME)
def apply_default_policy(self, apply_list, firewall):
LOG.debug(_('Applying firewall %(fw_id)s for tenant %(tid)s)'),
{'fw_id': firewall['id'], 'tid': firewall['tenant_id']})
fwid = firewall['id']
try:
for router_info in apply_list:
ipt_mgr = router_info.iptables_manager
# the following only updates local memory; no hole in FW
self._remove_chains(fwid, ipt_mgr)
self._remove_default_chains(ipt_mgr)
# create default 'DROP ALL' policy chain
self._add_default_policy_chain_v4v6(ipt_mgr)
self._enable_policy_chain(fwid, ipt_mgr)
# apply the changes immediately (no defer in firewall path)
ipt_mgr.defer_apply_off()
except (LookupError, RuntimeError):
# catch known library exceptions and raise Fwaas generic exception
LOG.exception(_("Failed to apply default policy on firewall: %s"),
fwid)
raise fw_ext.FirewallInternalDriverError(driver=FWAAS_DRIVER_NAME)
def _setup_firewall(self, apply_list, firewall):
fwid = firewall['id']
for router_info in apply_list:
ipt_mgr = router_info.iptables_manager
# the following only updates local memory; no hole in FW
self._remove_chains(fwid, ipt_mgr)
self._remove_default_chains(ipt_mgr)
# create default 'DROP ALL' policy chain
self._add_default_policy_chain_v4v6(ipt_mgr)
#create chain based on configured policy
self._setup_chains(firewall, ipt_mgr)
# apply the changes immediately (no defer in firewall path)
ipt_mgr.defer_apply_off()
def _get_chain_name(self, fwid, ver, direction):
return '%s%s%s' % (CHAIN_NAME_PREFIX[direction],
IP_VER_TAG[ver],
fwid)
def _setup_chains(self, firewall, ipt_mgr):
"""Create Fwaas chain using the rules in the policy
"""
fw_rules_list = firewall['firewall_rule_list']
fwid = firewall['id']
#default rules for invalid packets and established sessions
invalid_rule = self._drop_invalid_packets_rule()
est_rule = self._allow_established_rule()
for ver in [IPV4, IPV6]:
if ver == IPV4:
table = ipt_mgr.ipv4['filter']
else:
table = ipt_mgr.ipv6['filter']
ichain_name = self._get_chain_name(fwid, ver, INGRESS_DIRECTION)
ochain_name = self._get_chain_name(fwid, ver, EGRESS_DIRECTION)
for name in [ichain_name, ochain_name]:
table.add_chain(name)
table.add_rule(name, invalid_rule)
table.add_rule(name, est_rule)
for rule in fw_rules_list:
if not rule['enabled']:
continue
iptbl_rule = self._convert_fwaas_to_iptables_rule(rule)
if rule['ip_version'] == 4:
ver = IPV4
table = ipt_mgr.ipv4['filter']
else:
ver = IPV6
table = ipt_mgr.ipv6['filter']
ichain_name = self._get_chain_name(fwid, ver, INGRESS_DIRECTION)
ochain_name = self._get_chain_name(fwid, ver, EGRESS_DIRECTION)
table.add_rule(ichain_name, iptbl_rule)
table.add_rule(ochain_name, iptbl_rule)
self._enable_policy_chain(fwid, ipt_mgr)
def _remove_default_chains(self, nsid):
"""Remove fwaas default policy chain."""
self._remove_chain_by_name(IPV4, FWAAS_DEFAULT_CHAIN, nsid)
self._remove_chain_by_name(IPV6, FWAAS_DEFAULT_CHAIN, nsid)
def _remove_chains(self, fwid, ipt_mgr):
"""Remove fwaas policy chain."""
for ver in [IPV4, IPV6]:
for direction in [INGRESS_DIRECTION, EGRESS_DIRECTION]:
chain_name = self._get_chain_name(fwid, ver, direction)
self._remove_chain_by_name(ver, chain_name, ipt_mgr)
def _add_default_policy_chain_v4v6(self, ipt_mgr):
ipt_mgr.ipv4['filter'].add_chain(FWAAS_DEFAULT_CHAIN)
ipt_mgr.ipv4['filter'].add_rule(FWAAS_DEFAULT_CHAIN, '-j DROP')
ipt_mgr.ipv6['filter'].add_chain(FWAAS_DEFAULT_CHAIN)
ipt_mgr.ipv6['filter'].add_rule(FWAAS_DEFAULT_CHAIN, '-j DROP')
def _remove_chain_by_name(self, ver, chain_name, ipt_mgr):
if ver == IPV4:
ipt_mgr.ipv4['filter'].ensure_remove_chain(chain_name)
else:
ipt_mgr.ipv6['filter'].ensure_remove_chain(chain_name)
def _add_rules_to_chain(self, ipt_mgr, ver, chain_name, rules):
if ver == IPV4:
table = ipt_mgr.ipv4['filter']
else:
table = ipt_mgr.ipv6['filter']
for rule in rules:
table.add_rule(chain_name, rule)
def _enable_policy_chain(self, fwid, ipt_mgr):
bname = iptables_manager.binary_name
for (ver, tbl) in [(IPV4, ipt_mgr.ipv4['filter']),
(IPV6, ipt_mgr.ipv6['filter'])]:
for direction in [INGRESS_DIRECTION, EGRESS_DIRECTION]:
chain_name = self._get_chain_name(fwid, ver, direction)
chain_name = iptables_manager.get_chain_name(chain_name)
if chain_name in tbl.chains:
jump_rule = ['%s qr-+ -j %s-%s' % (IPTABLES_DIR[direction],
bname, chain_name)]
self._add_rules_to_chain(ipt_mgr, ver, 'FORWARD',
jump_rule)
#jump to DROP_ALL policy
chain_name = iptables_manager.get_chain_name(FWAAS_DEFAULT_CHAIN)
jump_rule = ['-o qr-+ -j %s-%s' % (bname, chain_name)]
self._add_rules_to_chain(ipt_mgr, IPV4, 'FORWARD', jump_rule)
self._add_rules_to_chain(ipt_mgr, IPV6, 'FORWARD', jump_rule)
#jump to DROP_ALL policy
chain_name = iptables_manager.get_chain_name(FWAAS_DEFAULT_CHAIN)
jump_rule = ['-i qr-+ -j %s-%s' % (bname, chain_name)]
self._add_rules_to_chain(ipt_mgr, IPV4, 'FORWARD', jump_rule)
self._add_rules_to_chain(ipt_mgr, IPV6, 'FORWARD', jump_rule)
def _convert_fwaas_to_iptables_rule(self, rule):
        action = 'ACCEPT' if rule.get('action') == 'allow' else 'DROP'
args = [self._protocol_arg(rule.get('protocol')),
self._port_arg('dport',
rule.get('protocol'),
rule.get('destination_port')),
self._port_arg('sport',
rule.get('protocol'),
rule.get('source_port')),
self._ip_prefix_arg('s', rule.get('source_ip_address')),
self._ip_prefix_arg('d', rule.get('destination_ip_address')),
self._action_arg(action)]
iptables_rule = ' '.join(args)
return iptables_rule
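    # Illustrative conversion (added; values are hypothetical): the FWaaS rule
    #     {'action': 'allow', 'protocol': 'tcp', 'destination_port': '80',
    #      'source_ip_address': '10.0.0.0/24'}
    # becomes the iptables fragment
    #     '-p tcp --dport 80  -s 10.0.0.0/24  -j ACCEPT'
    # (empty optional args leave extra spaces, which iptables parsing accepts).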
def _drop_invalid_packets_rule(self):
return '-m state --state INVALID -j DROP'
def _allow_established_rule(self):
return '-m state --state ESTABLISHED,RELATED -j ACCEPT'
def _action_arg(self, action):
if action:
return '-j %s' % action
return ''
def _protocol_arg(self, protocol):
if protocol:
return '-p %s' % protocol
return ''
def _port_arg(self, direction, protocol, port):
if not (protocol in ['udp', 'tcp'] and port):
return ''
return '--%s %s' % (direction, port)
def _ip_prefix_arg(self, direction, ip_prefix):
if ip_prefix:
return '-%s %s' % (direction, ip_prefix)
return ''
| apache-2.0 | 4,463,777,469,175,995,000 | 41.65942 | 79 | 0.58213 | false |
ant31/kpm | kpm/commands/kexec.py | 1 | 1587 | from kpm.console import KubernetesExec
from kpm.commands.command_base import CommandBase
class ExecCmd(CommandBase):
name = 'exec'
help_message = "exec a command in pod from the RC or RS name.\
It executes the command on the first matching pod'"
def __init__(self, options):
self.output = options.output
self.kind = options.kind
self.container = options.container
self.namespace = options.namespace
self.resource = options.name
self.cmd = options.cmd
self.result = None
super(ExecCmd, self).__init__(options)
@classmethod
def _add_arguments(self, parser):
parser.add_argument('cmd', nargs='+', help="command to execute")
parser.add_argument("--namespace", nargs="?",
help="kubernetes namespace", default='default')
parser.add_argument('-k', '--kind', choices=['deployment', 'rs', 'rc'], nargs="?",
help="deployment, rc or rs", default='rc')
parser.add_argument('-n', '--name', help="resource name", default='rs')
parser.add_argument('-c', '--container', nargs='?', help="container name", default=None)
def _call(self):
c = KubernetesExec(self.resource,
cmd=" ".join(self.cmd),
namespace=self.namespace,
container=self.container,
kind=self.kind)
self.result = c.call()
def _render_json(self):
pass
def _render_console(self):
        print(self.result)
| apache-2.0 | 6,065,838,701,070,110,000 | 35.068182 | 96 | 0.570258 | false |
jim-easterbrook/pywws | src/pywws/process.py | 1 | 29244 | # pywws - Python software for USB Wireless Weather Stations
# http://github.com/jim-easterbrook/pywws
# Copyright (C) 2008-21 pywws contributors
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""Generate hourly, daily & monthly summaries of raw weather station
data
::
%s
This module takes raw weather station data (typically sampled every
five or ten minutes) and generates hourly, daily and monthly summary
data, which is useful when creating tables and graphs.
Before computing the data summaries, raw data is "calibrated" using a
user-programmable function. See :doc:`pywws.calib` for details.
The hourly data is derived from all the records in one hour, e.g. from
18:00:00 to 18:59:59, and is given the index of the last complete
record in that hour.
The daily data summarises the weather over a 24 hour period typically
ending at 2100 or 0900 hours, local (non DST) time, though midnight is
another popular convention. It is also indexed by the last complete
record in the period. Daytime and nighttime, as used when computing
maximum and minimum temperatures, are assumed to start at 0900 and
2100 local time, or 1000 and 2200 when DST is in effect, regardless of
the meteorological day.
To adjust the meteorological day to your preference, or that used by
your local official weather station, edit the "day end hour" line in
your ``weather.ini`` file, then run :mod:`pywws.reprocess` to regenerate
the summaries.
Monthly summary data is computed from the daily summary data. If the
meteorological day does not end at midnight, then each month may begin
and end up to 12 hours before or after midnight.
Wind speed data is averaged over the hour (or day) and the maximum
gust speed during the hour (or day) is recorded. The predominant wind
direction is calculated using vector arithmetic.
Rainfall is converted from the raw "total since last reset" figure to
a more useful total in the last hour, day or month.
"""
from __future__ import absolute_import, print_function
__docformat__ = "restructuredtext en"
__usage__ = """
usage: python -m pywws.process [options] data_dir
options are:
-h or --help display this help
-v or --verbose increase number of informative messages
data_dir is the root directory of the weather data
"""
__doc__ %= __usage__
__usage__ = __doc__.split('\n')[0] + __usage__
from ast import literal_eval
from collections import deque
from datetime import date, datetime, timedelta
import getopt
import logging
import math
import os
import sys
from pywws.calib import Calib
from pywws.constants import HOUR, DAY, SECOND
import pywws.logger
import pywws.storage
from pywws.timezone import time_zone
logger = logging.getLogger(__name__)
TIME_ERR = timedelta(seconds=45)
MINUTEx5 = timedelta(minutes=5)
HOURx3 = timedelta(hours=3)
WEEK = timedelta(days=7)
class Average(object):
"""Compute average of multiple data values."""
def __init__(self):
self.acc = 0.0
self.count = 0
def add(self, value):
if value is None:
return
self.acc += value
self.count += 1
def result(self):
if self.count == 0:
return None
return self.acc / float(self.count)
class Minimum(object):
"""Compute minimum value and timestamp of multiple data values."""
def __init__(self):
self.value = None
self.time = None
def add(self, value, time):
if not self.time or value <= self.value:
self.value = value
self.time = time
def result(self):
if self.time:
return self.value, self.time
return None, None
class Maximum(object):
"""Compute maximum value and timestamp of multiple data values."""
def __init__(self):
self.value = None
self.time = None
def add(self, value, time):
if not self.time or value > self.value:
self.value = value
self.time = time
def result(self):
if self.time:
return self.value, self.time
return None, None
sin_LUT = list(map(
lambda x: math.sin(math.radians(float(x * 360) / 16.0)), range(16)))
cos_LUT = list(map(
lambda x: math.cos(math.radians(float(x * 360) / 16.0)), range(16)))
class WindFilter(object):
"""Compute average wind speed and direction.
The wind speed and direction of each data item is converted to a
vector before averaging, so the result reflects the dominant wind
direction during the time period covered by the data.
Setting the ``decay`` parameter converts the filter from a simple
averager to one where the most recent sample carries the highest
weight, and earlier samples have a lower weight according to how
long ago they were.
This process is an approximation of "exponential smoothing". See
`Wikipedia <http://en.wikipedia.org/wiki/Exponential_smoothing>`_
for a detailed discussion.
The parameter ``decay`` corresponds to the value ``(1 - alpha)``
in the Wikipedia description. Because the weather data being
smoothed may not be at regular intervals this parameter is the
decay over 5 minutes. Weather data at other intervals will have
its weight scaled accordingly.
The return value is a (speed, direction) tuple.
:param decay: filter coefficient decay rate.
:type decay: float
:rtype: (float, float)
"""
def __init__(self, decay=1.0):
self.decay = decay
self.Ve = None
self.Vn = 0.0
self.total = 0.0
self.weight = 1.0
self.total_weight = 0.0
self.last_idx = None
def add(self, data):
speed = data['wind_ave']
if speed is None:
return
if self.last_idx and self.decay != 1.0:
interval = data['idx'] - self.last_idx
assert interval.days == 0
decay = self.decay
if interval != MINUTEx5:
decay = decay ** (float(interval.seconds) /
float(MINUTEx5.seconds))
self.weight = self.weight / decay
self.last_idx = data['idx']
speed = speed * self.weight
self.total += speed
self.total_weight += self.weight
direction = data['wind_dir']
if direction is None:
return
if self.Ve is None:
self.Ve = 0.0
if isinstance(direction, int):
self.Ve -= speed * sin_LUT[direction]
self.Vn -= speed * cos_LUT[direction]
else:
direction = math.radians(float(direction) * 22.5)
self.Ve -= speed * math.sin(direction)
self.Vn -= speed * math.cos(direction)
def result(self):
if self.total_weight == 0.0:
return (None, None)
if self.Ve is None:
return (self.total / self.total_weight, None)
return (self.total / self.total_weight,
(math.degrees(math.atan2(self.Ve, self.Vn)) + 180.0) / 22.5)
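# Illustrative sketch (added; not part of the original module). With
# decay=0.8 a sample five minutes older than the newest carries 0.8x the
# newest sample's weight, ten minutes older 0.64x, and so on:
#
#     wf = WindFilter(decay=0.8)
#     for data in calib_data[start:stop]:  # dicts with 'idx', 'wind_ave', 'wind_dir'
#         wf.add(data)
#     speed, direction = wf.result()       # direction is in 0-16 sector units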
class HourAcc(object):
"""'Accumulate' raw weather data to produce hourly summary.
Compute average wind speed and maximum wind gust, find dominant
wind direction and compute total rainfall.
"""
def __init__(self, last_rain):
self.last_rain = last_rain
self.copy_keys = ['idx', 'hum_in', 'temp_in', 'hum_out', 'temp_out',
'abs_pressure', 'rel_pressure']
self.reset()
def reset(self):
self.wind_fil = WindFilter()
self.wind_gust = (-2.0, None)
self.rain = 0.0
self.retval = {}
def add_raw(self, data):
idx = data['idx']
self.wind_fil.add(data)
wind_gust = data['wind_gust']
if wind_gust is not None and wind_gust > self.wind_gust[0]:
self.wind_gust = (wind_gust, idx)
rain = data['rain']
if rain is not None:
if self.last_rain is not None:
diff = rain - self.last_rain
if diff < -0.001:
logger.warning(
'%s rain reset %.1f -> %.1f', str(idx), self.last_rain, rain)
elif diff > float(data['delay'] * 5):
# rain exceeds 5mm / minute, assume corrupt data and ignore it
logger.warning(
'%s rain jump %.1f -> %.1f', str(idx), self.last_rain, rain)
else:
self.rain += max(0.0, diff)
self.last_rain = rain
# copy some current readings
if 'illuminance' in data and not 'illuminance' in self.copy_keys:
self.copy_keys.append('illuminance')
self.copy_keys.append('uv')
# if already have data to return, ignore 'lost contact' readings
if data['temp_out'] is not None or not self.retval:
for key in self.copy_keys:
self.retval[key] = data[key]
def result(self):
if not self.retval:
return None
self.retval['wind_ave'], self.retval['wind_dir'] = self.wind_fil.result()
if self.wind_gust[1]:
self.retval['wind_gust'] = self.wind_gust[0]
else:
self.retval['wind_gust'] = None
self.retval['rain'] = self.rain
return self.retval
class DayAcc(object):
"""'Accumulate' weather data to produce daily summary.
Compute average wind speed, maximum wind gust and daytime max &
nighttime min temperatures, find dominant wind direction and
compute total rainfall.
Daytime is assumed to be 0900-2100 and nighttime to be 2100-0900,
local time (1000-2200 and 2200-1000 during DST), regardless of the
"day end hour" setting.
"""
def __init__(self):
self.has_illuminance = False
self.ave = {}
self.max = {}
self.min = {}
self.reset()
def reset(self):
self.wind_fil = WindFilter()
self.wind_gust = (-1.0, None)
self.rain = 0.0
for i in ('temp_in', 'temp_out', 'hum_in', 'hum_out',
'abs_pressure', 'rel_pressure'):
self.ave[i] = Average()
self.max[i] = Maximum()
self.min[i] = Minimum()
for i in ('illuminance', 'uv'):
self.ave[i] = Average()
self.max[i] = Maximum()
self.retval = dict()
def add_raw(self, data):
idx = data['idx']
local_hour = time_zone.utc_to_nodst(idx).hour
wind_gust = data['wind_gust']
if wind_gust is not None and wind_gust > self.wind_gust[0]:
self.wind_gust = (wind_gust, idx)
for i in ('temp_in', 'temp_out'):
temp = data[i]
if temp is not None:
self.ave[i].add(temp)
if local_hour >= 9 and local_hour < 21:
# daytime max temperature
self.max[i].add(temp, idx)
else:
# nighttime min temperature
self.min[i].add(temp, idx)
for i in ('hum_in', 'hum_out', 'abs_pressure', 'rel_pressure'):
value = data[i]
if value is not None:
self.ave[i].add(value)
self.max[i].add(value, idx)
self.min[i].add(value, idx)
if 'illuminance' in data:
self.has_illuminance = True
for i in ('illuminance', 'uv'):
value = data[i]
if value is not None:
self.ave[i].add(value)
self.max[i].add(value, idx)
def add_hourly(self, data):
self.wind_fil.add(data)
rain = data['rain']
if rain is not None:
self.rain += rain
self.retval['idx'] = data['idx']
def result(self):
if not self.retval:
return None
self.retval['wind_ave'], self.retval['wind_dir'] = self.wind_fil.result()
if self.wind_gust[1]:
self.retval['wind_gust'] = self.wind_gust[0]
else:
self.retval['wind_gust'] = None
self.retval['wind_gust_t'] = self.wind_gust[1]
self.retval['rain'] = self.rain
for i in ('temp_in', 'temp_out', 'hum_in', 'hum_out',
'abs_pressure', 'rel_pressure'):
self.retval['%s_ave' % i] = self.ave[i].result()
(self.retval['%s_max' % i],
self.retval['%s_max_t' % i]) = self.max[i].result()
(self.retval['%s_min' % i],
self.retval['%s_min_t' % i]) = self.min[i].result()
if self.has_illuminance:
for i in ('illuminance', 'uv'):
self.retval['%s_ave' % i] = self.ave[i].result()
(self.retval['%s_max' % i],
self.retval['%s_max_t' % i]) = self.max[i].result()
return self.retval
class MonthAcc(object):
"""'Accumulate' daily weather data to produce monthly summary.
Compute daytime max & nighttime min temperatures.
"""
def __init__(self, rain_day_threshold):
self.rain_day_threshold = rain_day_threshold
self.has_illuminance = False
self.ave = {}
self.min = {}
self.max = {}
self.min_lo = {}
self.min_hi = {}
self.min_ave = {}
self.max_lo = {}
self.max_hi = {}
self.max_ave = {}
self.reset()
def reset(self):
for i in ('temp_in', 'temp_out'):
self.ave[i] = Average()
self.min_lo[i] = Minimum()
self.min_hi[i] = Maximum()
self.min_ave[i] = Average()
self.max_lo[i] = Minimum()
self.max_hi[i] = Maximum()
self.max_ave[i] = Average()
for i in ('hum_in', 'hum_out', 'abs_pressure', 'rel_pressure'):
self.ave[i] = Average()
self.max[i] = Maximum()
self.min[i] = Minimum()
for i in ('illuminance', 'uv'):
self.ave[i] = Average()
self.max_lo[i] = Minimum()
self.max_hi[i] = Maximum()
self.max_ave[i] = Average()
self.wind_fil = WindFilter()
self.wind_gust = (-1.0, None)
self.rain = 0.0
self.rain_days = 0
self.valid = False
def add_daily(self, data):
self.idx = data['idx']
for i in ('temp_in', 'temp_out'):
temp = data['%s_ave' % i]
if temp is not None:
self.ave[i].add(temp)
temp = data['%s_min' % i]
if temp is not None:
self.min_lo[i].add(temp, data['%s_min_t' % i])
self.min_hi[i].add(temp, data['%s_min_t' % i])
self.min_ave[i].add(temp)
temp = data['%s_max' % i]
if temp is not None:
self.max_lo[i].add(temp, data['%s_max_t' % i])
self.max_hi[i].add(temp, data['%s_max_t' % i])
self.max_ave[i].add(temp)
for i in ('hum_in', 'hum_out', 'abs_pressure', 'rel_pressure'):
value = data['%s_ave' % i]
if value is not None:
self.ave[i].add(value)
value = data['%s_min' % i]
if value is not None:
self.min[i].add(value, data['%s_min_t' % i])
value = data['%s_max' % i]
if value is not None:
self.max[i].add(value, data['%s_max_t' % i])
self.wind_fil.add(data)
wind_gust = data['wind_gust']
if wind_gust is not None and wind_gust > self.wind_gust[0]:
self.wind_gust = (wind_gust, data['wind_gust_t'])
if 'illuminance_ave' in data:
self.has_illuminance = True
for i in ('illuminance', 'uv'):
value = data['%s_ave' % i]
if value is not None:
self.ave[i].add(value)
value = data['%s_max' % i]
if value is not None:
self.max_lo[i].add(value, data['%s_max_t' % i])
self.max_hi[i].add(value, data['%s_max_t' % i])
self.max_ave[i].add(value)
self.rain += data['rain']
if data['rain'] >= self.rain_day_threshold:
self.rain_days += 1
self.valid = True
def result(self):
if not self.valid:
return None
result = {}
result['idx'] = self.idx
result['rain'] = self.rain
result['rain_days'] = self.rain_days
for i in ('temp_in', 'temp_out'):
result['%s_ave' % i] = self.ave[i].result()
result['%s_min_ave' % i] = self.min_ave[i].result()
(result['%s_min_lo' % i],
result['%s_min_lo_t' % i]) = self.min_lo[i].result()
(result['%s_min_hi' % i],
result['%s_min_hi_t' % i]) = self.min_hi[i].result()
result['%s_max_ave' % i] = self.max_ave[i].result()
(result['%s_max_lo' % i],
result['%s_max_lo_t' % i]) = self.max_lo[i].result()
(result['%s_max_hi' % i],
result['%s_max_hi_t' % i]) = self.max_hi[i].result()
for i in ('hum_in', 'hum_out', 'abs_pressure', 'rel_pressure'):
result['%s_ave' % i] = self.ave[i].result()
(result['%s_max' % i],
result['%s_max_t' % i]) = self.max[i].result()
(result['%s_min' % i],
result['%s_min_t' % i]) = self.min[i].result()
result['wind_ave'], result['wind_dir'] = self.wind_fil.result()
if self.wind_gust[1]:
result['wind_gust'] = self.wind_gust[0]
else:
result['wind_gust'] = None
result['wind_gust_t'] = self.wind_gust[1]
if self.has_illuminance:
for i in ('illuminance', 'uv'):
result['%s_ave' % i] = self.ave[i].result()
result['%s_max_ave' % i] = self.max_ave[i].result()
(result['%s_max_lo' % i],
result['%s_max_lo_t' % i]) = self.max_lo[i].result()
(result['%s_max_hi' % i],
result['%s_max_hi_t' % i]) = self.max_hi[i].result()
return result
def calibrate_data(params, raw_data, calib_data):
"""'Calibrate' raw data, using a user-supplied function."""
start = calib_data.before(datetime.max)
if start is None:
start = datetime.min
start = raw_data.after(start + SECOND)
if start is None:
return start
del calib_data[start:]
calibrator = Calib(params, raw_data)
def calibgen(inputdata):
"""Internal generator function"""
count = 0
for data in inputdata:
idx = data['idx']
count += 1
if count % 10000 == 0:
logger.info("calib: %s", idx.isoformat(' '))
elif count % 500 == 0:
logger.debug("calib: %s", idx.isoformat(' '))
for key in ('rain', 'abs_pressure', 'temp_in'):
if data[key] is None:
logger.error('Ignoring invalid data at %s', idx.isoformat(' '))
break
else:
yield calibrator.calib(data)
calib_data.update(calibgen(raw_data[start:]))
return start
def generate_hourly(calib_data, hourly_data, process_from):
"""Generate hourly summaries from calibrated data."""
start = hourly_data.before(datetime.max)
if start is None:
start = datetime.min
start = calib_data.after(start + SECOND)
if process_from:
if start:
start = min(start, process_from)
else:
start = process_from
if start is None:
return start
# set start of hour in local time (not all time offsets are integer hours)
start = time_zone.hour_start(start)
del hourly_data[start:]
# preload pressure history, and find last valid rain
prev = None
pressure_history = deque()
last_rain = None
for data in calib_data[start - HOURx3:start]:
if data['rel_pressure']:
pressure_history.append((data['idx'], data['rel_pressure']))
if data['rain'] is not None:
last_rain = data['rain']
prev = data
# iterate over data in one hour chunks
stop = calib_data.before(datetime.max)
acc = HourAcc(last_rain)
def hourlygen(inputdata, prev):
"""Internal generator function"""
hour_start = start
count = 0
while hour_start <= stop:
count += 1
if count % 1008 == 0:
logger.info("hourly: %s", hour_start.isoformat(' '))
elif count % 24 == 0:
logger.debug("hourly: %s", hour_start.isoformat(' '))
hour_end = hour_start + HOUR
acc.reset()
for data in inputdata[hour_start:hour_end]:
if data['rel_pressure']:
pressure_history.append((data['idx'], data['rel_pressure']))
if prev:
err = data['idx'] - prev['idx']
if abs(err - timedelta(minutes=data['delay'])) > TIME_ERR:
logger.info('unexpected data interval %s %s',
data['idx'].isoformat(' '), str(err))
acc.add_raw(data)
prev = data
new_data = acc.result()
if new_data and (new_data['idx'] - hour_start) >= timedelta(minutes=9):
# compute pressure trend
new_data['pressure_trend'] = None
if new_data['rel_pressure']:
target = new_data['idx'] - HOURx3
while (len(pressure_history) >= 2 and
abs(pressure_history[0][0] - target) >
abs(pressure_history[1][0] - target)):
pressure_history.popleft()
if (pressure_history and
abs(pressure_history[0][0] - target) < HOUR):
new_data['pressure_trend'] = (
new_data['rel_pressure'] - pressure_history[0][1])
# store new hourly data
yield new_data
hour_start = hour_end
hourly_data.update(hourlygen(calib_data, prev))
return start
def generate_daily(day_end_hour, use_dst,
calib_data, hourly_data, daily_data, process_from):
"""Generate daily summaries from calibrated and hourly data."""
start = daily_data.before(datetime.max)
if start is None:
start = datetime.min
start = calib_data.after(start + SECOND)
if process_from:
if start:
start = min(start, process_from)
else:
start = process_from
if start is None:
return start
# round to start of this day, in local time
start = time_zone.day_start(start, day_end_hour, use_dst=use_dst)
del daily_data[start:]
stop = calib_data.before(datetime.max)
acc = DayAcc()
def dailygen(inputdata):
"""Internal generator function"""
day_start = start
count = 0
while day_start <= stop:
count += 1
if count % 30 == 0:
logger.info("daily: %s", day_start.isoformat(' '))
else:
logger.debug("daily: %s", day_start.isoformat(' '))
day_end = day_start + DAY
if use_dst:
# day might be 23 or 25 hours long
day_end = time_zone.day_start(
day_end + HOURx3, day_end_hour, use_dst=use_dst)
acc.reset()
for data in inputdata[day_start:day_end]:
acc.add_raw(data)
for data in hourly_data[day_start:day_end]:
acc.add_hourly(data)
new_data = acc.result()
if new_data:
new_data['start'] = day_start
yield new_data
day_start = day_end
daily_data.update(dailygen(calib_data))
return start
def generate_monthly(rain_day_threshold, day_end_hour, use_dst,
daily_data, monthly_data, process_from):
"""Generate monthly summaries from daily data."""
start = monthly_data.before(datetime.max)
if start is None:
start = datetime.min
start = daily_data.after(start + SECOND)
if process_from:
if start:
start = min(start, process_from)
else:
start = process_from
if start is None:
return start
# set start to noon on start of first day of month (local time)
local_start = time_zone.utc_to_local(start).replace(tzinfo=None)
local_start = local_start.replace(day=1, hour=12, minute=0, second=0)
# go back to UTC and get start of day (which might be previous day)
start = time_zone.local_to_utc(local_start)
start = time_zone.day_start(start, day_end_hour, use_dst=use_dst)
del monthly_data[start:]
stop = daily_data.before(datetime.max)
if stop is None:
return None
def monthlygen(inputdata, start, local_start):
"""Internal generator function"""
acc = MonthAcc(rain_day_threshold)
month_start = start
count = 0
while month_start <= stop:
count += 1
if count % 12 == 0:
logger.info("monthly: %s", month_start.isoformat(' '))
else:
logger.debug("monthly: %s", month_start.isoformat(' '))
if local_start.month < 12:
local_start = local_start.replace(month=local_start.month+1)
else:
local_start = local_start.replace(
month=1, year=local_start.year+1)
month_end = time_zone.local_to_utc(local_start)
month_end = time_zone.day_start(
month_end, day_end_hour, use_dst=use_dst)
acc.reset()
for data in inputdata[month_start:month_end]:
acc.add_daily(data)
new_data = acc.result()
if new_data:
new_data['start'] = month_start
yield new_data
month_start = month_end
monthly_data.update(monthlygen(daily_data, start, local_start))
return start
def get_day_end_hour(params):
# get daytime end hour (in local time)
day_end_hour, use_dst = literal_eval(
params.get('config', 'day end hour', '9, False'))
day_end_hour = day_end_hour % 24
return day_end_hour, use_dst
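# Illustrative weather.ini entry (added; values are hypothetical):
#     [config]
#     day end hour = 21, True
# parses via literal_eval to (21, True): a day ending at 2100 local time,
# shifted to 2200 while DST is in effect.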
def process_data(context):
"""Generate summaries from raw weather station data.
The meteorological day end (typically 2100 or 0900 local time) is
    set in the preferences file ``weather.ini``. The default value used
    by this module is 0900 (see :func:`get_day_end_hour`); 2100 (2200
    during DST) is the historical convention for weather station readings.
"""
logger.info('Generating summary data')
# get time of last record
last_raw = context.raw_data.before(datetime.max)
if last_raw is None:
raise IOError('No data found. Check data directory parameter.')
# get daytime end hour (in local time)
day_end_hour, use_dst = get_day_end_hour(context.params)
# get other config
rain_day_threshold = float(
context.params.get('config', 'rain day threshold', '0.2'))
# calibrate raw data
start = calibrate_data(context.params, context.raw_data, context.calib_data)
# generate hourly data
start = generate_hourly(context.calib_data, context.hourly_data, start)
# generate daily data
start = generate_daily(day_end_hour, use_dst,
context.calib_data, context.hourly_data, context.daily_data, start)
# generate monthly data
generate_monthly(rain_day_threshold, day_end_hour, use_dst,
context.daily_data, context.monthly_data, start)
return 0
def main(argv=None):
if argv is None:
argv = sys.argv
try:
opts, args = getopt.getopt(argv[1:], "hv", ['help', 'verbose'])
except getopt.error as msg:
print('Error: %s\n' % msg, file=sys.stderr)
print(__usage__.strip(), file=sys.stderr)
return 1
# process options
verbose = 0
for o, a in opts:
if o in ('-h', '--help'):
print(__usage__.strip())
return 0
elif o in ('-v', '--verbose'):
verbose += 1
# check arguments
if len(args) != 1:
print('Error: 1 argument required\n', file=sys.stderr)
print(__usage__.strip(), file=sys.stderr)
return 2
pywws.logger.setup_handler(verbose)
data_dir = args[0]
with pywws.storage.pywws_context(data_dir) as context:
return process_data(context)
if __name__ == "__main__":
sys.exit(main())
| gpl-2.0 | -1,985,312,193,686,834,200 | 35.784906 | 94 | 0.559328 | false |
brickfiestastem/brickfiesta | shop/views.py | 1 | 11431 | import datetime
import json
import urllib.error
import urllib.parse
import urllib.request
import uuid
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.models import User
from django.core.mail import EmailMessage
from django.core.mail import send_mail
from django.shortcuts import render, redirect
from django.template import loader
from django.urls import reverse
from django.utils.html import format_html
from django.views import View
from django.views.generic import DetailView, FormView
from django.views.generic.detail import SingleObjectMixin
from django.views.generic.list import ListView
from event.models import Event
from shop.utils import check_recaptcha
from .cart import ShoppingCart
from .forms import CartItemForm
from .models import Product, Order, OrderItem
from .utils import add_attendee_fan_badge_shirt
# Create your views here.
class EventListView(ListView):
queryset = Event.objects.all().order_by('start_date').filter(
start_date__gt=datetime.date.today())
template_name = 'shop/event_list.html'
class EventProductView(View):
def get(self, request, event_id):
obj_products = Product.objects.filter(
event__id__exact=event_id, is_public=True).order_by('product_type').extra(
select={'is_top': "product_type = '" + Product.EXHIBITION + "'"})
date_two_weeks = datetime.date.today() + datetime.timedelta(days=14)
if obj_products.first().event.start_date <= date_two_weeks:
obj_products = obj_products.extra(
order_by=['-is_top', 'product_type'])
return render(request,
'shop/product_list.html',
{'object_list': obj_products, 'first': obj_products.first()})
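    # Note (added): the ``extra(select=...)`` call computes an ``is_top``
    # column (product_type == EXHIBITION) so that, within two weeks of the
    # event's start date, exhibition products are re-ordered to the top.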
class CartTestView(View):
def get(self, request):
str_checkout_id = request.GET.get('checkoutId', None)
str_reference_id = request.GET.get('referenceId', None)
if str_reference_id:
request.session['cart_id'] = str_reference_id
if str_checkout_id:
request.session['checkout_id'] = str_checkout_id
obj_cart = ShoppingCart(request)
return render(request, 'shop/cart_contents.html', {'error_message': obj_cart.get_debug(request),
'cart': obj_cart.get_basket(),
'cart_total': obj_cart.total()})
class CartCheckoutView(View):
def get(self, request):
list_message = list()
obj_cart = ShoppingCart(request)
str_checkout_id = request.GET.get('checkoutId', "INVALID")
str_reference_id = request.GET.get('referenceId', "INVALID")
str_transaction_id = request.GET.get('transactionId', "INVALID")
if obj_cart.check_checkout_id(str_checkout_id):
# valid save everything in the users
obj_order = None
obj_basket = obj_cart.get_basket()
for obj_item in obj_basket:
obj_user = None
try:
obj_user = User.objects.get(email=obj_item.email)
list_message.append(
"Found existing customer information " + obj_item.email + ".")
except User.DoesNotExist:
obj_user = User.objects.create_user(username=obj_item.email,
email=obj_item.email,
first_name=obj_item.first_name,
last_name=obj_item.last_name,
password=uuid.uuid4())
list_message.append(
"Created a user for " + obj_item.email + ". Please check your email for password instructions.")
send_mail(subject="Brick Fiesta - New Account Created",
message=loader.render_to_string(
"afol/new_account_email.html"),
from_email=settings.DEFAULT_FROM_EMAIL,
recipient_list=[obj_item.email])
if obj_order is None:
if request.user.is_authenticated:
obj_order = Order(user=request.user,
transaction_id=str_transaction_id,
reference_id=str_reference_id,
guest="")
else:
obj_order = Order(user=obj_user,
transaction_id=str_transaction_id,
reference_id=str_reference_id,
guest="")
obj_order.save()
list_message.append(
"Order associated with " + obj_item.email + ".")
obj_order_item = OrderItem(order=obj_order,
user=obj_user,
first_name=obj_item.first_name,
last_name=obj_item.last_name,
product=obj_item.product,
price=obj_item.product.price)
# if obj_item.product.quantity_available > 0:
# obj_product = obj_item.product
# obj_product.quantity_available -= 1
# obj_product.save()
obj_order_item.save()
list_message.append(
"Order item " + obj_order_item.product.title + " associated with " + obj_item.email + ".")
add_attendee_fan_badge_shirt(request, obj_order_item)
obj_cart.clear()
else:
list_message.append(
"It looks like there was an problem with your cart and processing it.")
list_message.append(
"We have gathered the data and have sent an email to look into the issue.")
list_message.append(
"If you do not hear back in a few days please contact us using the contact form.")
str_body = "JSON: " + obj_cart.get_debug(request) + "\n\nReference: " + str_reference_id + \
"\n\nTransaction: " + str_transaction_id
email = EmailMessage(
'Brick Fiesta - URGENT - Cart Error', str_body, to=[settings.DEFAULT_FROM_EMAIL])
email.send()
obj_cart.clear()
return render(request, 'shop/cart_complete.html', {'message': list_message, })
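# Interpretation note (assumed, not verified against Square's documentation):
# checkoutId / referenceId / transactionId are taken to be query parameters
# that Square appends when redirecting back after payment; a checkoutId that
# matches the value stored in the session (see CartTestView) is treated as
# proof of payment before any Order/OrderItem rows are created.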
class CartView(View):
def post(self, request, *args, **kwargs):
str_error_message = False
obj_cart = ShoppingCart(request)
if 'cart_item' in request.POST:
obj_cart.remove(request.POST['cart_item'])
if 'cart' in request.POST:
# generate json objects
str_json = obj_cart.get_json()
str_json = str_json.encode('utf-8')
print(str_json)
str_url = "https://connect.squareup.com/v2/locations/" + \
settings.SQUARE_LOCATION_KEY + "/checkouts"
# send request for objects
obj_request = urllib.request.Request(url=str_url)
obj_request.add_header(
'Authorization', 'Bearer ' + settings.SQUARE_CART_KEY)
obj_request.add_header(
'Content-Type', 'application/json; charset=utf-8')
obj_request.add_header('Accept', 'application/json')
# get response
obj_response = ""
try:
obj_response = urllib.request.urlopen(
obj_request, data=str_json)
except urllib.error.URLError as obj_error:
# print(obj_error.reason)
str_error_message = "Unable to reach payment server. Please try again later."
str_body = "URL: " + str_url + "\n\nJSON: " + \
str_json.decode('ascii') + "\n\nRESPONSE:" + obj_response
email = EmailMessage(
'Brick Fiesta - Check Out URL Error', str_body, to=[settings.DEFAULT_FROM_EMAIL])
email.send()
pass
except urllib.error.HTTPError as obj_error:
str_error_message = "Unable to process payment correctly. Error sent to event organizers."
str_body = "URL: " + str_url + "\n\nJSON: " + \
str_json.decode('ascii') + "\n\nRESPONSE:" + obj_response
email = EmailMessage(
'Brick Fiesta - Check Out HTTP Error', str_body, to=[settings.DEFAULT_FROM_EMAIL])
email.send()
# print(obj_error.code)
# print(obj_error.read())
pass
else:
result = json.loads(obj_response.read().decode())
# print(result)
obj_cart.set_checkout_id(request, result['checkout']['id'])
return redirect(result['checkout']['checkout_page_url'])
return render(request, 'shop/cart_contents.html', {'error_message': str_error_message,
'cart': obj_cart.get_basket(),
'cart_total': obj_cart.total()})
def get(self, request, token=None):
if token:
request.session['cart'] = str(token)
obj_cart = ShoppingCart(request)
return render(request, 'shop/cart_contents.html', {'cart': obj_cart.get_basket(),
'cart_total': obj_cart.total()})
class ProductDetailView(DetailView):
model = Product
def get_context_data(self, **kwargs):
context = super(ProductDetailView, self).get_context_data(**kwargs)
context['form'] = CartItemForm()
return context
class ProductCartItemView(SingleObjectMixin, FormView):
template_name = 'shop/product_detail.html'
form_class = CartItemForm
model = Product
def post(self, request, *args, **kwargs):
cart = ShoppingCart(request)
self.object = self.get_object()
form = CartItemForm(request.POST)
if not check_recaptcha(request):
form.add_error(
None, 'You failed the human test. Try the reCAPTCHA again.')
if form.is_valid():
cart.add(first_name=form.cleaned_data['first_name'],
last_name=form.cleaned_data['last_name'],
email=form.cleaned_data['email'],
product=self.object)
messages.info(request, format_html(
'Product added to <a href="{}">cart</a>.', reverse('shop:cart')))
return super(ProductCartItemView, self).post(request, *args, **kwargs)
def get_success_url(self):
return reverse('shop:event', kwargs={'event_id': self.object.event.id})
class ProductDetail(View):
def get(self, request, *args, **kwargs):
view = ProductDetailView.as_view()
return view(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
view = ProductCartItemView.as_view()
return view(request, *args, **kwargs)
| agpl-3.0 | 1,022,110,666,750,949,400 | 44.181818 | 120 | 0.53906 | false |
acabey/flash-dump-tool | test/test_xecrypt.py | 1 | 35801 | from unittest import TestCase
from Crypto.PublicKey import RSA
from lib.xecrypt import XeCryptBnQw_SwapLeBe, XeCryptBnDw_SwapLeBe, XeCryptBnQw, XeCryptBnQw_toInt, XeCryptBnQwNeMod
from lib.xecrypt_rsa import XeCrypt_RSA
from lib.xecrypt import XeCryptRotSum, RotSumCtx
from lib.xecrypt import XeCryptRotSumSha
class TestXeCryptRotSumSha(TestCase):
def test_normal(self):
buffer1 = bytearray(0x40)
for i in range(len(buffer1)):
buffer1[i] = i % 255
buffer2 = bytearray(0x40)
for i in range(len(buffer2)):
buffer2[i] = 255 - (i % 255)
digest = bytes(
[0x31, 0xf0, 0x0b, 0x77, 0x68, 0xf9, 0x57, 0x71, 0x8c, 0x4d, 0x62, 0x84, 0xb3, 0x45, 0xcd, 0xdd, 0x37, 0x9c,
0x6e, 0xe9])
self.assertEqual(XeCryptRotSumSha(buffer1, buffer2, 0x20), digest)
def test_input_2_zero(self):
buffer1 = bytearray(0x40)
for i in range(len(buffer1)):
buffer1[i] = i % 255
buffer2 = bytearray(0x40)
digest = bytes(
[0x2a, 0xdb, 0xc2, 0x8c, 0xda, 0xca, 0x48, 0x8e, 0xa5, 0x01, 0x74, 0xa5, 0xd9, 0x80, 0x60, 0xc5, 0xf9, 0x0f,
0x5d, 0x54])
self.assertEqual(XeCryptRotSumSha(buffer1, buffer2, 0x20), digest)
def test_input_all_zero(self):
buffer1 = bytearray(0x40)
buffer2 = bytearray(0x40)
digest = bytes(
[0x37, 0xd3, 0xb6, 0xb8, 0x4e, 0x35, 0x08, 0x0a, 0xe5, 0xf9, 0x60, 0xfd, 0xbf, 0x44, 0x26, 0x06, 0x54, 0x02,
0x7e, 0x5b])
self.assertEqual(XeCryptRotSumSha(buffer1, buffer2, 0x20), digest)
class TestXeCryptRotSum(TestCase):
def test_launch_xex(self):
input_1_size = 0x70
input_1 = bytearray(
[0x00, 0x00, 0x01, 0x74, 0x00, 0x00, 0x00, 0x00, 0x82, 0x00, 0x00, 0x00, 0xfc, 0xa0, 0x61, 0xd4,
0xc2, 0xa1, 0x71, 0xc0, 0xec, 0x98, 0x00, 0x86, 0xe6, 0x8b, 0xa8, 0x9f, 0xb0, 0x9c, 0x83, 0xd4,
0x00, 0x00, 0x00, 0x02, 0x47, 0x88, 0x8f, 0x6a, 0x94, 0x36, 0x58, 0xc0, 0x7a, 0xdf, 0xeb, 0xda,
0x44, 0x13, 0x4a, 0x7f, 0xe0, 0xd2, 0x3f, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2a, 0xd8, 0x0b, 0xfa, 0xf3, 0xdc, 0x53, 0xb0,
0x89, 0x7d, 0x5e, 0xb5, 0x08, 0xd0, 0x10, 0x63, 0x00, 0x00, 0x00, 0x00, 0xca, 0xe2, 0xc7, 0x20,
0xef, 0xb2, 0x9a, 0x1d, 0x2e, 0x90, 0xe4, 0x7e, 0xb9, 0xdf, 0x41, 0xa0, 0xf5, 0xf5, 0xb7, 0xa4])
rotsum_ctx = RotSumCtx([0] * 4)
output = bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05, 0xb2, 0x51, 0xdc, 0xa2, 0xc4, 0x60, 0xa3, 0x5d,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf7, 0xd8, 0x2d, 0xc6, 0x4e, 0xec, 0xd3, 0xea, 0x24])
XeCryptRotSum(rotsum_ctx, input_1, input_1_size >> 3)
self.assertEqual(bytes(rotsum_ctx), output)
def test_launch_xex_zero_all(self):
input_1_size = 0x0
input_1 = []
output = bytes([0x0] * 0x20)
rotsum_ctx = RotSumCtx.from_bytes(output)
XeCryptRotSum(rotsum_ctx, input_1, input_1_size >> 3)
self.assertEqual(bytes(rotsum_ctx), output)
def test_launch_xex_zero_input(self):
input_1_size = 0x0
input_1 = []
output = bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05, 0xb2, 0x51, 0xdc, 0xa2, 0xc4, 0x60, 0xa3, 0x5d,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf7, 0xd8, 0x2d, 0xc6, 0x4e, 0xec, 0xd3, 0xea, 0x24])
rotsum_ctx = RotSumCtx.from_bytes(output)
XeCryptRotSum(rotsum_ctx, input_1, input_1_size >> 3)
self.assertEqual(bytes(rotsum_ctx), output)
def test_normal(self):
input_1_size = 0x40
input_1_bytes = bytearray(
[0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f,
0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f])
input_1 = bytearray(input_1_size)
for i in range(input_1_size):
input_1[i] = i % 255
self.assertEqual(input_1_bytes, input_1)
rotsum_ctx = RotSumCtx([0] * 4)
output = bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x83, 0x62, 0xe0, 0xd8, 0xd7, 0xd3, 0xc3, 0x83,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfb, 0x6d, 0x14, 0x14, 0x14, 0x19, 0x69, 0x69, 0x69])
XeCryptRotSum(rotsum_ctx, input_1, input_1_size >> 3)
self.assertEqual(bytes(rotsum_ctx), output)
def test_normal_2(self):
input_2_size = 0x40
input_2_bytes = bytearray(
[0xff, 0xfe, 0xfd, 0xfc, 0xfb, 0xfa, 0xf9, 0xf8, 0xf7, 0xf6, 0xf5, 0xf4, 0xf3, 0xf2, 0xf1, 0xf0,
0xef, 0xee, 0xed, 0xec, 0xeb, 0xea, 0xe9, 0xe8, 0xe7, 0xe6, 0xe5, 0xe4, 0xe3, 0xe2, 0xe1, 0xe0,
0xdf, 0xde, 0xdd, 0xdc, 0xdb, 0xda, 0xd9, 0xd8, 0xd7, 0xd6, 0xd5, 0xd4, 0xd3, 0xd2, 0xd1, 0xd0,
0xcf, 0xce, 0xcd, 0xcc, 0xcb, 0xca, 0xc9, 0xc8, 0xc7, 0xc6, 0xc5, 0xc4, 0xc3, 0xc2, 0xc1, 0xc0])
input_2 = bytearray(input_2_size)
for i in range(input_2_size):
input_2[i] = 255 - (i % 255)
self.assertEqual(input_2_bytes, input_2)
output_1 = bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x83, 0x62, 0xe0, 0xd8, 0xd7, 0xd3, 0xc3, 0x83,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfb,
0x6d, 0x14, 0x14, 0x14, 0x19, 0x69, 0x69, 0x69])
rotsum_ctx = RotSumCtx.from_bytes(output_1)
output_2 = bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06,
0x50, 0x60, 0x61, 0xaa, 0x6a, 0x8b, 0x0d, 0x54,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf8,
0x52, 0x59, 0x00, 0x00, 0xa4, 0xb0, 0x00, 0x00])
XeCryptRotSum(rotsum_ctx, input_2, input_2_size >> 3)
self.assertEqual(bytes(rotsum_ctx), output_2)
class TestXeCryptBnQw_SwapLeBe(TestCase):
def test_normal_1(self):
self.assertEqual(XeCryptBnQw_SwapLeBe(
bytes([0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08]), 1),
bytes([0x08, 0x07, 0x06, 0x05, 0x04, 0x03, 0x02, 0x01]))
def test_normal_2(self):
self.assertEqual(XeCryptBnQw_SwapLeBe(
bytes([0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08]), 3),
bytes([0x08, 0x07, 0x06, 0x05, 0x04, 0x03, 0x02, 0x01,
0x08, 0x07, 0x06, 0x05, 0x04, 0x03, 0x02, 0x01,
0x08, 0x07, 0x06, 0x05, 0x04, 0x03, 0x02, 0x01]))
def test_zero(self):
self.assertEqual(XeCryptBnQw_SwapLeBe(
bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]), 1),
bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]))
def test_one(self):
self.assertEqual(XeCryptBnQw_SwapLeBe(
bytes([0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]), 1),
bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01]))
class TestXeCryptBnDw_SwapLeBe(TestCase):
def test_normal_1(self):
self.assertEqual(XeCryptBnDw_SwapLeBe(
bytes([0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08]), 2),
bytes([0x04, 0x03, 0x02, 0x01, 0x08, 0x07, 0x06, 0x05, ]))
def test_normal_2(self):
self.assertEqual(XeCryptBnDw_SwapLeBe(
bytes([0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08]), 6),
bytes([0x04, 0x03, 0x02, 0x01, 0x08, 0x07, 0x06, 0x05,
0x04, 0x03, 0x02, 0x01, 0x08, 0x07, 0x06, 0x05,
0x04, 0x03, 0x02, 0x01, 0x08, 0x07, 0x06, 0x05]))
def test_zero(self):
self.assertEqual(XeCryptBnDw_SwapLeBe(
bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]), 2),
bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]))
def test_one(self):
self.assertEqual(XeCryptBnDw_SwapLeBe(
bytes([0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]), 2),
bytes([0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00]))
class TestXeCryptBnQw(TestCase):
def test_rsa(self):
rsa_obj = RSA.generate(1024)
p_old_bn = XeCryptBnQw(rsa_obj.p, 64)
self.assertEqual(XeCryptBnQw_toInt(p_old_bn), rsa_obj.p)
n_old_bn = XeCryptBnQw(rsa_obj.n, 128)
self.assertEqual(XeCryptBnQw_toInt(n_old_bn), rsa_obj.n)
def test_xersa(self):
rsa_obj = RSA.generate(1024)
xecrypt_obj = XeCrypt_RSA.from_rsa_obj(rsa_obj)
p_old_bn = XeCryptBnQw(xecrypt_obj.p, 64)
self.assertEqual(XeCryptBnQw_toInt(p_old_bn), xecrypt_obj.p)
n_old_bn = XeCryptBnQw(xecrypt_obj.n, 128)
self.assertEqual(XeCryptBnQw_toInt(n_old_bn), xecrypt_obj.n)
class TestXeCryptBnQwNeMod(TestCase):
def test_normal(self):
r3 = bytes([
0xf5, 0xbf, 0x46, 0xe4, 0x34, 0xf1, 0x73, 0x3c, 0xef, 0x6d, 0x46, 0x91, 0x3d, 0x75, 0x08, 0xcf,
0x3f, 0x09, 0x57, 0xcd, 0x7e, 0xe5, 0x1a, 0xa9, 0x7d, 0x83, 0x99, 0xf8, 0xf6, 0xe9, 0x58, 0x39,
0xa5, 0x37, 0xf0, 0x6c, 0x56, 0xa7, 0xef, 0x33, 0xe8, 0x11, 0x23, 0x26, 0x1b, 0x94, 0x9e, 0xc8,
0xcc, 0x30, 0x81, 0x7b, 0x21, 0x9f, 0x6c, 0x54, 0x39, 0x20, 0x35, 0x88, 0xbb, 0xe1, 0x98, 0xf6,
0x62, 0x6c, 0xa9, 0x28, 0x65, 0xd4, 0x70, 0xe8, 0x40, 0xb2, 0xd5, 0x3d, 0x94, 0x9d, 0x45, 0x27,
0x0c, 0xc9, 0xf4, 0x09, 0x24, 0x12, 0x08, 0x59, 0xaf, 0x7c, 0xd2, 0xf3, 0xc4, 0x9a, 0xcd, 0x3d,
0x6c, 0xe7, 0xe9, 0x6f, 0x22, 0x49, 0x18, 0x1e, 0x40, 0x72, 0x0b, 0x80, 0x47, 0x49, 0x52, 0x5f,
0x33, 0x70, 0x77, 0xff, 0xbb, 0xc4, 0xf4, 0xb5, 0xdb, 0xbb, 0x3f, 0x02, 0x55, 0x73, 0xe9, 0x34,
0xa1, 0x99, 0x04, 0xc0, 0x43, 0xef, 0x73, 0x4e, 0xc6, 0xaa, 0xc7, 0xe1, 0x07, 0x92, 0xb7, 0x18,
0x84, 0x2f, 0x0d, 0x44, 0xa1, 0x5f, 0xe6, 0x7c, 0x15, 0x43, 0xbe, 0xa4, 0x16, 0x15, 0x03, 0xd0,
0x12, 0x7a, 0x3a, 0xeb, 0xdc, 0xf1, 0x7e, 0x2d, 0x05, 0x5e, 0xb9, 0x21, 0xbe, 0x4e, 0xeb, 0xf9,
0x78, 0x69, 0x40, 0x7f, 0x38, 0xe8, 0x19, 0xe2, 0x41, 0x45, 0xbf, 0x0c, 0x5f, 0xa8, 0x8c, 0xdc,
0x1e, 0x52, 0x0e, 0x37, 0x7a, 0x39, 0xd7, 0x6a, 0x5e, 0x65, 0x06, 0x8c, 0x85, 0x5d, 0x0a, 0xc9,
0xb4, 0xfd, 0xfe, 0x31, 0x6e, 0x8c, 0x0d, 0x7d, 0xa1, 0xad, 0x20, 0x6e, 0xe9, 0x57, 0xa0, 0xd7,
0x36, 0x0b, 0x2a, 0x0b, 0x2e, 0x92, 0xb3, 0xdf, 0x18, 0x30, 0x6a, 0x93, 0xd5, 0x9e, 0x0c, 0xa9,
0xf1, 0xfe, 0x6a, 0x2d, 0x8b, 0x92, 0x7d, 0x39, 0x3c, 0xd0, 0x23, 0x2f, 0xd7, 0x70, 0x37, 0xf7,
0x4a, 0xf1, 0xee, 0x3e, 0xe1, 0x2c, 0xbc, 0x97, 0xd8, 0x2d, 0xf1, 0x5b, 0xb0, 0x2e, 0xea, 0xaf,
]) # Unchanged
r4 = bytes([
0x4a, 0xf1, 0xee, 0x3e, 0xe1, 0x2c, 0xbc, 0x97, 0xd8, 0x2d, 0xf1, 0x5b, 0xb0, 0x2e, 0xea, 0xaf,
0x0e, 0xec, 0x36, 0xd2, 0x0a, 0xf8, 0x57, 0x65, 0x2b, 0x49, 0x76, 0x1c, 0x40, 0x3e, 0x45, 0x2e,
0x30, 0x27, 0x90, 0xf4, 0x26, 0x42, 0x4a, 0x4c, 0xaf, 0x75, 0xe8, 0x98, 0x79, 0xa7, 0x3e, 0x44,
0x03, 0x03, 0xd9, 0x86, 0xb3, 0xef, 0x13, 0xdf, 0xcd, 0x10, 0xf1, 0xe5, 0x63, 0x6b, 0xcd, 0x2a,
0x30, 0xe4, 0xa2, 0x16, 0xcf, 0xb7, 0xd5, 0x7c, 0x1b, 0xb0, 0x49, 0xb0, 0x4f, 0xc4, 0xd5, 0x8c,
0x8d, 0xd8, 0x3c, 0x81, 0xe8, 0xa1, 0x73, 0xee, 0x76, 0xb8, 0xb8, 0x49, 0x4d, 0x0b, 0x37, 0x42,
0x70, 0x4e, 0x08, 0x43, 0x2d, 0xdb, 0xf2, 0x37, 0xfe, 0x82, 0xcf, 0xc2, 0x99, 0xc4, 0xa1, 0xa9,
0xbd, 0x8b, 0xc0, 0x0e, 0x88, 0x87, 0xcd, 0x82, 0x1c, 0x75, 0x82, 0x62, 0x16, 0x77, 0xc6, 0xfa,
0x19, 0x5c, 0x04, 0xa7, 0x98, 0x40, 0x05, 0xbf, 0xf1, 0x98, 0x9b, 0xd8, 0x00, 0x95, 0x4a, 0x2a,
0xea, 0x45, 0x5b, 0xb9, 0x89, 0x94, 0x9e, 0x07, 0xb8, 0xb5, 0x86, 0x6c, 0x9d, 0xb9, 0xc6, 0xef,
0xc0, 0x45, 0x7b, 0x3b, 0xaf, 0x32, 0x53, 0x63, 0xd1, 0x49, 0x58, 0xe0, 0xda, 0x9b, 0x96, 0x86,
0x1a, 0xfa, 0xac, 0xaf, 0x66, 0xee, 0x49, 0x0c, 0x37, 0x01, 0x60, 0x8f, 0xca, 0x80, 0x71, 0xfd,
0x2c, 0x35, 0x59, 0xd8, 0x3d, 0xfe, 0xf8, 0xef, 0x0f, 0x3d, 0x12, 0x28, 0xef, 0x6c, 0xf3, 0xad,
0x76, 0x33, 0x37, 0x4f, 0xae, 0x1d, 0x59, 0xe3, 0xdc, 0x14, 0x88, 0x46, 0xa6, 0x96, 0x4a, 0xb8,
0xee, 0x6b, 0x74, 0xf4, 0x46, 0x13, 0x04, 0x45, 0xb1, 0x96, 0xc3, 0xca, 0x98, 0xcb, 0xe0, 0x9b,
0x53, 0x5f, 0xd1, 0xf9, 0x78, 0x0a, 0x90, 0x46, 0xd7, 0xb3, 0x49, 0x2a, 0x5b, 0x89, 0x91, 0x1f,
0x65, 0x49, 0x51, 0xf7, 0xea, 0x7b, 0x30, 0x57, 0x01, 0x72, 0xe2, 0x45, 0xe8, 0x42, 0xea, 0xd3
])
r5 = bytes([
0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
])
r5out = bytes([
0xf5, 0xbf, 0x46, 0xe4, 0x34, 0xf1, 0x73, 0x3c, 0xef, 0x6d, 0x46, 0x91, 0x3d, 0x75, 0x08, 0xcf,
0x3f, 0x09, 0x57, 0xcd, 0x7e, 0xe5, 0x1a, 0xa9, 0x7d, 0x83, 0x99, 0xf8, 0xf6, 0xe9, 0x58, 0x39,
0xa5, 0x37, 0xf0, 0x6c, 0x56, 0xa7, 0xef, 0x33, 0xe8, 0x11, 0x23, 0x26, 0x1b, 0x94, 0x9e, 0xc8,
0xcc, 0x30, 0x81, 0x7b, 0x21, 0x9f, 0x6c, 0x54, 0x39, 0x20, 0x35, 0x88, 0xbb, 0xe1, 0x98, 0xf6,
0x62, 0x6c, 0xa9, 0x28, 0x65, 0xd4, 0x70, 0xe8, 0x40, 0xb2, 0xd5, 0x3d, 0x94, 0x9d, 0x45, 0x27,
0x0c, 0xc9, 0xf4, 0x09, 0x24, 0x12, 0x08, 0x59, 0xaf, 0x7c, 0xd2, 0xf3, 0xc4, 0x9a, 0xcd, 0x3d,
0x6c, 0xe7, 0xe9, 0x6f, 0x22, 0x49, 0x18, 0x1e, 0x40, 0x72, 0x0b, 0x80, 0x47, 0x49, 0x52, 0x5f,
0x33, 0x70, 0x77, 0xff, 0xbb, 0xc4, 0xf4, 0xb5, 0xdb, 0xbb, 0x3f, 0x02, 0x55, 0x73, 0xe9, 0x34,
0xa1, 0x99, 0x04, 0xc0, 0x43, 0xef, 0x73, 0x4e, 0xc6, 0xaa, 0xc7, 0xe1, 0x07, 0x92, 0xb7, 0x18,
0x84, 0x2f, 0x0d, 0x44, 0xa1, 0x5f, 0xe6, 0x7c, 0x15, 0x43, 0xbe, 0xa4, 0x16, 0x15, 0x03, 0xd0,
0x12, 0x7a, 0x3a, 0xeb, 0xdc, 0xf1, 0x7e, 0x2d, 0x05, 0x5e, 0xb9, 0x21, 0xbe, 0x4e, 0xeb, 0xf9,
0x78, 0x69, 0x40, 0x7f, 0x38, 0xe8, 0x19, 0xe2, 0x41, 0x45, 0xbf, 0x0c, 0x5f, 0xa8, 0x8c, 0xdc,
0x1e, 0x52, 0x0e, 0x37, 0x7a, 0x39, 0xd7, 0x6a, 0x5e, 0x65, 0x06, 0x8c, 0x85, 0x5d, 0x0a, 0xc9,
0xb4, 0xfd, 0xfe, 0x31, 0x6e, 0x8c, 0x0d, 0x7d, 0xa1, 0xad, 0x20, 0x6e, 0xe9, 0x57, 0xa0, 0xd7,
0x36, 0x0b, 0x2a, 0x0b, 0x2e, 0x92, 0xb3, 0xdf, 0x18, 0x30, 0x6a, 0x93, 0xd5, 0x9e, 0x0c, 0xa9,
0xf1, 0xfe, 0x6a, 0x2d, 0x8b, 0x92, 0x7d, 0x39, 0x3c, 0xd0, 0x23, 0x2f, 0xd7, 0x70, 0x37, 0xf7,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
])
self.assertEqual(XeCryptBnQwNeMod(r3, r4, 0x20, 0x20), r5out)
def test_normal_2(self):
r3 = bytes([
0x93, 0x25, 0x50, 0x3e, 0xa5, 0xd4, 0x78, 0x10, 0x49, 0x8b, 0x32, 0x53, 0x33, 0x49, 0xf6, 0xbf,
0x4d, 0xf5, 0x1e, 0xaf, 0x72, 0xa8, 0x0b, 0x35, 0xc2, 0x53, 0xcd, 0xd8, 0x67, 0x71, 0xd2, 0x5a,
0x38, 0xf9, 0x65, 0xdd, 0x8f, 0x2c, 0xfd, 0xf9, 0x02, 0x2b, 0x3e, 0xeb, 0x9d, 0xc5, 0x0d, 0x9c,
0x72, 0x5f, 0x53, 0x7f, 0xf6, 0x84, 0x6d, 0x25, 0x88, 0x27, 0x55, 0x47, 0xad, 0x90, 0x03, 0x84,
0xb4, 0xdc, 0xe8, 0x16, 0x70, 0x8f, 0xdb, 0xeb, 0xc5, 0x7a, 0xa7, 0xc2, 0xcd, 0xe8, 0xf1, 0xc8,
0x7e, 0x4f, 0xea, 0xaf, 0xdf, 0xe8, 0x1b, 0xa8, 0xbe, 0xb4, 0xb1, 0xba, 0x00, 0x3e, 0xb5, 0x94,
0x6c, 0x1c, 0xff, 0x71, 0x53, 0x8f, 0x1a, 0x31, 0x32, 0xab, 0x34, 0xa7, 0x13, 0xce, 0x74, 0x37,
0x6b, 0xf8, 0x2a, 0xa3, 0xde, 0xe7, 0x6d, 0x3d, 0x22, 0x8c, 0x88, 0xf4, 0xc5, 0x74, 0x39, 0x92,
0x0c, 0xed, 0x7f, 0x75, 0x65, 0xc2, 0xcb, 0x75, 0xbf, 0x85, 0x20, 0x33, 0x70, 0xeb, 0x37, 0x80,
0x43, 0xc8, 0x17, 0xd9, 0x37, 0xe6, 0x08, 0x0c, 0xfd, 0xb1, 0x7d, 0x94, 0x87, 0x36, 0x15, 0xf8,
0x3c, 0xeb, 0x73, 0x45, 0x31, 0xa5, 0xde, 0xca, 0xf3, 0xaa, 0x02, 0xed, 0xfa, 0x9f, 0xb1, 0xb9,
0x9e, 0xc4, 0xb1, 0x2b, 0x4f, 0x5e, 0x6f, 0xda, 0xc6, 0xe9, 0x29, 0x8f, 0xc0, 0x73, 0xee, 0x7d,
0xf3, 0x05, 0x9b, 0xbb, 0xc9, 0x9f, 0x1e, 0x8c, 0x85, 0x8a, 0x39, 0xa3, 0xd6, 0xc7, 0xe7, 0x33,
0xe4, 0xfb, 0x5a, 0x29, 0xd4, 0x2e, 0x5c, 0x0e, 0xf5, 0x72, 0x9a, 0xe1, 0x2e, 0xb4, 0x86, 0xaf,
0xae, 0x07, 0x81, 0xa7, 0x0e, 0xb3, 0x76, 0xd2, 0xa9, 0xcf, 0xc3, 0xe6, 0x65, 0x89, 0x27, 0x4b,
0xf0, 0x7f, 0x2f, 0xe1, 0x2d, 0x72, 0x96, 0xc0, 0xeb, 0xdf, 0xbf, 0x66, 0x98, 0xb4, 0x9e, 0x3e,
0x44, 0x2b, 0x95, 0x32, 0x0d, 0x2a, 0x7b, 0x5f, 0x63, 0x94, 0x84, 0x6e, 0x71, 0x7a, 0xed, 0x76,
0x10, 0xe0, 0xd6, 0xba, 0x49, 0x9c, 0x59, 0x1e, 0x62, 0xbf, 0x29, 0xfb, 0xca, 0xc2, 0x39, 0x42,
0xff, 0x76, 0x5f, 0x04, 0x47, 0xbf, 0x60, 0x43, 0x8d, 0x6d, 0x09, 0x92, 0x51, 0x2f, 0x63, 0x9f,
0x62, 0xe4, 0xd2, 0x95, 0x25, 0x19, 0xa4, 0xd8, 0x93, 0xe1, 0xf2, 0x91, 0x9b, 0xdd, 0x45, 0x90,
0xe6, 0x4a, 0xe0, 0x85, 0x01, 0x89, 0x05, 0xde, 0xcf, 0xeb, 0xe0, 0x62, 0x31, 0xf1, 0xeb, 0xc4,
0x00, 0xf4, 0x85, 0xae, 0xee, 0xa5, 0x9b, 0x3d, 0xbf, 0x32, 0xaa, 0x51, 0xaa, 0x5c, 0xf7, 0xf3,
0x27, 0x47, 0xa9, 0x29, 0xa6, 0x87, 0xa8, 0x15, 0xe9, 0xe2, 0x2f, 0x5a, 0xaf, 0xd3, 0x4f, 0xb1,
0x6f, 0x47, 0x3c, 0x24, 0x3d, 0xc9, 0x83, 0x4e, 0x27, 0x51, 0x41, 0x0f, 0x44, 0xb8, 0xf5, 0x88,
0xfa, 0x1e, 0xeb, 0x26, 0x80, 0x99, 0x08, 0x24, 0x37, 0xed, 0x77, 0x75, 0xa6, 0x8b, 0x45, 0x25,
0x85, 0x27, 0x85, 0x7b, 0xa5, 0x3c, 0x41, 0x84, 0xb6, 0x42, 0xc7, 0x01, 0xeb, 0x40, 0x2e, 0x3c,
0x3a, 0xf4, 0xf2, 0x27, 0xfe, 0x2c, 0x40, 0x6f, 0x79, 0x17, 0xd8, 0x47, 0x6e, 0x87, 0x34, 0x55,
0x14, 0xc1, 0x47, 0x24, 0x0b, 0x2b, 0xa1, 0x20, 0x1f, 0x35, 0x4e, 0x5e, 0x37, 0xff, 0x6a, 0x3e,
0x3e, 0x67, 0xe4, 0xa1, 0xb0, 0x0b, 0xd4, 0x26, 0xe6, 0x45, 0x7c, 0xee, 0x68, 0xa9, 0x87, 0xd5,
0x74, 0xbf, 0x16, 0x21, 0xb7, 0xc0, 0x71, 0xea, 0x8c, 0xf3, 0x19, 0x1e, 0x52, 0x7a, 0x69, 0xf4,
0xd0, 0xeb, 0x45, 0xa8, 0xa4, 0x7a, 0x22, 0x8f, 0x24, 0xc0, 0x2a, 0x80, 0x38, 0xd2, 0x88, 0x82,
0x4b, 0x5f, 0xa6, 0x8b, 0xf2, 0x11, 0x40, 0x07, 0x15, 0xcb, 0x22, 0xa1, 0xd7, 0x0e, 0x1c, 0x25,
]) # Unchanged
r4 = bytes([
0x4a, 0xf1, 0xee, 0x3e, 0xe1, 0x2c, 0xbc, 0x97, 0xd8, 0x2d, 0xf1, 0x5b, 0xb0, 0x2e, 0xea, 0xaf,
0x0e, 0xec, 0x36, 0xd2, 0x0a, 0xf8, 0x57, 0x65, 0x2b, 0x49, 0x76, 0x1c, 0x40, 0x3e, 0x45, 0x2e,
0x30, 0x27, 0x90, 0xf4, 0x26, 0x42, 0x4a, 0x4c, 0xaf, 0x75, 0xe8, 0x98, 0x79, 0xa7, 0x3e, 0x44,
0x03, 0x03, 0xd9, 0x86, 0xb3, 0xef, 0x13, 0xdf, 0xcd, 0x10, 0xf1, 0xe5, 0x63, 0x6b, 0xcd, 0x2a,
0x30, 0xe4, 0xa2, 0x16, 0xcf, 0xb7, 0xd5, 0x7c, 0x1b, 0xb0, 0x49, 0xb0, 0x4f, 0xc4, 0xd5, 0x8c,
0x8d, 0xd8, 0x3c, 0x81, 0xe8, 0xa1, 0x73, 0xee, 0x76, 0xb8, 0xb8, 0x49, 0x4d, 0x0b, 0x37, 0x42,
0x70, 0x4e, 0x08, 0x43, 0x2d, 0xdb, 0xf2, 0x37, 0xfe, 0x82, 0xcf, 0xc2, 0x99, 0xc4, 0xa1, 0xa9,
0xbd, 0x8b, 0xc0, 0x0e, 0x88, 0x87, 0xcd, 0x82, 0x1c, 0x75, 0x82, 0x62, 0x16, 0x77, 0xc6, 0xfa,
0x19, 0x5c, 0x04, 0xa7, 0x98, 0x40, 0x05, 0xbf, 0xf1, 0x98, 0x9b, 0xd8, 0x00, 0x95, 0x4a, 0x2a,
0xea, 0x45, 0x5b, 0xb9, 0x89, 0x94, 0x9e, 0x07, 0xb8, 0xb5, 0x86, 0x6c, 0x9d, 0xb9, 0xc6, 0xef,
0xc0, 0x45, 0x7b, 0x3b, 0xaf, 0x32, 0x53, 0x63, 0xd1, 0x49, 0x58, 0xe0, 0xda, 0x9b, 0x96, 0x86,
0x1a, 0xfa, 0xac, 0xaf, 0x66, 0xee, 0x49, 0x0c, 0x37, 0x01, 0x60, 0x8f, 0xca, 0x80, 0x71, 0xfd,
0x2c, 0x35, 0x59, 0xd8, 0x3d, 0xfe, 0xf8, 0xef, 0x0f, 0x3d, 0x12, 0x28, 0xef, 0x6c, 0xf3, 0xad,
0x76, 0x33, 0x37, 0x4f, 0xae, 0x1d, 0x59, 0xe3, 0xdc, 0x14, 0x88, 0x46, 0xa6, 0x96, 0x4a, 0xb8,
0xee, 0x6b, 0x74, 0xf4, 0x46, 0x13, 0x04, 0x45, 0xb1, 0x96, 0xc3, 0xca, 0x98, 0xcb, 0xe0, 0x9b,
0x53, 0x5f, 0xd1, 0xf9, 0x78, 0x0a, 0x90, 0x46, 0xd7, 0xb3, 0x49, 0x2a, 0x5b, 0x89, 0x91, 0x1f,
0x65, 0x49, 0x51, 0xf7, 0xea, 0x7b, 0x30, 0x57, 0x01, 0x72, 0xe2, 0x45, 0xe8, 0x42, 0xea, 0xd3
])
r5 = bytes([
0xe8, 0x23, 0x02, 0xdc, 0xb7, 0x69, 0x2a, 0xbc, 0x4d, 0x8c, 0xae, 0x8b, 0x45, 0x1f, 0x2f, 0xa9,
0xc7, 0x5e, 0xfe, 0xb7, 0x15, 0x7b, 0x09, 0xac, 0x84, 0x92, 0x21, 0x5a, 0xfa, 0x22, 0x51, 0x87,
0xe4, 0xf0, 0xa2, 0x38, 0x87, 0x86, 0x28, 0x50, 0x29, 0xd2, 0x38, 0x47, 0x4f, 0x97, 0x37, 0x8c,
0x34, 0xb3, 0x0b, 0x79, 0xa9, 0xc2, 0x86, 0x8f, 0xfb, 0xb5, 0xfe, 0x0e, 0x51, 0x0e, 0x1d, 0x9b,
0xfb, 0xb7, 0x5a, 0x45, 0x10, 0x1a, 0xdb, 0x25, 0x46, 0x9d, 0x7b, 0x0a, 0xc0, 0x53, 0x25, 0xec,
0x7e, 0x18, 0x1a, 0xbe, 0xa4, 0xe6, 0xf3, 0xdb, 0x63, 0x58, 0x5b, 0xeb, 0xc0, 0xc0, 0xfd, 0x1f,
0x74, 0x5c, 0x31, 0x78, 0x7e, 0xbe, 0x17, 0xc2, 0xe5, 0x54, 0x0b, 0x1f, 0x75, 0x70, 0xba, 0x17,
0x67, 0x5b, 0x0b, 0x16, 0xa1, 0x66, 0x5d, 0x30, 0x41, 0x65, 0x03, 0x62, 0xac, 0x3e, 0x84, 0x5b,
0xef, 0x7f, 0xf8, 0x0a, 0x90, 0x7c, 0xdb, 0xb9, 0x34, 0x67, 0xf2, 0xaf, 0x00, 0xef, 0x49, 0x33,
0x6b, 0xf0, 0x07, 0xf0, 0x8d, 0x6e, 0x66, 0x30, 0x18, 0xa8, 0x9e, 0x53, 0xb4, 0x3b, 0xc5, 0xfb,
0xbc, 0x31, 0x00, 0xb9, 0x0b, 0x6d, 0x9e, 0x44, 0xbd, 0x6f, 0x20, 0x6f, 0x29, 0x84, 0xb5, 0x15,
0x31, 0x02, 0x24, 0x39, 0x17, 0xf7, 0x1c, 0x35, 0xc7, 0x0b, 0x7a, 0x70, 0x21, 0xe6, 0x53, 0x80,
0x56, 0x78, 0x27, 0x3c, 0xa0, 0xaa, 0x86, 0x4f, 0x6d, 0xfe, 0x5b, 0x3e, 0x09, 0x8c, 0xb3, 0x9e,
0x5d, 0x0a, 0x0a, 0x17, 0xaf, 0xf1, 0x19, 0x60, 0x6b, 0xff, 0x2f, 0xc8, 0x55, 0x9c, 0x6c, 0x08,
0xe2, 0x72, 0x7b, 0xc9, 0x23, 0x6a, 0xb6, 0x78, 0xa2, 0xb4, 0x9d, 0x3f, 0xee, 0x60, 0xb7, 0xad,
0x35, 0xcc, 0xbb, 0x6c, 0x91, 0xb0, 0xc4, 0xe9, 0x5b, 0xbe, 0x0f, 0x5a, 0x9a, 0x9f, 0xb9, 0x90,
0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x40, 0x80, 0x23, 0x03, 0x00,
0x00, 0x00, 0x00, 0x00, 0x70, 0x04, 0xfd, 0x60, 0x82, 0x01, 0x94, 0x20, 0x78, 0x1e, 0x12, 0xc0,
0x70, 0x04, 0xff, 0x50, 0x81, 0x5f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30, 0x00, 0x01, 0x80,
0x28, 0x00, 0x00, 0x82, 0x80, 0x09, 0xad, 0xbc, 0x80, 0x09, 0x88, 0x74, 0x80, 0x0a, 0x18, 0xe0,
0x70, 0x04, 0xfe, 0x00, 0x78, 0x1a, 0xe0, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x82, 0x01, 0x02, 0xd0,
0x70, 0x04, 0xfe, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x23, 0x01, 0x48,
0x00, 0x00, 0x00, 0x00, 0x78, 0x1e, 0x12, 0xd0, 0xff, 0xff, 0xff, 0xff, 0x81, 0x5f, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x78, 0x1e, 0x0f, 0x80, 0x81, 0x75, 0x93, 0xa0, 0x80, 0x0a, 0x18, 0xe0,
0x70, 0x04, 0xfe, 0x60, 0x81, 0x75, 0xdc, 0x30, 0x00, 0x00, 0x00, 0x00, 0x82, 0x01, 0x02, 0xd0,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1b,
0x00, 0x00, 0x00, 0x00, 0x78, 0x1e, 0x29, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x23, 0x01, 0x48,
])
r5out = bytes([
0x79, 0x67, 0x0d, 0x33, 0x3b, 0x82, 0x4d, 0x68, 0x66, 0x5a, 0x26, 0x47, 0x5c, 0x06, 0x13, 0xf6,
0x36, 0x1a, 0x0a, 0x85, 0x3b, 0xa8, 0x16, 0x1f, 0xef, 0x06, 0xe2, 0x26, 0x3b, 0x28, 0x45, 0x8c,
0xe3, 0xab, 0x11, 0x58, 0x43, 0x1f, 0xe5, 0x05, 0x9e, 0x27, 0x8a, 0xad, 0xc0, 0xc7, 0x66, 0x5f,
0x17, 0xea, 0x78, 0x90, 0x9c, 0xb6, 0xea, 0x09, 0xce, 0xe5, 0x22, 0xa2, 0xca, 0xe0, 0x72, 0xe1,
0xd8, 0xe7, 0x04, 0xc0, 0x1f, 0x57, 0x6c, 0x5e, 0xb2, 0x5d, 0xe0, 0x06, 0x16, 0x5b, 0xf7, 0x42,
0x77, 0x57, 0x20, 0x5b, 0xb4, 0x05, 0x26, 0xef, 0x07, 0x52, 0x35, 0xc8, 0xe8, 0x06, 0x93, 0xaf,
0xca, 0x04, 0xd7, 0x04, 0xe6, 0x0d, 0xd4, 0xf6, 0x6a, 0x97, 0x6f, 0x95, 0x17, 0x48, 0x63, 0xe9,
0xb8, 0x57, 0xa8, 0x1e, 0x88, 0x68, 0xa6, 0x0c, 0xbd, 0x5a, 0xc5, 0x40, 0x8b, 0x67, 0x4f, 0xef,
0x90, 0x61, 0x3b, 0x15, 0xc3, 0xb2, 0xd8, 0x4d, 0x2c, 0x46, 0x93, 0x0e, 0x11, 0xa4, 0x73, 0x03,
0x69, 0x69, 0xa4, 0x8a, 0xa8, 0x0a, 0x99, 0x88, 0x97, 0xdc, 0xcd, 0xbe, 0x75, 0xdb, 0xce, 0xb9,
0x15, 0x47, 0x43, 0x2d, 0x05, 0x55, 0xab, 0x94, 0xd1, 0x7d, 0xda, 0x52, 0x2a, 0x02, 0x01, 0x69,
0x2d, 0xfd, 0xa6, 0x2c, 0x2f, 0xbb, 0xba, 0x7d, 0x39, 0xf8, 0x86, 0x26, 0xcd, 0x4a, 0xb5, 0x03,
0x2c, 0xa6, 0x87, 0x99, 0xca, 0x8f, 0xc3, 0x58, 0x9a, 0x06, 0xb7, 0x80, 0xad, 0xbb, 0x21, 0x55,
0x49, 0x66, 0xb2, 0x38, 0xc8, 0x96, 0xfc, 0xbf, 0x74, 0x2f, 0x95, 0x9d, 0xbb, 0xf7, 0x58, 0x32,
0x69, 0x3d, 0x87, 0x57, 0xb2, 0x76, 0xef, 0x25, 0x42, 0xf1, 0x2d, 0x6e, 0x27, 0x57, 0x6e, 0xa0,
0x1f, 0x5e, 0xd7, 0xfb, 0x08, 0x79, 0xd3, 0x28, 0x97, 0xa3, 0x1b, 0xaa, 0xd0, 0x1e, 0x24, 0x60,
0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x40, 0x80, 0x23, 0x03, 0x00,
0x00, 0x00, 0x00, 0x00, 0x70, 0x04, 0xfd, 0x60, 0x82, 0x01, 0x94, 0x20, 0x78, 0x1e, 0x12, 0xc0,
0x70, 0x04, 0xff, 0x50, 0x81, 0x5f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30, 0x00, 0x01, 0x80,
0x28, 0x00, 0x00, 0x82, 0x80, 0x09, 0xad, 0xbc, 0x80, 0x09, 0x88, 0x74, 0x80, 0x0a, 0x18, 0xe0,
0x70, 0x04, 0xfe, 0x00, 0x78, 0x1a, 0xe0, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x82, 0x01, 0x02, 0xd0,
0x70, 0x04, 0xfe, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x23, 0x01, 0x48,
0x00, 0x00, 0x00, 0x00, 0x78, 0x1e, 0x12, 0xd0, 0xff, 0xff, 0xff, 0xff, 0x81, 0x5f, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x78, 0x1e, 0x0f, 0x80, 0x81, 0x75, 0x93, 0xa0, 0x80, 0x0a, 0x18, 0xe0,
0x70, 0x04, 0xfe, 0x60, 0x81, 0x75, 0xdc, 0x30, 0x00, 0x00, 0x00, 0x00, 0x82, 0x01, 0x02, 0xd0,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1b,
0x00, 0x00, 0x00, 0x00, 0x78, 0x1e, 0x29, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x23, 0x01, 0x48,
])
self.assertEqual(XeCryptBnQwNeMod(r3, r4, 0x40, 0x20), r5out)
def test_normal_3(self):
r3 = bytes([
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
])
r4 = bytes([
])
r5 = bytes([
0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50,
0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50,
0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50,
0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50,
0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50,
0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50,
0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50,
0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50,
0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50,
0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50,
0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50,
0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50,
0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50,
0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50,
0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50,
0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50, 0x6c, 0x6f, 0x6f, 0x50,
])
r5out = bytes([
])
        # NOTE: the fixture data above is incomplete; the assertion follows
        # the (actual, expected) call pattern of the tests above.
        self.assertEqual(XeCryptBnQwNeMod(r3, r4, 0x12, 0x10), r5out)
def test_normal_4(self):
r3 = bytes([
])
r4 = bytes([
])
r5 = bytes([
])
r5out = bytes([
])
        # NOTE: the fixture data above is incomplete; the assertion follows
        # the (actual, expected) call pattern of the tests above.
        self.assertEqual(XeCryptBnQwNeMod(r3, r4, 0x12, 0x10), r5out)
| gpl-3.0 | -194,653,558,820,259,620 | 64.93186 | 120 | 0.585542 | false |
joseamaya/tambox | contabilidad/urls.py | 1 | 4452 | from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from contabilidad.views import Tablero, ListadoCuentasContables, \
CargarCuentasContables, ListadoTiposDocumentos, CrearTipoDocumento, \
EliminarTipoDocumento, DetalleTipoDocumento, ModificarTipoDocumento, \
ReporteExcelCuentasContables, ModificarCuentaContable, CrearCuentaContable, \
DetalleCuentaContable, CrearImpuesto, DetalleImpuesto, ListadoImpuestos, \
ModificarImpuesto, CrearConfiguracion, ModificarConfiguracion, \
ListadoFormasPago, CrearFormaPago, ModificarFormaPago, DetalleFormaPago, \
ReporteExcelFormasPago, EliminarFormaPago, ReporteExcelTiposDocumentos, \
CargarTiposDocumentos, ListadoTiposCambio, CrearTipoCambio, DetalleTipoCambio, ModificarTipoCambio, \
ObtenerTipoCambio, ListadoTiposExistencias, CargarTiposExistencias
urlpatterns = [
url(r'^tablero/$', login_required(Tablero.as_view()), name="tablero"),
url(r'^formas_pago/$', login_required(ListadoFormasPago.as_view()), name="formas_pago"),
url(r'^crear_forma_pago/$', login_required(CrearFormaPago.as_view()), name="crear_forma_pago"),
url(r'^crear_tipo_cambio/$', login_required(CrearTipoCambio.as_view()), name="crear_tipo_cambio"),
url(r'^modificar_forma_pago/(?P<pk>.+)/$', login_required(ModificarFormaPago.as_view()),
name="modificar_forma_pago"),
url(r'^detalle_forma_pago/(?P<pk>.+)/$', login_required(DetalleFormaPago.as_view()), name="detalle_forma_pago"),
url(r'^detalle_tipo_cambio/(?P<pk>.+)/$', login_required(DetalleTipoCambio.as_view()), name="detalle_tipo_cambio"),
url(r'^maestro_formas_pago_excel/$', login_required(ReporteExcelFormasPago.as_view()),
name="maestro_formas_pago_excel"),
url(r'^eliminar_forma_pago/$', login_required(EliminarFormaPago.as_view()), name="eliminar_forma_pago"),
url(r'^cuentas_contables/$', (ListadoCuentasContables.as_view()), name="cuentas_contables"),
url(r'^tipos_existencias/$', (ListadoTiposExistencias.as_view()), name="tipos_existencias"),
url(r'^configuracion/$', (CrearConfiguracion.as_view()), name="configuracion"),
url(r'^tipos_documentos/$', (ListadoTiposDocumentos.as_view()), name="tipos_documentos"),
url(r'^tipos_cambio/$', (ListadoTiposCambio.as_view()), name="tipos_cambio"),
url(r'^impuestos/$', (ListadoImpuestos.as_view()), name="impuestos"),
url(r'^detalle_tipo_documento/(?P<pk>.+)/$', (DetalleTipoDocumento.as_view()), name="detalle_tipo_documento"),
url(r'^detalle_cuenta_contable/(?P<pk>.+)/$', (DetalleCuentaContable.as_view()), name="detalle_cuenta_contable"),
url(r'^detalle_impuesto/(?P<pk>.+)/$', (DetalleImpuesto.as_view()), name="detalle_impuesto"),
url(r'^cargar_cuentas_contables/$', (CargarCuentasContables.as_view()), name="cargar_cuentas_contables"),
url(r'^cargar_tipos_documento/$', (CargarTiposDocumentos.as_view()), name="cargar_tipos_documento"),
url(r'^crear_tipo_documento/$', (CrearTipoDocumento.as_view()), name="crear_tipo_documento"),
url(r'^crear_impuesto/$', (CrearImpuesto.as_view()), name="crear_impuesto"),
url(r'^crear_cuenta_contable/$', (CrearCuentaContable.as_view()), name="crear_cuenta_contable"),
url(r'^cargar_tipos_existencias/$', (CargarTiposExistencias.as_view()), name="cargar_tipos_existencias"),
url(r'^eliminar_tipo_documento/$', (EliminarTipoDocumento.as_view()), name="eliminar_tipo_documento"),
url(r'^modificar_tipo_documento/(?P<pk>.+)/$', (ModificarTipoDocumento.as_view()), name="modificar_tipo_documento"),
url(r'^modificar_tipo_cambio/(?P<pk>.+)/$', (ModificarTipoCambio.as_view()), name="modificar_tipo_cambio"),
url(r'^modificar_cuenta_contable/(?P<pk>.+)/$', (ModificarCuentaContable.as_view()),
name="modificar_cuenta_contable"),
url(r'^modificar_impuesto/(?P<pk>.+)/$', (ModificarImpuesto.as_view()), name="modificar_impuesto"),
url(r'^modificar_configuracion/(?P<pk>.+)/$', (ModificarConfiguracion.as_view()), name="modificar_configuracion"),
url(r'^maestro_cuentas_contables_excel/$', (ReporteExcelCuentasContables.as_view()),
name="maestro_cuentas_contables_excel"),
url(r'^maestro_tipos_documentos_excel/$', (ReporteExcelTiposDocumentos.as_view()),
name="maestro_tipos_documentos_excel"),
url(r'^obtener_tipo_cambio/$', (ObtenerTipoCambio.as_view()), name="obtener_tipo_cambio"),
]
| gpl-3.0 | 7,144,240,158,981,100,000 | 82 | 120 | 0.707772 | false |
"""List a user's permissions."""
import click
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import formatting
from SoftLayer.CLI import helpers
@click.command()
@click.argument('identifier')
@environment.pass_env
def cli(env, identifier):
"""User Permissions. TODO change to list all permissions, and which users have them"""
mgr = SoftLayer.UserManager(env.client)
user_id = helpers.resolve_id(mgr.resolve_ids, identifier, 'username')
object_mask = "mask[id, permissions, isMasterUserFlag, roles]"
user = mgr.get_user(user_id, object_mask)
all_permissions = mgr.get_all_permissions()
user_permissions = perms_to_dict(user['permissions'])
if user['isMasterUserFlag']:
click.secho('This account is the Master User and has all permissions enabled', fg='green')
env.fout(roles_table(user))
env.fout(permission_table(user_permissions, all_permissions))
def perms_to_dict(perms):
"""Takes a list of permissions and transforms it into a dictionary for better searching"""
permission_dict = {}
for perm in perms:
permission_dict[perm['keyName']] = True
return permission_dict
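# Illustration (hypothetical data) of the transformation performed above:
#   perms_to_dict([{'keyName': 'TICKET_VIEW'}, {'keyName': 'USER_MANAGE'}])
#   -> {'TICKET_VIEW': True, 'USER_MANAGE': True}
# so permission_table() below can check membership per row in O(1) instead
# of rescanning the permission list.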
def permission_table(user_permissions, all_permissions):
"""Creates a table of available permissions"""
table = formatting.Table(['Description', 'KeyName', 'Assigned'])
table.align['KeyName'] = 'l'
table.align['Description'] = 'l'
table.align['Assigned'] = 'l'
for perm in all_permissions:
assigned = user_permissions.get(perm['keyName'], False)
table.add_row([perm['name'], perm['keyName'], assigned])
return table
def roles_table(user):
"""Creates a table for a users roles"""
table = formatting.Table(['id', 'Role Name', 'Description'])
for role in user['roles']:
table.add_row([role['id'], role['name'], role['description']])
return table
| mit | -6,047,882,941,413,080,000 | 32.210526 | 98 | 0.688325 | false |
dracidoupe/graveyard | ddcz/migrations/0018_auto_20180617_1740.py | 1 | 2511 | # Generated by Django 2.0.2 on 2018-06-17 15:40
import ddcz.models.magic
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("ddcz", "0017_auto_20180617_1604"),
]
operations = [
migrations.AddField(
model_name="gallerypicture",
name="hodnota_hlasovani",
field=models.IntegerField(blank=True, null=True),
),
migrations.AddField(
model_name="gallerypicture",
name="pocet_hlasujicich",
field=models.IntegerField(blank=True, null=True),
),
migrations.AddField(
model_name="gallerypicture",
name="precteno",
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name="gallerypicture",
name="tisknuto",
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name="gallerypicture",
name="autmail",
field=ddcz.models.magic.MisencodedCharField(
blank=True, max_length=50, null=True
),
),
migrations.AlterField(
model_name="gallerypicture",
name="autor",
field=ddcz.models.magic.MisencodedCharField(
blank=True, max_length=50, null=True
),
),
migrations.AlterField(
model_name="gallerypicture",
name="datum",
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name="gallerypicture",
name="id",
field=models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
migrations.AlterField(
model_name="gallerypicture",
name="schvaleno",
field=ddcz.models.magic.MisencodedCharField(
choices=[("a", "Schváleno"), ("n", "Neschváleno")], max_length=1
),
),
migrations.AlterField(
model_name="gallerypicture",
name="zdrojmail",
field=ddcz.models.magic.MisencodedCharField(
blank=True, max_length=30, null=True
),
),
migrations.AlterField(
model_name="gallerypicture",
name="pochvez",
field=ddcz.models.magic.MisencodedIntegerField(max_length=5),
),
]
| mit | 5,232,469,323,310,818,000 | 30.759494 | 87 | 0.540454 | false |
actuino/unicorn-display | display-client/client.py | 1 | 3700 | #!/usr/bin/env python
# Unicorn Jauge Display Client
import json
import os
import sys, getopt
# pip install socketIO-client
# https://github.com/invisibleroads/socketIO-client
from socketIO_client import SocketIO, LoggingNamespace
current_page = 0;
# The socker server Hostname
DISPLAY_SERVER_HOST = 'localhost'
if 'DISPLAY_SERVER_HOST' in os.environ:
DISPLAY_SERVER_HOST = os.environ['DISPLAY_SERVER_HOST']
# The socker server Port
DISPLAY_SERVER_PORT = 80
if 'DISPLAY_SERVER_PORT' in os.environ:
DISPLAY_SERVER_PORT = os.environ['DISPLAY_SERVER_PORT']
# The Physical display name
DISPLAY_NAME = 'Astra' # Default unicorn name http://www.myangelcardreadings.com/unicornnames.html
# TODO : random unicorn name according to serial.
if 'DISPLAY_NAME' in os.environ:
DISPLAY_NAME = os.environ['DISPLAY_NAME']
CONFIG_FILE_NAME = 'res/config.json'
if 'CONFIG_FILE_NAME' in os.environ:
CONFIG_FILE_NAME = os.environ['CONFIG_FILE_NAME']
import unicorndisplay
def main(argv):
global CONFIG_FILE_NAME
try:
opts, args = getopt.getopt(argv,"hc:",["help","config="])
except getopt.GetoptError:
print 'client.py -c <configfile> '
sys.exit(2)
for opt, arg in opts:
if opt in ("-h", "--help"):
print 'client.py -c <configfile> '
sys.exit()
elif opt in ("-c", "--config"):
CONFIG_FILE_NAME = arg
print 'Config file is "', CONFIG_FILE_NAME
unicorndisplay.init(CONFIG_FILE_NAME)
if __name__ == "__main__":
main(sys.argv[1:])
# From http://raspberrypi.stackexchange.com/questions/2086/how-do-i-get-the-serial-number
def getserial():
# Extract serial from cpuinfo file
cpuserial = "0000000000000000"
try:
f = open('/proc/cpuinfo','r')
for line in f:
if line[0:6]=='Serial':
cpuserial = line[10:26]
f.close()
except:
cpuserial = "ERROR000000000"
return cpuserial
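# Expected behaviour (assumes the usual Raspberry Pi /proc/cpuinfo layout):
# a line such as "Serial          : 00000000a3b2c1d0" makes line[10:26]
# yield the 16-character hex serial; if the file cannot be read, the
# function falls back to "ERROR000000000".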
def on_connect():
print "Connected"
socketIO.emit('name','{"Serial":"'+getserial()+'", "Name":"'+DISPLAY_NAME+'"}')
def on_file(*args):
unicorndisplay.receive_file(args[0])
def send_current_page():
page = {'Page':current_page,'Serial':getserial(),'Name':DISPLAY_NAME,'Channel':unicorndisplay.get_current_channel()}
socketIO.emit('page',json.dumps(page))
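# The emitted 'page' payload therefore has this shape (illustrative values):
#   {"Page": 0, "Serial": "00000000a3b2c1d0", "Name": "Astra",
#    "Channel": ...}  # Channel comes from unicorndisplay.get_current_channel()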
def on_command(*args):
global current_page
try:
# ? message for us ?
print "receive command: ",args[0]["Command"]
if DISPLAY_NAME != args[0]["Name"]:
print "Ignored Command"
return
command = args[0]["Command"]
if command == "NextPage" or command == 'LeftGesture':
current_page = unicorndisplay.next_page()
send_current_page()
#socketIO.emit('page','{"Page":"'+str(current_page)+'","Serial":"'+getserial()+'", "Name":"'+DISPLAY_NAME+'"}')
elif command == "PreviousPage" or command == 'RightGesture':
current_page = unicorndisplay.previous_page()
send_current_page()
#socketIO.emit('page','{"Page":"'+str(current_page)+'","Serial":"'+getserial()+'", "Name":"'+DISPLAY_NAME+'"}')
else:
print 'Unknown Command'
except Exception as e:
s = str(e)
        print "Bad message", s
print DISPLAY_NAME,getserial(),"Connecting to",DISPLAY_SERVER_HOST, DISPLAY_SERVER_PORT
socketIO = SocketIO(DISPLAY_SERVER_HOST, DISPLAY_SERVER_PORT)
socketIO.on('connect', on_connect)
socketIO.on('file', on_file)
socketIO.on('command', on_command)
send_current_page()
# TODO : Manage exceptions and reconnect
while 1:
socketIO.wait(60)
socketIO.emit('ping')
| mit | 1,647,634,160,931,848,400 | 28.6 | 125 | 0.628919 | false |
magenta-aps/mox | oio_rest/oio_rest/validate.py | 1 | 13865 | # Copyright (C) 2015-2019 Magenta ApS, https://magenta.dk.
# Contact: [email protected].
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import copy
import jsonschema
from . import settings
# A very nice reference explaining the JSON schema syntax can be found
# here: https://spacetelescope.github.io/understanding-json-schema/
# JSON schema types
BOOLEAN = {'type': 'boolean'}
INTEGER = {'type': 'integer'}
STRING = {'type': 'string'}
def _generate_schema_array(items, maxItems=None):
schema_array = {
'type': 'array',
'items': items
}
if maxItems:
schema_array['maxItems'] = maxItems
return schema_array
def _generate_schema_object(properties, required, kwargs=None):
schema_obj = {
'type': 'object',
'properties': properties,
'additionalProperties': False
}
# passing an empty array causes the schema to fail validation...
if required:
schema_obj['required'] = required
if kwargs:
schema_obj.update(kwargs)
return schema_obj
# Mapping from DATABASE_STRUCTURE types to JSON schema types
TYPE_MAP = {
'aktoerattr': _generate_schema_object(
{
'accepteret': STRING,
'obligatorisk': STRING,
'repraesentation_uuid': {'$ref': '#/definitions/uuid'},
},
['accepteret', 'obligatorisk', 'repraesentation_uuid']
),
'boolean': BOOLEAN,
'date': STRING,
'int': INTEGER,
'interval(0)': STRING,
'journaldokument': _generate_schema_object(
{
'dokumenttitel': STRING,
'offentlighedundtaget': {
'$ref': '#/definitions/offentlighedundtaget'}
},
['dokumenttitel', 'offentlighedundtaget']
),
'journalnotat': _generate_schema_object(
{
'titel': STRING,
'notat': STRING,
'format': STRING,
},
['titel', 'notat', 'format']
),
'offentlighedundtagettype': {
'$ref': '#/definitions/offentlighedundtaget'},
'soegeord': _generate_schema_array(_generate_schema_array(STRING), 2),
'text[]': _generate_schema_array(STRING),
'timestamptz': STRING,
'vaerdirelationattr': _generate_schema_object(
{
'forventet': BOOLEAN,
'nominelvaerdi': STRING
},
['forventet', 'nominelvaerdi']
)
}
def _get_metadata(obj, metadata_type, key):
"""
Get the metadata for a given attribute
:param obj: The type of LoRa object, i.e. 'bruger', 'organisation' etc.
:param metadata_type: Must be either 'attributter' or 'relationer'
:param key: The attribute to get the metadata from, e.g. 'egenskaber'
:return: Dictionary containing the metadata for the attribute fields
"""
metadata = settings.REAL_DB_STRUCTURE[obj].get(
'{}_metadata'.format(metadata_type), [])
if not metadata or key not in metadata:
return metadata
return metadata[key]
def _get_mandatory(obj, attribute_name):
"""
Get a list of mandatory attribute fields for a given attribute.
:param obj: The type of LoRa object, i.e. 'bruger', 'organisation' etc.
:param attribute_name: The attribute to get the fields from,
e.g. 'egenskaber'
:return: Sorted list of mandatory attribute keys
"""
attribute = _get_metadata(obj, 'attributter', attribute_name)
mandatory = sorted(
key for key in attribute if attribute[key].get('mandatory', False)
)
return mandatory
def _handle_attribute_metadata(obj, fields, attribute_name):
"""
Update the types of the attribute fields.
:param obj: The type of LoRa object, i.e. 'bruger', 'organisation' etc.
:param fields: A dictionary of attribute fields to update.
:param attribute_name: The name of the attribute fields
:return: Dictionary of updated attribute fields.
"""
attribute = _get_metadata(obj, 'attributter', attribute_name)
fields.update(
{
key: TYPE_MAP[attribute[key]['type']]
for key in attribute
if attribute[key].get('type', False)
}
)
return fields
def _generate_attributter(obj):
"""
Generate the 'attributter' part of the JSON schema.
:param obj: The type of LoRa object, i.e. 'bruger', 'organisation' etc.
:return: Dictionary representing the 'attributter' part of the JSON schema.
"""
db_attributter = settings.REAL_DB_STRUCTURE[obj]['attributter']
attrs = {}
required = []
for attrname, attrval in db_attributter.items():
full_name = '{}{}'.format(obj, attrname)
schema = {
key: STRING
for key in attrval
}
schema.update({'virkning': {'$ref': '#/definitions/virkning'}})
schema = _handle_attribute_metadata(obj, schema, attrname)
mandatory = _get_mandatory(obj, attrname)
attrs[full_name] = _generate_schema_array(
_generate_schema_object(
schema,
mandatory + ['virkning'],
),
)
if mandatory:
required.append(full_name)
return _generate_schema_object(attrs, required)
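# Illustrative output sketch (hypothetical object 'bruger' with an
# 'egenskaber' group whose only mandatory field is 'brugernavn'): the
# function above would produce roughly
#   {'type': 'object',
#    'properties': {'brugeregenskaber': _generate_schema_array(
#        _generate_schema_object({...}, ['brugernavn', 'virkning']))},
#    'required': ['brugeregenskaber'],
#    'additionalProperties': False}
# i.e. each attribute group is keyed '<objekttype><attributnavn>' and its
# items must carry the mandatory fields plus a 'virkning' validity period.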
def _generate_tilstande(obj):
"""
Generate the 'tilstande' part of the JSON schema.
:param obj: The type of LoRa object, i.e. 'bruger', 'organisation' etc.
:return: Dictionary representing the 'tilstande' part of the JSON schema.
"""
tilstande = dict(settings.REAL_DB_STRUCTURE[obj]['tilstande'])
properties = {}
required = []
for key in sorted(tilstande):
tilstand_name = obj + key
properties[tilstand_name] = _generate_schema_array(
_generate_schema_object(
{
key: {
'type': 'string',
'enum': tilstande[key]
},
'virkning': {'$ref': '#/definitions/virkning'},
},
[key, 'virkning']
)
)
required.append(tilstand_name)
return _generate_schema_object(properties, required)
def _handle_relation_metadata_all(obj, relation):
"""
Update relations an their metadata (e.g. types) for all relations of the
given LoRa object.
:param obj: The type of LoRa object, i.e. 'bruger', 'organisation' etc.
:param relation: The base relation to update.
:return: Dictionary representing the updated relation.
"""
metadata_all = _get_metadata(obj, 'relationer', '*')
for key in metadata_all:
if 'type' in metadata_all[key]:
relation['items']['oneOf'][0]['properties'][key] = TYPE_MAP[
metadata_all[key]['type']]
relation['items']['oneOf'][1]['properties'][key] = TYPE_MAP[
metadata_all[key]['type']]
return relation
def _handle_relation_metadata_specific(obj, relation_schema):
"""
Update relations an their metadata (e.g. types) for specific relations
of the given LoRa object.
:param obj: The type of LoRa object, i.e. 'bruger', 'organisation' etc.
:param relation_schema: Dictionary representing the 'relationer' part of
the JSON schema.
:return: Dictionary representing the updated 'relationer' part of
the JSON schema.
"""
metadata_specific = (
settings.REAL_DB_STRUCTURE[obj].get('relationer_metadata', [])
)
for relation in [key for key in metadata_specific if not key == '*']:
for i in range(2):
properties = relation_schema[relation]['items']['oneOf'][i][
'properties']
metadata = metadata_specific[relation]
for key in metadata:
if 'type' in metadata[key]:
properties[key] = TYPE_MAP[metadata[key]['type']]
if 'enum' in metadata[key]:
# Enum implies type = text
properties[key] = {
'type': 'string',
'enum': metadata[key]['enum']
}
if metadata[key].get('mandatory', False):
relation_schema[relation]['items']['oneOf'][i][
'required'].append(key)
if obj == 'tilstand':
# Handle special case for 'tilstand' where UUID not allowed
item = relation_schema['tilstandsvaerdi']['items']['oneOf'][0]
del item['properties']['uuid']
item['required'].remove('uuid')
relation_schema['tilstandsvaerdi']['items'] = item
return relation_schema
def _generate_relationer(obj):
"""
Generate the 'relationer' part of the JSON schema.
:param obj: The type of LoRa object, i.e. 'bruger', 'organisation' etc.
:return: Dictionary representing the 'relationer' part of the JSON schema.
"""
relationer_nul_til_en = \
settings.REAL_DB_STRUCTURE[obj]['relationer_nul_til_en']
relationer_nul_til_mange = settings.REAL_DB_STRUCTURE[obj][
'relationer_nul_til_mange']
relation_nul_til_mange = _generate_schema_array(
{
'oneOf': [
_generate_schema_object(
{
'uuid': {'$ref': '#/definitions/uuid'},
'virkning': {'$ref': '#/definitions/virkning'},
'objekttype': STRING
},
['uuid', 'virkning']
),
_generate_schema_object(
{
'urn': {'$ref': '#/definitions/urn'},
'virkning': {'$ref': '#/definitions/virkning'},
'objekttype': STRING
},
['urn', 'virkning']
)
]
}
)
relation_nul_til_mange = _handle_relation_metadata_all(
obj, relation_nul_til_mange)
relation_schema = {
relation: copy.deepcopy(relation_nul_til_mange)
for relation in relationer_nul_til_mange
}
relation_nul_til_en = copy.deepcopy(relation_nul_til_mange)
relation_nul_til_en['items']['oneOf'][0]['properties'].pop('indeks', None)
relation_nul_til_en['items']['oneOf'][1]['properties'].pop('indeks', None)
relation_nul_til_en['maxItems'] = 1
for relation in relationer_nul_til_en:
relation_schema[relation] = relation_nul_til_en
relation_schema = _handle_relation_metadata_specific(obj, relation_schema)
return {
'type': 'object',
'properties': relation_schema,
'additionalProperties': False
}
def _generate_varianter():
"""
Function to generate the special 'varianter' section of the JSON schema
used for the the 'Dokument' LoRa object type.
"""
return _generate_schema_array(_generate_schema_object(
{
'egenskaber': _generate_schema_array(_generate_schema_object(
{
'varianttekst': STRING,
'arkivering': BOOLEAN,
'delvisscannet': BOOLEAN,
'offentliggoerelse': BOOLEAN,
'produktion': BOOLEAN,
'virkning': {'$ref': '#/definitions/virkning'}
},
['varianttekst', 'virkning']
))
},
['egenskaber']
))
def generate_json_schema(obj):
"""
Generate the JSON schema corresponding to LoRa object type.
:param obj: The LoRa object type, i.e. 'bruger', 'organisation',...
:return: Dictionary representing the JSON schema.
"""
if obj == 'dokument':
# Due to an inconsistency between the way LoRa handles
# "DokumentVariantEgenskaber" and the specs' we will have to do
# this for now, i.e. we allow any JSON-object for "Dokument".
return {'type': 'object'}
schema = _generate_schema_object(
{
'attributter': _generate_attributter(obj),
'tilstande': _generate_tilstande(obj),
'relationer': _generate_relationer(obj),
'note': STRING,
},
['attributter', 'tilstande']
)
schema['$schema'] = 'http://json-schema.org/schema#'
schema['id'] = 'http://github.com/magenta-aps/mox'
schema['definitions'] = {
'urn': {
'type': 'string',
'pattern': '^urn:.'
},
'uuid': {
'type': 'string',
'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-'
'[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'
},
'virkning': _generate_schema_object(
{
'from': STRING,
'to': STRING,
'from_included': BOOLEAN,
'to_included': BOOLEAN,
'aktoerref': {'$ref': '#/definitions/uuid'},
'aktoertypekode': STRING,
'notetekst': STRING,
},
['from', 'to']
),
'offentlighedundtaget': _generate_schema_object(
{
'alternativtitel': STRING,
'hjemmel': STRING
},
['alternativtitel', 'hjemmel']
)
}
return schema
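# Cache of generated schemas, keyed by LoRa object type.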
SCHEMAS = {}
def get_schema(obj_type):
try:
return SCHEMAS[obj_type]
except KeyError:
pass
schema = SCHEMAS[obj_type] = copy.deepcopy(generate_json_schema(obj_type))
return schema
def validate(input_json, obj_type):
"""
Validate request JSON according to JSON schema.
    :param input_json: The request JSON
    :param obj_type: The LoRa object type, e.g. 'bruger', 'organisation', ...
:raise jsonschema.exceptions.ValidationError: If the request JSON is not
valid according to the JSON schema.
"""
jsonschema.validate(input_json, get_schema(obj_type))
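if __name__ == '__main__':
    # Illustrative sketch only, not part of the original module: generate and
    # inspect the schema for one object type. 'bruger' is assumed to be a key
    # in settings.REAL_DB_STRUCTURE; substitute any configured LoRa type.
    import json
    print(json.dumps(get_schema('bruger'), indent=2)[:1000])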
| mpl-2.0 | 8,253,058,164,997,057,000 | 30.227477 | 79 | 0.567039 | false |
heidtn/MeteorTracker | meteortracker/meteor_tracker.py | 1 | 2196 | """
@author(s): Nathan Heidt, Jean Nassar
This is the primary program for detecting and logging Meteors. Running
`python meteor_tracker.py` is sufficient.
Make sure the parameters specified in the config.ini file are correct.
"""
import configparser
from . import camera
from . import find_events
from . import save_event
class Tracker(object):
"""
A class for running and managing the primary meteor detection tasks.
Parameters
----------
source : str, optional
If given a the path of a video file, this will use that. Otherwise
it will use the primary camera.
Attributes
----------
cam : Camera
This is used to either access the camera or a video file stream
config : ConfigParser
This is to parse the config file for different user settings
event_logger : EventLogger
When an event is detected, the images are passed to this class for
logging
event_finder : EventFinder
This class is in charge of viewing images to actually find the
events themselves.
"""
def __init__(self, source=None):
self.cam = camera.Camera(source)
self.config = configparser.ConfigParser()
self.config.read('config.ini')
self.event_logger = save_event.EventLogger()
self.event_finder = find_events.EventFinder()
def run(self):
"""
Run the meteor tracker program. If a potential meteor is detected,
log the result.
"""
while True:
current_image = self.cam.get_frame()
previous_image = self.cam.get_previous_frame()
# detect number of anomalies (keypoints) and highlight them in im
keypoints, im = self.event_finder.find_motion_anomaly(
previous_image,
current_image
)
# we have found an anomaly
if keypoints:
print("Anomaly found!")
                self.event_logger.add_event(current_image, previous_image)
if __name__ == "__main__":
Tracker().run()
| mit | -95,246,667,177,402,660 | 30.826087 | 77 | 0.584699 | false |
OpenSPA/dvbapp | lib/python/Screens/MessageBox.py | 1 | 4589 | from Screen import Screen
from Components.ActionMap import ActionMap
from Components.Label import Label
from Components.Pixmap import Pixmap
from Components.Sources.StaticText import StaticText
from Components.MenuList import MenuList
from enigma import eTimer
class MessageBox(Screen):
TYPE_YESNO = 0
TYPE_INFO = 1
TYPE_WARNING = 2
TYPE_ERROR = 3
TYPE_MESSAGE = 4
def __init__(self, session, text, type=TYPE_YESNO, timeout=-1, close_on_any_key=False, default=True, enable_input=True, msgBoxID=None, picon=None, simple=False, list=[], timeout_default=None):
self.type = type
Screen.__init__(self, session)
self.skinName = ["MessageBox"]
if self.type == self.TYPE_YESNO:
self.setTitle(_("Question"))
elif self.type == self.TYPE_INFO:
self.setTitle(_("Information"))
elif self.type == self.TYPE_WARNING:
self.setTitle(_("Warning"))
elif self.type == self.TYPE_ERROR:
self.setTitle(_("Error"))
else:
self.setTitle(_("Message"))
if simple:
self.skinName="MessageBoxSimple"
self.msgBoxID = msgBoxID
self["text"] = Label(text)
self["Text"] = StaticText(text)
self["selectedChoice"] = StaticText()
self.text = text
self.close_on_any_key = close_on_any_key
self.timeout_default = timeout_default
self["ErrorPixmap"] = Pixmap()
self["QuestionPixmap"] = Pixmap()
self["InfoPixmap"] = Pixmap()
self["WarningPixmap"] = Pixmap()
self.timerRunning = False
self.initTimeout(timeout)
picon = picon or type
if picon != self.TYPE_ERROR:
self["ErrorPixmap"].hide()
if picon != self.TYPE_YESNO:
self["QuestionPixmap"].hide()
if picon != self.TYPE_INFO:
self["InfoPixmap"].hide()
if picon != self.TYPE_WARNING:
self["WarningPixmap"].hide()
self.title = self.type < self.TYPE_MESSAGE and [_("Question"), _("Information"), _("Warning"), _("Error")][self.type] or _("Message")
if type == self.TYPE_YESNO:
if list:
self.list = list
elif default == True:
self.list = [ (_("yes"), True), (_("no"), False) ]
else:
self.list = [ (_("no"), False), (_("yes"), True) ]
else:
self.list = []
self["list"] = MenuList(self.list)
if self.list:
self["selectedChoice"].setText(self.list[0][0])
else:
self["list"].hide()
if enable_input:
self["actions"] = ActionMap(["MsgBoxActions", "DirectionActions"],
{
"cancel": self.cancel,
"ok": self.ok,
"alwaysOK": self.alwaysOK,
"up": self.up,
"down": self.down,
"left": self.left,
"right": self.right,
"upRepeated": self.up,
"downRepeated": self.down,
"leftRepeated": self.left,
"rightRepeated": self.right
}, -1)
self.onLayoutFinish.append(self.layoutFinished)
def layoutFinished(self):
self.setTitle(self.title)
def initTimeout(self, timeout):
self.timeout = timeout
if timeout > 0:
self.timer = eTimer()
self.timer.callback.append(self.timerTick)
self.onExecBegin.append(self.startTimer)
self.origTitle = None
if self.execing:
self.timerTick()
else:
self.onShown.append(self.__onShown)
self.timerRunning = True
else:
self.timerRunning = False
def __onShown(self):
self.onShown.remove(self.__onShown)
self.timerTick()
def startTimer(self):
self.timer.start(1000)
def stopTimer(self):
if self.timerRunning:
del self.timer
self.onExecBegin.remove(self.startTimer)
self.setTitle(self.origTitle)
self.timerRunning = False
def timerTick(self):
if self.execing:
self.timeout -= 1
if self.origTitle is None:
self.origTitle = self.instance.getTitle()
self.setTitle(self.origTitle + " (" + str(self.timeout) + ")")
if self.timeout == 0:
self.timer.stop()
self.timerRunning = False
self.timeoutCallback()
def timeoutCallback(self):
print "Timeout!"
if self.timeout_default is not None:
self.close(self.timeout_default)
else:
self.ok()
def cancel(self):
self.close(False)
def ok(self):
if self.list:
self.close(self["list"].getCurrent()[1])
else:
self.close(True)
def alwaysOK(self):
self.close(True)
def up(self):
self.move(self["list"].instance.moveUp)
def down(self):
self.move(self["list"].instance.moveDown)
def left(self):
self.move(self["list"].instance.pageUp)
def right(self):
self.move(self["list"].instance.pageDown)
def move(self, direction):
if self.close_on_any_key:
self.close(True)
self["list"].instance.moveSelection(direction)
if self.list:
self["selectedChoice"].setText(self["list"].getCurrent()[0])
self.stopTimer()
def __repr__(self):
return str(type(self)) + "(" + self.text + ")"
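# Illustrative usage from another enigma2 Screen (not part of this module);
# self.session and openWithCallback are provided by the Screen framework:
#   self.session.openWithCallback(self.deleteConfirmed, MessageBox,
#       _("Really delete this recording?"), MessageBox.TYPE_YESNO)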
| gpl-2.0 | 843,997,252,445,277,600 | 24.780899 | 193 | 0.665504 | false |
HaoboGu/Structure-Similarity | Drugbank.py | 1 | 6376 | import random
import numpy as np
import time
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import roc_auc_score
def read_drugbank_data():
# read interaction data
interaction_file = open('data/interacts.csv')
interact_dict = {}
line = interaction_file.readline()
while line:
db_id1, db_id2, interact_level = line[0:-1].split('\t')
interact_dict[db_id1, db_id2] = int(interact_level) # use multiple keys
line = interaction_file.readline()
interaction_file.close()
# read similarity data
similarity_file = open('data/chemicalsimilarity.csv')
similarity_dict = {}
line = similarity_file.readline()
while line:
db_id1, db_id2, similarity = line[0:-1].split('\t')
similarity_dict[db_id1, db_id2] = float(similarity)
line = similarity_file.readline()
similarity_file.close()
return interact_dict, similarity_dict
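# Illustrative input layout, inferred from the parsing above (the data files
# are tab-separated despite the .csv extension); IDs and values are made up:
#   interacts.csv:            DB00001<TAB>DB00002<TAB>1
#   chemicalsimilarity.csv:   DB00001<TAB>DB00002<TAB>0.73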
class Validation:
def __init__(self, interact_dict, similarity_dict):
self.interaction = interact_dict
self.similarity = similarity_dict
self.train_set = {}
self.validation_set = {}
self.sim_link = {}
self.positive_train = {}
self.max_sim_with_positive_link = {}
self.max_sim_with_positive_link_for_val = {}
def divide_data(self):
self.train_set = {}
self.validation_set = {}
        index = random.sample(range(0, 9892), 989)  # randomly hold out 989 of the 9892 interactions (~1/10) as the validation set
flag = 0
for i in self.interaction:
if flag in index:
self.validation_set[i] = self.interaction[i]
else:
self.train_set[i] = self.interaction[i]
flag += 1
# create known ddi dict:
for key in self.train_set:
if self.train_set[key] == 1:
self.positive_train[key] = 1
def compute_link_sim(self, key1, key2):
link_sim1 = (self.similarity[key1[0], key2[0]] + self.similarity[key1[1], key2[1]]) / 2.0
link_sim2 = (self.similarity[key1[0], key2[1]] + self.similarity[key1[1], key2[0]]) / 2.0
return max(link_sim1, link_sim2)
def create_simlink(self):
self.sim_link = {}
# num = 1
for inter_key in self.train_set:
max_link_sim = 0
for inter_key2 in self.positive_train:
if inter_key[0] in inter_key2 and inter_key[1] in inter_key2:
continue
else:
link_sim = self.compute_link_sim(inter_key, inter_key2)
if link_sim > max_link_sim:
max_link_sim = link_sim
self.sim_link[inter_key] = inter_key2
self.max_sim_with_positive_link[inter_key] = max_link_sim
# print('iter', num)
# num += 1
def create_simlink_for_val(self):
self.sim_link = {}
# num = 1
for inter_key in self.validation_set:
max_link_sim = 0
for inter_key2 in self.positive_train:
if inter_key[0] in inter_key2 and inter_key[1] in inter_key2:
continue
else:
link_sim = self.compute_link_sim(inter_key, inter_key2)
if link_sim > max_link_sim:
max_link_sim = link_sim
# self.sim_link[inter_key] = inter_key2
self.max_sim_with_positive_link_for_val[inter_key] = max_link_sim
sim_list = []
inter_list = []
for inter_key in self.validation_set:
feature = self.max_sim_with_positive_link_for_val[inter_key]
sim_list.append(feature)
inter_list.append(self.validation_set[inter_key])
return sim_list, inter_list
def create_train_array(self):
sim_list = []
inter_list = []
num = 0
for inter_key in self.train_set:
if self.train_set[inter_key] == 1:
feature = self.max_sim_with_positive_link[inter_key]
sim_list.append(feature)
inter_list.append(self.train_set[inter_key])
num += 1
print('num of positive samples in train set: ', num)
num = num * 3
for inter_key in self.train_set:
if self.train_set[inter_key] == 0:
feature = self.max_sim_with_positive_link[inter_key]
sim_list.append(feature)
inter_list.append(self.train_set[inter_key])
num = num - 1
if num == 0:
break
return sim_list, inter_list
def lr(self, sim_list, inter_list):
lr = LogisticRegression(solver='sag')
sim_list = np.array(sim_list)
sim_list = sim_list.reshape(sim_list.shape[0], 1)
inter_list = np.array(inter_list)
inter_list = inter_list.reshape(inter_list.shape[0], 1)
lr.fit(sim_list, inter_list)
val_sim, val_inter = self.create_simlink_for_val()
val_sim = np.array(val_sim)
val_sim = val_sim.reshape(val_sim.shape[0], 1)
val_inter = np.array(val_inter).reshape(val_inter.__len__(), 1)
result = lr.predict(val_sim)
prob_re = lr.predict_proba(val_sim)
prob_re = prob_re.transpose()
auroc = roc_auc_score(val_inter, prob_re[1])
print('roc score:', auroc)
return result, prob_re, val_inter
start = time.time()
interact_dict, sim_dict = read_drugbank_data()
v = Validation(interact_dict, sim_dict)
v.divide_data()
v.create_simlink()
sim_list, inter_list = v.create_train_array()
result, prob_re, val_inter = v.lr(sim_list, inter_list)
TP = 0 # predict 1, actual 1
FP = 0 # predict 1, actual 0
TN = 0 # predict 0, actual 0
FN = 0 # predict 0, actual 1
for i in range(len(result)):
    if result[i] == 0 and val_inter[i] == 0:
TN += 1
elif result[i] == 0 and val_inter[i] == 1:
FN += 1
elif result[i] == 1 and val_inter[i] == 0:
FP += 1
elif result[i] == 1 and val_inter[i] == 1:
TP += 1
print('tp:', TP, ' tn:', TN, ' fp:', FP, ' fn:', FN)
precision = TP / (TP + FP)
recall = TP / (TP + FN)
print('precision:', precision)
print('recall:', recall)
print('f-score: ', 2 * precision * recall / (precision + recall))
end = time.time()
print(end-start) | mit | 7,838,667,354,194,536,000 | 35.649425 | 99 | 0.559285 | false |
dunkenj/smpy | scripts/data/fitting.py | 1 | 27412 | import numpy as np
import array
import os, sys
import re
import time
import multiprocessing
import h5py
import logging
from astropy.table import Table, Column
from astropy import units as u
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-p","--params", type=str,
help = "Parameter file")
parser.add_argument("-q", "--quiet", help = "Suppress extra outputs",
action = "store_true")
args = parser.parse_args()
quiet = args.quiet
params_root = re.split(".py", args.params)[0]
if os.path.isfile(params_root+".pyc"):
os.remove(params_root+".pyc")
import importlib
try:
params = importlib.import_module(params_root)
print('Successfully loaded "{0}" as params'.format(args.params))
importlib.reload(params)
except:
print('Failed to load "{0}" as params'.format(args.params))
raise
if quiet:
quietprint = lambda *a: None
else:
def quietprint(*args):
for arg in args:
print(arg, end=' ')
print()
# Fitting function definition for later use by Processess
def galaxyFit(inputQueue, printQueue, printlock):
for gal in iter(inputQueue.get, 'STOP'):
j = np.argmin(np.abs(z-zobs[gal])) # Find closest model redshift
flux_obs = obs[gal,:]
flux_err = obs_err[gal,:]
#flux_obs[fo <= 0.] = 0. # Set negative fluxes to zero
I = np.where(flux_err > 0.)[0] # Find bands with no observation
if len(I) == 0:
if include_rest:
                M_scaled = np.ones(len(flux_obs)) * -99.
restframe_output = ' '.join(M_scaled.astype('str'))
output_string = '{0} {1} {2} {3} {4} {5} {6} {7}' \
' {8} {9} {10} {11} {12} {13} {14} {15} {16}'.format(gal+1,ID[gal],zobs[gal],-99,-99,-99,-99,-99,-99, -99, -99,-99,len(I),-99,z[j],restframe_output,'\n')
else:
output_string = '{0} {1} {2} {3} {4} {5} {6} {7} {8} {9} {10} {11} {12} {13} {14}'.format(gal+1,ID[gal],zobs[gal],-99,-99,-99,-99,-99,-99,-99, -99,-99,len(I),-99,'\n')
printQueue.put(output_string)
continue
flux_obs = flux_obs[I] # and exclude from fit
flux_err = flux_err[I]
flux_models = f[j,I,:]
tot_err = np.sqrt(flux_err**2 + (0.1*flux_obs)**2)
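        # Analytic least-squares normalisation of each template:
        # scale = sum(F_mod*F_obs/err^2) / sum(F_mod^2/err^2),
        # i.e. the maximum-likelihood scaling for a fixed template shape.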
top = 0.
bottom = 0.
for i in range(len(flux_obs)):
top += (flux_models[i,:]*flux_obs[i])/(tot_err[i]**2)
bottom += (flux_models[i,:]**2)/(tot_err[i]**2)
scale = top/bottom
scale = np.reshape(scale, (n_metal, n_tg, n_tau, n_tauv, n_fesc))
chisq = 0.
for i in range(len(flux_obs)):
chisq += ((np.abs(scale*flux_models[i,:]-flux_obs[i])**2)/(flux_err[i])**2)
chimin, minind = np.nanmin(chisq), np.nanargmin(chisq)
if np.isinf(chimin) or np.isnan(minind):
if include_rest:
M_scaled = np.ones(len(flux_obs)) * -99.
restframe_output = ' '.join(M_scaled.astype('str'))
output_string = '{0} {1} {2} {3} {4} {5} {6} {7} {8} {9} {10} {11} {12} {13} {14} {15} {16}'.format(gal+1,ID[gal],zobs[gal],-99,-99,-99,-99,-99,-99, -99, -99,-99,len(I),-99,z[j],restframe_output,'\n')
else:
output_string = '{0} {1} {2} {3} {4} {5} {6} {7} {8} {9} {10} {11} {12} {13} {14}'.format(gal+1,ID[gal],zobs[gal],-99,-99,-99,-99,-99,-99,-99, -99,-99,len(I),-99,'\n')
printQueue.put(output_string)
continue
#Find the coordinate of the model with the bestfit mass
mi, tgi, ti, tvi, fi = np.unravel_index(minind,
(n_metal, n_tg,
n_tau, n_tauv, n_fesc))
Bestfit_Mass = np.log10(scale[mi, tgi, ti, tvi, fi]*flux_corr)
Bestfit_SFR = (scale[mi, tgi, ti, tvi, fi] *
SFR[mi, tgi, ti, tvi, fi]*flux_corr)
#Bestfit_Beta = beta[tgi,tvi,ti,mi]
Bestfit_Beta = -99.
#Scale the observed tot_mag band of the template to be the same as the observed tot_mag band of the galaxy
#Convert the templates so they are no longer units of per stellar mass
F_rest = f[0,:]*scale[mi, tgi, ti, tvi, fi]*flux_corr
restframeMags = 23.9 - 2.5*np.log10(F_rest)
#UV_rest = UV_flux[0]*scale[tgi,tvi,ti,mi]*flux_corr
#restframeMUV = 23.9 - 2.5*np.log10(UV_rest)
M_scaled = restframeMags[:, mi, tgi, ti, tvi, fi]
#MUV_scaled = restframeMUV[tgi,tvi,ti,mi]
MUV_scaled = -99.
if np.isnan(Bestfit_Mass) or np.isinf(chimin):
Bestfit_Mass = -99
#M_scaled[:] = -99
tgs = -99
tvs = -99
taus = -99
mis = -99
escape_fraction = -99
else:
tgs = tg[tgi]/1e9
tvs = tv[tvi]
taus = tau[ti]
mis = metallicities[mi]
escape_fraction = fesc[fi]
printlock.acquire()
print('{0:6d} {1:8d} {2:>5.2f} {3:>7.2f} {4:>8.1f} {5:>8.3f} {6:>5.1f} {7:>8.2f} {8:>4.2f} {9:>5.2f}'.format(gal+1,ID[gal], zobs[gal],Bestfit_Mass,chimin,tgs,tvs,taus,mis,np.log10(Bestfit_SFR)))
if include_rest:
restframe_output = ' '.join(M_scaled.astype('str'))
output_string = '{0} {1} {2} {3} {4} {5} {6} {7} {8} {9} {10} {11} {12} {13} {14} {15} {16}'.format(gal+1,ID[gal],zobs[gal],Bestfit_Mass,chimin,tgs,tvs,taus,mis, MUV_scaled, minind,Bestfit_SFR,len(I),Bestfit_Beta,z[j],restframe_output,'\n')
else:
output_string = '{0} {1} {2} {3} {4} {5} {6} {7} {8} {9} {10} {11} {12} {13} {14}'.format(gal+1,ID[gal],zobs[gal],Bestfit_Mass,chimin,tgs,tvs,taus,mis, MUV_scaled, minind,Bestfit_SFR,len(I),Bestfit_Beta,'\n')
printlock.release()
printQueue.put(output_string)
def galaxyFit2(inputQueue, printQueue, printlock):
for gal in iter(inputQueue.get, 'STOP'):
output_string = '{0[0]} {0[1]} {0[2]} {0[3]} {0[4]} {0[5]} ' + \
'{0[6]} {0[7]} {0[8]} {0[9]} {0[10]} {0[11]} ' + \
'{0[12]} {0[13]} {0[14]}'
j = np.argmin(np.abs(z-zobs[gal])) # Find closest model redshift
log_mass_min, log_mass_max = 7, 13
log_sfr_min, log_sfr_max = -3, 4
flux_obs = obs[gal,:]
flux_err = obs_err[gal,:]
#flux_obs[fo <= 0.] = 0. # Set negative fluxes to zero
I = np.where(flux_err > 0.)[0] # Find bands with no observation
if len(I) == 0:
output_array = [gal+1, ID[gal], zobs[gal], z[j],
-99, -99, -99, -99, -99, -99, -99,
-99,-99,len(I),-99,'\n']
output = output_string.format(output_array)
if include_rest:
M_scaled = np.ones(len(flux_obs)) * -99.
restframe_output = ' '.join(M_scaled.astype('str'))
output = output + restframe_output + ' \n'
else:
output = output + ' \n'
            # No usable photometry: emit the formatted row plus placeholder
            # (all-zero) PDFs so the consumer's unpacking still works.
            empty_mass = np.histogram([], range=(log_mass_min, log_mass_max), bins=120)
            empty_sfr = np.histogram([], range=(log_sfr_min, log_sfr_max), bins=140)
            printQueue.put([output, empty_mass, empty_sfr])
            continue
flux_obs = flux_obs[I] # and exclude from fit
flux_err = flux_err[I]
flux_models = f[j,I,:]
tot_err = np.sqrt(flux_err**2 + (params.flux_err*flux_obs)**2)
top = 0.
bottom = 0.
for i in range(len(flux_obs)):
top += (flux_models[i,:]*flux_obs[i])/(tot_err[i]**2)
bottom += (flux_models[i,:]**2)/(tot_err[i]**2)
scale = top/bottom
scale = np.reshape(scale, (n_metal, n_tg, n_tau, n_tauv, n_fesc))
chisq = 0.
for i in range(len(flux_obs)):
chisq += ((np.abs(scale*flux_models[i,:]-flux_obs[i])**2)/(tot_err[i])**2)
chimin, minind = np.nanmin(chisq), np.nanargmin(chisq)
likelihood = np.reshape(np.exp(-0.5*chisq),
(n_metal, n_tg, n_tau, n_tauv, n_fesc))
likelihood[np.isnan(likelihood)] = 0.
likelihood = np.abs(likelihood/likelihood.sum())
if np.isinf(chimin) or np.isnan(minind):
output_array = [gal+1, ID[gal], zobs[gal], z[j],
-99, -99, -99, -99, -99, -99, -99,
-99,-99,len(I),-99,'\n']
output = output_string.format(output_array)
else:
#Find the coordinate of the model with the bestfit mass
mi, tgi, ti, tvi, fi = np.unravel_index(minind,
(n_metal, n_tg,
n_tau, n_tauv, n_fesc))
Masses = np.abs(np.log10(scale*flux_corr))
SFRs = np.abs(np.log10(scale * SFR * flux_corr))
mass_hist = np.histogram(Masses.flatten(),
range = (log_mass_min, log_mass_max),
bins = 120,
weights = likelihood.flatten(),
density = True)
sfr_hist = np.histogram(SFRs.flatten(),
range = (log_sfr_min, log_sfr_max),
bins = 140,
weights = likelihood.flatten(),
density = True)
Bestfit_Mass = np.abs(np.log10(scale[mi, tgi, ti, tvi, fi]*flux_corr))
Bestfit_SFR = np.abs(np.log10(scale[mi, tgi, ti, tvi, fi] *
SFR[mi, tgi, ti, tvi, fi]*flux_corr))
if np.isnan(Bestfit_Mass) or np.isinf(chimin):
Bestfit_Mass = -99
#M_scaled[:] = -99
tgs = -99
tvs = -99
taus = -99
mis = -99
escape_fraction = -99
else:
tgs = tg[tgi]/1e9
tvs = tv[tvi]
taus = tau[ti]
mis = metallicities[mi]
escape_fraction = fesc[fi]
m16, m50, m84 = weighted_quantile(Masses.flatten(),
[0.16, 0.5, 0.84],
sample_weight=likelihood.flatten(),
values_sorted=False)
s16, s50, s84 = weighted_quantile(SFRs.flatten(),
[0.16, 0.5, 0.84],
sample_weight=likelihood.flatten(),
values_sorted=False)
printlock.acquire()
MUV_scaled = -99.
Bestfit_Beta = -99.
print_string = "{0[0]:6d} {0[1]:8d} {0[2]:>5.2f} " + \
"{0[3]:>7.2f} {0[4]:>8.1f} {0[5]:>8.3f} " + \
"{0[6]:>5.1f} {0[7]:>8.2f} {0[8]:>4.2f} " + \
"{0[9]:>5.2f}"
print_array = [gal+1, ID[gal], zobs[gal],
Bestfit_Mass, chimin,
tgs, tvs, taus, mis,
Bestfit_SFR]
print(print_string.format(print_array))
output_string = '{n} {id} {zobs} {ztemp} {mass_best} {sfr_best} '+ \
'{chi_best} {tg} {tvs} {taus} {mis} {fesc} '+ \
'{mass_med} {mass_l68} {mass_u68} ' + \
'{sfr_med} {sfr_l68} {sfr_u68} ' + \
'{nfilts} '
output_values = {'n': gal+1,
'id': ID[gal],
'zobs': zobs[gal], 'ztemp':z[j],
'mass_best': Bestfit_Mass,
'sfr_best': Bestfit_SFR,
'chi_best': chimin,
'tg': tgs, 'tvs': tvs, 'taus': taus,
'mis': mis, 'fesc': escape_fraction,
'mass_med': m50, 'mass_l68': m16, 'mass_u68': m84,
'sfr_med': s50, 'sfr_l68': s16, 'sfr_u68': s84,
'nfilts': len(I)}
output_array = [gal+1, ID[gal], zobs[gal],
Bestfit_Mass, chimin, tgs, tvs, taus, mis,
MUV_scaled, minind, Bestfit_SFR, len(I), -99., '\n']
output = output_string.format(**output_values)
if include_rest:
if np.isinf(chimin) or np.isnan(minind):
M_scaled = np.ones(len(flux_obs)) * -99.
restframe_output = ' '.join(M_scaled.astype('str'))
output = output + restframe_output + ' \n'
else:
F_rest = np.array(f[0, :, mi, tgi, ti, tvi, fi] *
scale[mi, tgi, ti, tvi, fi] * flux_corr)
restframeMags = 23.9 - 2.5*np.log10(F_rest)
restframe_output = ' '.join(restframeMags.astype('str'))
output = output + restframe_output + ' \n'
else:
output = output + ' \n'
printlock.release()
printQueue.put([output, mass_hist, sfr_hist])
def galaxyFitPlus(inputQueue, printQueue, printlock):
for gal in iter(inputQueue.get, 'STOP'):
mass_range = 7, 13
log_sfr_min, log_sfr_max = -3, 4
j = np.argmin(np.abs(z-zobs[gal])) # Find closest model redshift
        fo = obs[gal,:]
        ferr = obs_err[gal,:]
        fo[fo <= 0.] = 0. # Set negative fluxes to zero
        #print fo
        I = (ferr > 0.)*(ferr < 1e6) # Find bands with no observation
        fo = fo[I] # and exclude from fit
        ferr = ferr[I]
        fm = f[j,I,:]
        #print fm[:,0,0,0,0]
        top = 0.
        bottom = 0.
        for i in range(len(fo)):
            top += (fm[i,:]*fo[i])/(ferr[i]**2)
            bottom += (fm[i,:]**2)/(ferr[i]**2)
        scale = top/bottom
        scale = np.reshape(scale, (n_metal, n_tg, n_tau, n_tauv, n_fesc))
        chisq = 0.
        for i in range(len(fo)):
            chisq += ((np.abs(scale*fm[i,:]-fo[i])**2)/(ferr[i])**2)
chimin, minind = np.nanmin(chisq), np.nanargmin(chisq)
chisq -= (chisq.min() - 1)
likelihood = np.exp(-0.5*chisq)
likelihood /= likelihood.sum()
if np.isinf(chimin) or np.isnan(minind) or len(fo) == 0:
output_string = '{0} {1} {2} {3} {4} {5} {6} {7} {8} {9} \
{10} {11} {12} {13} {14} {15} {16} {17} {18}'.format(gal+1,ID[gal],zobs[gal],
-99,-99,-99,-99,-99,-99,
-99, -99, -99, -99,-99,-99,-99,
len(I),-99,'\n')
massLikelihood = np.zeros(mass_bins+1)
massLikelihood[0] = gal
muvLikelihood = np.zeros(muv_bins+1)
muvLikelihood[0] = gal
betaLikelihood = np.zeros(beta_bins+1)
betaLikelihood[0] = gal
#tauLikelihood = np.zeros(n_tau)
#tauLikelihood = np.insert(tauLikelihood,0,gal)
printQueue.put([output_string,massLikelihood,muvLikelihood,betaLikelihood])
continue
#Find the coordinate of the model with the bestfit mass
si,tgi,tvi,ti,mi = np.unravel_index(minind,(mass_bins,n_tg,n_tauv,n_tau,n_ssp))
Bestfit_Mass = np.log10(mass_range[si]*flux_corr)
Bestfit_SFR = (mass_range[si]*SFR[tgi,ti,mi]*flux_corr)
Bestfit_Beta = beta[tgi,tvi,ti,mi]
F_rest = f[:,0]*mass_range[likelihood.argmax(0)]*flux_corr
restframeMags = 23.9 - 2.5*np.log10(F_rest)
UV_rest = UV_flux[0]*mass_range[likelihood.argmax(0)]*flux_corr
restframeMUV = 23.9 - 2.5*np.log10(UV_rest)
Bestfit_restframeMags = restframeMags[:,tgi,tvi,ti,mi]
Bestfit_restframeMUV = restframeMUV[tgi,tvi,ti,mi]
if np.isnan(Bestfit_Mass) or np.isinf(chimin):
Bestfit_Mass = -99
#M_scaled[:] = -99
tgs = -99
tvs = -99
taus = -99
mis = -99
else:
tgs = tg[tgi]/1.e9
tvs = tv[tvi]
taus = tau[ti]/1.e9
mis = mi
"""
Likelihood array section:
"""
mass_hist = np.histogram(np.log10(mass_))
printlock.acquire()
if calc_mode:
            print('{0:4d} {1:6d} {2:>6.2f} {3:>8.1f} {4:>6.2f}'.format(gal+1,ID[gal],Bestfit_Mass,chimin, np.log10(Mode_Mass), '\n'))
else:
print('{0:6d} {1:8f} {2:>5.2f} {3:>7.2f} {4:>8.1f} {5:>8.3f} {6:>5.1f} {7:>8.2f} {8:>3d} {9:>5.2f}'.format(gal+1,int(ID[gal]),zobs[gal],Bestfit_Mass,chimin,tgs,tvs,taus,mis,np.log10(Bestfit_SFR)))
output_string = '{0} {1} {2} {3} {4} {5} {6} {7} {8} {9} {10} {11} {12} {13} {14} {15}'.format(gal+1,int(ID[gal]),zobs[gal],Bestfit_Mass,chimin,tgs,tvs,taus,mis,Bestfit_restframeMags[tot],Bestfit_restframeMUV,minind,Bestfit_SFR,len(I),Bestfit_Beta,'\n')
printlock.release()
printQueue.put([output_string, massLikelihoods, muvLikelihoods, betaLikelihoods])
def getObservations(inputpath):
input_data = Table.read(inputpath,format=input_format)
column_names = list(input_data.columns.keys())
ID = input_data[ID_col]
zobs = input_data[z_col]
filter_names = []
k,l = 0,0
for ii in range(len(column_names)):
if column_names[ii].lower().endswith(flux_col_end.lower()):
if k == 0:
fluxes = input_data[column_names[ii]]
else:
fluxes = np.column_stack((fluxes,input_data[column_names[ii]]))
k+=1
filter_names.append(column_names[ii])
if column_names[ii].lower().endswith(fluxerr_col_end.lower()):
if l == 0:
fluxerrs = input_data[column_names[ii]]
else:
fluxerrs = np.column_stack((fluxerrs,input_data[column_names[ii]]))
l+=1
"""
if filts_used != None:
try:
fluxes = fluxes[:,filts_used]
fluxerrs = fluxerrs[:,filts_used]
        except:
print('Filter mismatch 1')
# Array slicing fail
"""
return ID, zobs, fluxes, fluxerrs, k, filter_names
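# Illustrative catalog layout (column names are assumptions, configured via the
# params file): an astropy-readable table whose flux/error columns share common
# suffixes, e.g.
#   ID   z_spec   U_flux   U_fluxerr   V_flux   V_fluxerr ...
# matched with flux_col_end='_flux' and fluxerr_col_end='_fluxerr'.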
class _function_wrapper(object):
"""
This is a hack to make the likelihood function pickleable when ``args``
or ``kwargs`` are also included.
Stolen from emcee
"""
def __init__(self, f, args, kwargs):
self.f = f
self.args = args
self.kwargs = kwargs
def __call__(self, x):
try:
return self.f(x, *self.args, **self.kwargs)
except:
import traceback
print("emcee: Exception while calling your likelihood function:")
print(" params:", x)
print(" args:", self.args)
print(" kwargs:", self.kwargs)
print(" exception:")
traceback.print_exc()
raise
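# Illustrative only: wrapping a likelihood with fixed extra arguments makes it
# picklable for multiprocessing workers (chi2_func below is hypothetical):
#   lnlike = _function_wrapper(chi2_func, args=(obs, obs_err), kwargs={})
#   lnlike(theta)   # calls chi2_func(theta, obs, obs_err)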
def weighted_quantile(values, quantiles, sample_weight=None, values_sorted=False, old_style=False):
""" Very close to np.percentile, but supports weights.
NOTE: quantiles should be in [0, 1]!
:param values: np.array with data
    :param quantiles: array-like of quantiles to compute, each in [0, 1]
    :param sample_weight: array-like of weights, same length as `values`
:param values_sorted: bool, if True, then will avoid sorting of initial array
:param old_style: if True, will correct output to be consistent with np.percentile.
:return: np.array with computed quantiles.
"""
values = np.array(values)
quantiles = np.array(quantiles)
if sample_weight is None:
sample_weight = np.ones(len(values))
sample_weight = np.array(sample_weight)
assert np.all(quantiles >= 0) and np.all(quantiles <= 1), 'quantiles should be in [0, 1]'
if not values_sorted:
sorter = np.argsort(values)
values = values[sorter]
sample_weight = sample_weight[sorter]
weighted_quantiles = np.cumsum(sample_weight) - 0.5 * sample_weight
if old_style:
        # To be consistent with np.percentile
weighted_quantiles -= weighted_quantiles[0]
weighted_quantiles /= weighted_quantiles[-1]
else:
weighted_quantiles /= np.sum(sample_weight)
return np.interp(quantiles, weighted_quantiles, values)
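# Quick self-check (illustrative): with equal weights this matches the plain
# linear-interpolation median, e.g.
#   >>> weighted_quantile([1, 2, 3, 4], [0.5])
#   array([ 2.5])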
if __name__ == '__main__':
logfile = open("error.log", "w")
original_stderr = sys.stderr
sys.stderr = logfile
start = time.time()
"""
SECTION 1
"""
model_path = params.model_path
input_catalog = params.input_catalog
input_format = params.input_format
z_col = params.z_col
ID_col = params.ID_col
flux_col_end = params.flux_col_end
fluxerr_col_end = params.fluxerr_col_end
ncpus = params.ncpus
filts_used = params.filts_used
include_rest = params.include_rest
output_path = params.output_catalog_path
output_format = params.output_format
output_hdf_path = params.output_hdf_path
calc_mode = params.fitting_mode
flux_corr = params.flux_corr
ID, zobs, obs, obs_err, filters_found, filter_names = getObservations(input_catalog)
"""
Section 2
"""
print("Loading synthetic mags and mass array:")
models = h5py.File(model_path, 'r')
tg = models['ages'].value
tv = models['dust'].value
tau = models['sfh'].value
metallicities = models['metallicities'].value
fesc = models['fesc'].value
Mshape = models['fluxes'].shape
z = models['z']
nfilts = Mshape[1]
n_metal = Mshape[2]
n_tg = Mshape[3]
n_tau = Mshape[4]
n_tauv = Mshape[5]
n_fesc = Mshape[6]
#UV_flux = synmags['UV_flux']
SFR = models['SFR']
Ms = models['Ms']
    if (nfilts == filters_found) and (filts_used is None):
        f = models['fluxes']
    elif filts_used is not None:
try:
f = models['fluxes'][:,filts_used]
obs = obs[:,filts_used]
obs_err = obs_err[:,filts_used]
filter_names = np.array(filter_names)[filts_used]
except:
print('Mis-match between model and observed filter numbers')
raise
# Slice fail
print ("Done.")
"""
SECTION 3
"""
if os.path.isfile(output_path+".temp_output.txt"):
os.remove(output_path+".temp_output.txt")
temp_file = open(output_path+".temp_output.txt","w")
"""
SECTION 4
Chi-sq calculation
"""
out_string = '{0:6s} {1:8s} {2:>5s} {3:>7s} {4:>8s} {5:>8s}' + \
'{6:>5s} {7:>8s} {8:>4s} {9:>5s}'
print(out_string.format('N','ID','zobs','Best', 'chimin',
'tg', 'tauv','tau','met', 'sfr'))
loop_start = time.time()
ncpus = np.clip(ncpus, 1, multiprocessing.cpu_count())
inputQueue = multiprocessing.Queue()
printQueue = multiprocessing.Queue()
printlock = multiprocessing.Lock()
if calc_mode == 'hist':
output_hdf = h5py.File(output_hdf_path, 'w')
output_hdf.create_dataset("mass_pdf", (len(ID), 120), dtype="f")
output_hdf.create_dataset("sfr_pdf", (len(ID), 140), dtype="f")
fitFunction = galaxyFit2
else:
fitFunction = galaxyFit
for i in range( ncpus ):
multiprocessing.Process(target = fitFunction,
args = (inputQueue, printQueue,
printlock)).start()
# Put elements in the send queue for processing
for gal in range( len(ID) ):
inputQueue.put( gal )
if calc_mode == 'hist':
for i, gal in enumerate(ID):
printout, mass_hist, sfr_hist = printQueue.get()
if i == 0:
mass_centers = 0.5*(mass_hist[1][1:] + mass_hist[1][:-1])
sfr_centers = 0.5*(sfr_hist[1][1:] + sfr_hist[1][:-1])
output_hdf.create_dataset("mass_bins", data = mass_centers)
output_hdf.create_dataset("sfr_bins", data = sfr_centers)
output_hdf["mass_pdf"][i] = mass_hist[0]
output_hdf["sfr_pdf"][i] = sfr_hist[0]
temp_file.write( printout )
#tau_array.tofile(tau_file)
else:
for i, gal in enumerate(ID):
printout = printQueue.get()
temp_file.write( printout )
#print len(mass_array), len(muv_array), len(beta_array)
# Stop all the running processes
for i in range( ncpus ):
inputQueue.put( 'STOP' )
# Close both send and receive queues
inputQueue.close()
printQueue.close()
temp_file.close()
models.close()
    if calc_mode == 'hist':
        output_hdf.close()
print("Fitting time taken: {0:.2f} {1}".format(time.time()-loop_start,
'\n'))
"""
Section 3
Reload, format and save output table
"""
    while not temp_file.closed:
        time.sleep(0.1)
data = np.loadtxt(output_path+".temp_output.txt")
try:
rows, cols = data.shape
except:
cols = len(data)
output = Table()
names = ['N', 'ID', 'z', 'zmodel',
'Mass_best', 'SFR_best', 'chi_best',
'Age_best','Dust_best', 'SFH_best',
'Metallicity_best', 'fesc_best',
'Mass_median', 'Mass_l68', 'Mass_u68',
'SFR_median', 'SFR_l68', 'SFR_u68',
'Nfilts']
units = [None, None, None, None,
u.Msun, u.Msun/u.yr, None,
u.Gyr, None, None,
None, None,
u.Msun, u.Msun, u.Msun,
u.Msun/u.yr, u.Msun/u.yr, u.Msun/u.yr,
None]
types = ['i4', 'i4', 'f4', 'f4',
'f4', 'f4', 'f4',
'f4', 'f4', 'f4',
'f4', 'f4',
'f4', 'f4', 'f4',
'f4', 'f4', 'f4',
'i4']
if include_rest:
for name in filter_names:
names.append(name[:-len(flux_col_end)]+'_rest')
units.append(u.mag)
types.append('f4')
for col in range(cols):
column = Column( data[:,col], name = names[col], unit=units[col], dtype=types[col])
output.add_column(column)
table_format = 'ascii.commented_header'
output.sort('ID')
if os.path.isfile(output_path):
os.remove(output_path)
output.write(output_path,format=table_format)
print('Catalog saved')
os.remove(temp_file.name)
print()
print("Total time taken: "+str(time.time()-start))
sys.stderr = original_stderr
logfile.close()
| mit | 8,329,940,140,997,636,000 | 35.844086 | 261 | 0.489311 | false |
ledatelescope/bifrost | test/test_resizing.py | 1 | 4303 | # Copyright (c) 2016, The Bifrost Authors. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of The Bifrost Authors nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""@module test_resizing
This file checks different aspects of resizing a ring for segmentation faults."""
import unittest
import json
import numpy as np
from bifrost.block import TestingBlock, SinkBlock, Pipeline
class ModResizeAsciiBlock(SinkBlock):
"""Copies input ring's data into ascii format in a text file,
after resizing late (after opening sequence)."""
def __init__(self, filename, gulp_size=None):
"""@param[in] filename Name of file to write ascii to"""
self.filename = filename
self.gulp_size = gulp_size
open(self.filename, "w").close()
def load_settings(self, input_header):
"""Load the header, and set the gulp appropriately"""
header_dict = json.loads(input_header.tostring())
self.shape = header_dict['shape']
size_of_float32 = 4
if self.gulp_size is None:
self.gulp_size = np.product(self.shape) * size_of_float32
def iterate_ring_read(self, input_ring):
"""Iterate through one input ring
@param[in] input_ring Ring to read through"""
for sequence in input_ring.read(guarantee=True):
self.load_settings(sequence.header)
input_ring.resize(self.gulp_size)
for span in sequence.read(self.gulp_size):
yield span
def main(self, input_ring):
"""Initiate the writing to file
@param[in] input_rings First ring in this list will be used"""
span_generator = self.iterate_ring_read(input_ring)
span = span_generator.next()
text_file = open(self.filename, 'a')
np.savetxt(text_file, span.data_view(np.float32).reshape((1,-1)))
text_file.close()
class TestLateResize(unittest.TestCase):
"""Test late resizing of a ring in a pipeline"""
def test_modified_write_ascii(self):
"""Using a modified WriteAciiBlock, test the late resize.
This should fail if ModWriteAscii block does not read the
size of the input ring ahead of time, and resize accordingly."""
blocks = []
blocks.append((TestingBlock([1, 2, 3]), [], [0]))
blocks.append((ModResizeAsciiBlock('.log.txt'), [0], []))
Pipeline(blocks).main()
np.testing.assert_almost_equal(
np.loadtxt('.log.txt'), [1, 2, 3])
class TestLargeGulpSize(unittest.TestCase):
"""Create a gulp size larger than ring size"""
def test_simple_large_gulp(self):
"""Test if a large gulp size produces a seg fault"""
blocks = []
blocks.append((TestingBlock([1, 2, 3]), [], [0]))
blocks.append((ModResizeAsciiBlock('.log.txt', gulp_size=1024), [0], []))
Pipeline(blocks).main()
np.testing.assert_almost_equal(
np.loadtxt('.log.txt'), [1, 2, 3])
| bsd-3-clause | 2,158,393,822,636,490,500 | 47.897727 | 81 | 0.684871 | false |
kszys/num2words | num2words/lang_ID.py | 1 | 6232 | # Copyright (c) 2003, Taro Ogawa. All Rights Reserved.
# Copyright (c) 2013, Savoir-faire Linux inc. All Rights Reserved.
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301 USA
from __future__ import unicode_literals, print_function
class Num2Word_ID():
BASE = {0: [],
1: ["satu"],
2: ["dua"],
3: ["tiga"],
4: ["empat"],
5: ["lima"],
6: ["enam"],
7: ["tujuh"],
8: ["delapan"],
9: ["sembilan"]}
TENS_TO = {3: "ribu",
6: "juta",
9: "miliar",
12: "triliun",
15: "kuadriliun",
18: "kuantiliun",
21: "sekstiliun",
24: "septiliun",
27: "oktiliun",
30: "noniliun",
33: "desiliun"}
errmsg_floatord = "Cannot treat float number as ordinal"
errmsg_negord = "Cannot treat negative number as ordinal"
    errmsg_toobig = "abs(%s) must be less than %s."
max_num = 10**36
def split_by_koma(self, number):
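        # 'koma' (comma) is the Indonesian decimal separator; str(float) in
        # Python uses '.', so split on that.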
return str(number).split('.')
def split_by_3(self, number):
"""
starting here, it groups the number by three from the tail
'1234567' -> (('1',),('234',),('567',))
:param number:str
:rtype:tuple
"""
blocks = ()
length = len(number)
if length < 3:
blocks += ((number,),)
else:
len_of_first_block = length % 3
if len_of_first_block > 0:
first_block = number[0:len_of_first_block],
blocks += first_block,
for i in range(len_of_first_block, length, 3):
next_block = (number[i:i+3],),
blocks += next_block
return blocks
def spell(self, blocks):
"""
it adds the list of spelling to the blocks
        (('1',), ('034',)) -> (('1', ['satu']), ('034', ['tiga', 'puluh', 'empat']))
:param blocks: tuple
:rtype: tuple
"""
word_blocks = ()
first_block = blocks[0]
if len(first_block[0]) == 1:
if first_block[0] == '0':
spelling = ['nol']
else:
spelling = self.BASE[int(first_block[0])]
elif len(first_block[0]) == 2:
spelling = self.puluh(first_block[0])
else:
spelling = self.ratus(first_block[0][0]) + self.puluh(first_block[0][1:3])
word_blocks += (first_block[0], spelling),
for block in blocks[1:]:
spelling = self.ratus(block[0][0]) + self.puluh(block[0][1:3])
block += spelling,
word_blocks += block,
return word_blocks
    def ratus(self, number):
        # spell the hundreds digit of a three-digit block
        if number == '1':
            return ['seratus']
        elif number == '0':
            return []
        else:
            return self.BASE[int(number)] + ['ratus']
    def puluh(self, number):
        # spell the tens and units of a two-digit block
        if number[0] == '1':
            if number[1] == '0':
                return ['sepuluh']
            elif number[1] == '1':
                return ['sebelas']
            else:
                return self.BASE[int(number[1])] + ['belas']
        elif number[0] == '0':
            return self.BASE[int(number[1])]
        else:
            return self.BASE[int(number[0])] + ['puluh'] + self.BASE[int(number[1])]
def spell_float(self, float_part):
# spell the float number
word_list = []
for n in float_part:
if n == '0':
word_list += ['nol']
continue
word_list += self.BASE[int(n)]
return ' '.join(['','koma']+word_list)
def join(self, word_blocks, float_part):
"""
join the words by first join lists in the tuple
:param word_blocks: tuple
:rtype: str
"""
word_list = []
length = len(word_blocks)-1
first_block = word_blocks[0],
start = 0
if length == 1 and first_block[0][0] == '1':
word_list += ['seribu']
start = 1
for i in range(start, length+1, 1):
word_list += word_blocks[i][1]
if not word_blocks[i][1]:
continue
if i == length:
break
word_list += [self.TENS_TO[(length-i)*3]]
return ' '.join(word_list)+float_part
def to_cardinal(self, number):
if number >= self.max_num:
            raise OverflowError(self.errmsg_toobig % (number, self.max_num))
minus = ''
if number < 0:
minus = 'min '
float_word = ''
n = self.split_by_koma(abs(number))
        if len(n) == 2:
float_word = self.spell_float(n[1])
return minus + self.join(self.spell(self.split_by_3(n[0])), float_word)
def to_ordinal(self, number):
self.verify_ordinal(number)
out_word = self.to_cardinal(number)
if out_word == "satu":
return "pertama"
return "ke" + out_word
def to_ordinal_num(self, number):
self.verify_ordinal(number)
return "ke-" + str(number)
def to_currency(self, value):
return self.to_cardinal(value)+" rupiah"
def to_year(self, value):
return self.to_cardinal(value)
def verify_ordinal(self, value):
if not value == int(value):
raise TypeError(self.errmsg_floatord % value)
if not abs(value) == value:
raise TypeError(self.errmsg_negord % value)
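if __name__ == '__main__':
    # Illustrative spot checks, not part of the original module:
    n2w = Num2Word_ID()
    print(n2w.to_cardinal(1234))   # seribu dua ratus tiga puluh empat
    print(n2w.to_ordinal(1))       # pertama
    print(n2w.to_currency(5000))   # lima ribu rupiah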
| lgpl-2.1 | 64,732,259,284,972,610 | 30.795918 | 86 | 0.516367 | false |
Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_07_01/operations/_bgp_service_communities_operations.py | 1 | 5134 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class BgpServiceCommunitiesOperations(object):
"""BgpServiceCommunitiesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.BgpServiceCommunityListResult"]
"""Gets all the available bgp service communities.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either BgpServiceCommunityListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_07_01.models.BgpServiceCommunityListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.BgpServiceCommunityListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('BgpServiceCommunityListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/bgpServiceCommunities'} # type: ignore
| mit | 470,148,117,307,068,350 | 44.433628 | 133 | 0.643942 | false |
Qubad786/pr-code-review | gitcodereview/settings.py | 1 | 3117 | """
Django settings for gitcodereview project.
"""
import os
from os.path import abspath, dirname, join
import dj_database_url
from django.core.urlresolvers import reverse_lazy
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# SECURITY WARNING: don't run with debug turned on in production!
# Environment variables are strings, so parse the flag explicitly.
DEBUG = os.environ.get('DEBUG', 'True').lower() in ('true', '1', 'yes')
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'web',
'web.pullrequest',
'web.user',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'gitcodereview.urls'
# Custom user model
AUTH_USER_MODEL = "user.User"
AUTHENTICATION_BACKENDS = [
'web.user.auth_backend.UserAuthenticationBackend',
]
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'gitcodereview.wsgi.application'
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_ROOT = 'staticfiles'
STATIC_URL = '/static/'
# Login URL
LOGIN_URL = reverse_lazy('index')
# Login Redirect URL
LOGIN_REDIRECT_URL = reverse_lazy('dashboard')
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Secret key used in production secret.
SECRET_KEY = os.environ.get('SECRET_KEY', 'secret_key')
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': dj_database_url.config()
}
# Github Oauth settings
OAUTH_SETTINGS = {
'CLIENT_ID': os.environ.get('CLIENT_ID', None),
'CLIENT_SECRET': os.environ.get('CLIENT_SECRET', None),
'BASE_URL': os.environ.get('BASE_URL', None),
'ACCESS_TOKEN_URL': os.environ.get('ACCESS_TOKEN_URL', None),
'REDIRECT_URL': os.environ.get('REDIRECT_URL', None),
}
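# Illustrative environment configuration for the GitHub OAuth flow; all values
# below are placeholders, not real credentials:
#   CLIENT_ID=<github-app-client-id>
#   CLIENT_SECRET=<github-app-client-secret>
#   BASE_URL=https://github.com/login/oauth/authorize
#   ACCESS_TOKEN_URL=https://github.com/login/oauth/access_token
#   REDIRECT_URL=http://localhost:8000/oauth/redirect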
# Use developer's overrides if environment variables are not set.
if os.path.isfile(join(dirname(abspath(__file__)), 'private.py')):
from private import *
| mit | 6,610,438,276,086,385,000 | 25.87069 | 71 | 0.688803 | false |
BadrYoubiIdrissi/TIPE-Algorithme-Genetique | Source/NEAT/test.py | 1 | 2640 | # -*- coding: utf-8 -*-
"""
Created on Wed Oct 12 11:36:14 2016
@author: Badr Youbi Idrissi
"""
import pygame
import pygame.gfxdraw
import numpy as np
from pygame.locals import *
from individu import Individu
from phenotype import Phenotype
from population import Population
from datadisplay import DataDisplay
import utilitaires as ut
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
pygame.init()
screen = pygame.display.set_mode((860, 600), DOUBLEBUF | RESIZABLE)
pygame.display.set_caption("Test")
f = pygame.font.SysFont(pygame.font.get_default_font(), 20)
clock = pygame.time.Clock()
nb_e = 3
nb_s = 1
pop = Population(10, nb_e, nb_s)
pop.generer()
status = DataDisplay((0,0), padding = 20)
status.add("FPS", lambda : clock.get_fps())
status.add("Current generation", lambda : pop.generationCount)
status.add("Number of species", lambda : len(pop.especes))
status.add("Best fitness", pop.getBestFitness)
status.add("Best shared fitness", pop.getBestSharedFitness)
status.add("Average fitness", lambda : pop.averageFitness)
evol = False
while True:
clock.tick()
screen.fill((255,255,255))
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
exit()
elif event.type == KEYDOWN and event.key == K_UP:
nbPoints = 100
X,Y = np.meshgrid(np.linspace(0,1,nbPoints),np.linspace(0,1,nbPoints))
Z = np.zeros((nbPoints,nbPoints))
for i in range(nbPoints):
for j in range(nbPoints):
pop.best[-1].phenotype.evaluate(ut.entree('1;'+str(X[i,j])+';'+str(Y[i,j])))
Z[i,j] = pop.best[-1].output()
fig = plt.figure()
ax = fig.gca(projection='3d')
surf = ax.plot_surface(X, Y, Z)
plt.show()
elif event.type == KEYDOWN and event.key == K_DOWN:
l = [pop.contenu[i].fitness for i in range(pop.length)]
l2 = [pop.contenu[i].sharedFitness for i in range(pop.length)]
plt.plot(range(pop.length), l)
plt.plot(range(pop.length), l2)
plt.show()
elif event.type == KEYDOWN and event.key == K_e:
evol = not(evol)
elif event.type == VIDEORESIZE:
            pygame.display.set_mode((event.w, event.h), DOUBLEBUF | RESIZABLE)
if evol:
pop.evoluer()
if (pop.generationCount % 10 == 0):
pop.updateBest()
pop.draw(status.police)
status.draw()
pygame.display.flip()
| gpl-3.0 | 4,906,027,643,944,131,000 | 29.697674 | 96 | 0.595455 | false |
tu-darmstadt-ros-pkg/hector_flexbe_behavior | behaviors/behavior_pathdrivemission/src/behavior_pathdrivemission/pathdrivemission_sm.py | 1 | 2883 | #!/usr/bin/env python
###########################################################
# WARNING: Generated code! #
# ************************** #
# Manual changes may get lost if file is generated again. #
# Only code inside the [MANUAL] tags will be kept. #
###########################################################
import roslib; roslib.load_manifest('behavior_pathdrivemission')
from flexbe_core import Behavior, Autonomy, OperatableStateMachine, ConcurrencyContainer, PriorityContainer, Logger
from hector_flexbe_states.create_path import CreatePath
from hector_flexbe_states.invert_path import InvertPath
from hector_flexbe_states.move_along_path import MoveAlongPath
from hector_flexbe_states.sparse_path import SparsePath
# Additional imports can be added inside the following tags
# [MANUAL_IMPORT]
from geometry_msgs.msg import PoseStamped
# [/MANUAL_IMPORT]
'''
Created on Thu Jun 02 2016
@author: Gabriel, Elisa
'''
class PathDriveMissionSM(Behavior):
'''
Robot moves along a given path
'''
def __init__(self):
super(PathDriveMissionSM, self).__init__()
self.name = 'PathDriveMission'
# parameters of this behavior
# references to used behaviors
# Additional initialization code can be added inside the following tags
# [MANUAL_INIT]
# [/MANUAL_INIT]
# Behavior comments:
def create(self):
# x:52 y:481, x:179 y:505
_state_machine = OperatableStateMachine(outcomes=['finished', 'failed'])
_state_machine.userdata.speed = 0.2
# Additional creation code can be added inside the following tags
# [MANUAL_CREATE]
# [/MANUAL_CREATE]
with _state_machine:
# x:169 y:61
OperatableStateMachine.add('Create_Path',
CreatePath(),
transitions={'succeeded': 'Invert_Path', 'retry': 'Create_Path'},
autonomy={'succeeded': Autonomy.Off, 'retry': Autonomy.Off},
remapping={'path': 'path'})
# x:309 y:56
OperatableStateMachine.add('Invert_Path',
InvertPath(),
transitions={'reached': 'Sparse_Path', 'failed': 'failed'},
autonomy={'reached': Autonomy.Off, 'failed': Autonomy.Off},
remapping={'path': 'path'})
# x:670 y:162
OperatableStateMachine.add('Move_Along_Path',
MoveAlongPath(),
transitions={'reached': 'finished', 'failed': 'failed'},
autonomy={'reached': Autonomy.Off, 'failed': Autonomy.Off},
remapping={'path': 'path', 'speed': 'speed'})
# x:482 y:64
OperatableStateMachine.add('Sparse_Path',
SparsePath(max_dist=.2, max_angle=.2, min_dist=.1),
transitions={'done': 'Move_Along_Path'},
autonomy={'done': Autonomy.Off},
remapping={'path': 'path'})
return _state_machine
# Private functions can be added inside the following tags
# [MANUAL_FUNC]
# [/MANUAL_FUNC]
| bsd-3-clause | -1,531,267,439,377,354,000 | 29.347368 | 115 | 0.623656 | false |
napalm-automation/napalm-yang | napalm_yang/models/openconfig/network_instances/network_instance/mpls/signaling_protocols/rsvp_te/global_/hellos/__init__.py | 1 | 18060 | # -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import config
from . import state
class hellos(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/mpls/signaling-protocols/rsvp-te/global/hellos. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Top level container for RSVP hello parameters
"""
__slots__ = ("_path_helper", "_extmethods", "__config", "__state")
_yang_name = "hellos"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__config = YANGDynClass(
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"mpls",
"signaling-protocols",
"rsvp-te",
"global",
"hellos",
]
def _get_config(self):
"""
Getter method for config, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/rsvp_te/global/hellos/config (container)
YANG Description: Configuration parameters relating to RSVP
hellos
"""
return self.__config
def _set_config(self, v, load=False):
"""
Setter method for config, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/rsvp_te/global/hellos/config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config() directly.
YANG Description: Configuration parameters relating to RSVP
hellos
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """config must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__config = t
if hasattr(self, "_set"):
self._set()
def _unset_config(self):
self.__config = YANGDynClass(
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/rsvp_te/global/hellos/state (container)
YANG Description: State information associated with RSVP hellos
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/rsvp_te/global/hellos/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State information associated with RSVP hellos
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """state must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__state = t
if hasattr(self, "_set"):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
_pyangbind_elements = OrderedDict([("config", config), ("state", state)])
from . import config
from . import state
class hellos(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/mpls/signaling-protocols/rsvp-te/global/hellos. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Top level container for RSVP hello parameters
"""
__slots__ = ("_path_helper", "_extmethods", "__config", "__state")
_yang_name = "hellos"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__config = YANGDynClass(
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"mpls",
"signaling-protocols",
"rsvp-te",
"global",
"hellos",
]
def _get_config(self):
"""
Getter method for config, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/rsvp_te/global/hellos/config (container)
YANG Description: Configuration parameters relating to RSVP
hellos
"""
return self.__config
def _set_config(self, v, load=False):
"""
Setter method for config, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/rsvp_te/global/hellos/config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config() directly.
YANG Description: Configuration parameters relating to RSVP
hellos
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """config must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__config = t
if hasattr(self, "_set"):
self._set()
def _unset_config(self):
self.__config = YANGDynClass(
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/rsvp_te/global/hellos/state (container)
YANG Description: State information associated with RSVP hellos
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/rsvp_te/global/hellos/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State information associated with RSVP hellos
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """state must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__state = t
if hasattr(self, "_set"):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
_pyangbind_elements = OrderedDict([("config", config), ("state", state)])
| apache-2.0 | 4,378,248,771,271,248,000 | 37.181818 | 377 | 0.577187 | false |
GodotNativeTools/godot-cpp | binding_generator.py | 1 | 27665 | #!/usr/bin/env python
import json
# Convenience function for using template get_node
def correct_method_name(method_list):
for method in method_list:
if method["name"] == "get_node":
method["name"] = "get_node_internal"
classes = []
def generate_bindings(path, use_template_get_node):
global classes
classes = json.load(open(path))
icalls = set()
for c in classes:
# print c['name']
used_classes = get_used_classes(c)
if use_template_get_node and c["name"] == "Node":
correct_method_name(c["methods"])
header = generate_class_header(used_classes, c, use_template_get_node)
impl = generate_class_implementation(icalls, used_classes, c, use_template_get_node)
header_file = open("include/gen/" + strip_name(c["name"]) + ".hpp", "w+")
header_file.write(header)
source_file = open("src/gen/" + strip_name(c["name"]) + ".cpp", "w+")
source_file.write(impl)
icall_header_file = open("include/gen/__icalls.hpp", "w+")
icall_header_file.write(generate_icall_header(icalls))
register_types_file = open("src/gen/__register_types.cpp", "w+")
register_types_file.write(generate_type_registry(classes))
init_method_bindings_file = open("src/gen/__init_method_bindings.cpp", "w+")
init_method_bindings_file.write(generate_init_method_bindings(classes))
def is_reference_type(t):
for c in classes:
if c['name'] != t:
continue
if c['is_reference']:
return True
return False
def make_gdnative_type(t, ref_allowed):
if is_enum(t):
return remove_enum_prefix(t) + " "
elif is_class_type(t):
if is_reference_type(t) and ref_allowed:
return "Ref<" + strip_name(t) + "> "
else:
return strip_name(t) + " *"
else:
if t == "int":
return "int64_t "
if t == "float" or t == "real":
return "real_t "
return strip_name(t) + " "
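
# For illustration: make_gdnative_type maps api.json type names to C++
# declarations, e.g. "int" -> "int64_t ", "enum.Error" -> "Error ", and,
# when ref_allowed is true and the class is reference-counted,
# "Texture" -> "Ref<Texture> " (the class names here are examples only).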
def generate_class_header(used_classes, c, use_template_get_node):
source = []
source.append("#ifndef GODOT_CPP_" + strip_name(c["name"]).upper() + "_HPP")
source.append("#define GODOT_CPP_" + strip_name(c["name"]).upper() + "_HPP")
source.append("")
source.append("")
source.append("#include <gdnative_api_struct.gen.h>")
source.append("#include <stdint.h>")
source.append("")
source.append("#include <core/CoreTypes.hpp>")
class_name = strip_name(c["name"])
# Ref<T> is not included in object.h in Godot either,
# so don't include it here because it's not needed
if class_name != "Object" and class_name != "Reference":
source.append("#include <core/Ref.hpp>")
ref_allowed = True
else:
source.append("#include <core/TagDB.hpp>")
ref_allowed = False
included = []
for used_class in used_classes:
if is_enum(used_class) and is_nested_type(used_class):
used_class_name = remove_enum_prefix(extract_nested_type(used_class))
if used_class_name not in included:
included.append(used_class_name)
source.append("#include \"" + used_class_name + ".hpp\"")
elif is_enum(used_class) and is_nested_type(used_class) and not is_nested_type(used_class, class_name):
used_class_name = remove_enum_prefix(used_class)
if used_class_name not in included:
included.append(used_class_name)
source.append("#include \"" + used_class_name + ".hpp\"")
source.append("")
if c["base_class"] != "":
source.append("#include \"" + strip_name(c["base_class"]) + ".hpp\"")
source.append("namespace godot {")
source.append("")
for used_type in used_classes:
if is_enum(used_type) or is_nested_type(used_type, class_name):
continue
else:
source.append("class " + strip_name(used_type) + ";")
source.append("")
vararg_templates = ""
# generate the class definition here
source.append("class " + class_name + (" : public _Wrapped" if c["base_class"] == "" else (" : public " + strip_name(c["base_class"])) ) + " {")
if c["base_class"] == "":
source.append("public: enum { ___CLASS_IS_SCRIPT = 0, };")
source.append("")
source.append("private:")
if c["singleton"]:
source.append("\tstatic " + class_name + " *_singleton;")
source.append("")
source.append("\t" + class_name + "();")
source.append("")
# Generate method table
source.append("\tstruct ___method_bindings {")
for method in c["methods"]:
source.append("\t\tgodot_method_bind *mb_" + method["name"] + ";")
source.append("\t};")
source.append("\tstatic ___method_bindings ___mb;")
source.append("\tstatic void *_detail_class_tag;")
source.append("")
source.append("public:")
source.append("\tstatic void ___init_method_bindings();")
# class id from core engine for casting
source.append("\tinline static size_t ___get_id() { return (size_t)_detail_class_tag; }")
source.append("")
if c["singleton"]:
source.append("\tstatic inline " + class_name + " *get_singleton()")
source.append("\t{")
source.append("\t\tif (!" + class_name + "::_singleton) {")
source.append("\t\t\t" + class_name + "::_singleton = new " + class_name + ";")
source.append("\t\t}")
source.append("\t\treturn " + class_name + "::_singleton;")
source.append("\t}")
source.append("")
# godot::api->godot_global_get_singleton((char *) \"" + strip_name(c["name"]) + "\");"
# ___get_class_name
source.append("\tstatic inline const char *___get_class_name() { return (const char *) \"" + strip_name(c["name"]) + "\"; }")
source.append("\tstatic inline Object *___get_from_variant(Variant a) { godot_object *o = (godot_object*) a; return (o) ? (Object *) godot::nativescript_1_1_api->godot_nativescript_get_instance_binding_data(godot::_RegisterState::language_index, o) : nullptr; }")
enum_values = []
source.append("\n\t// enums")
for enum in c["enums"]:
source.append("\tenum " + strip_name(enum["name"]) + " {")
for value in enum["values"]:
source.append("\t\t" + remove_nested_type_prefix(value) + " = " + str(enum["values"][value]) + ",")
enum_values.append(value)
source.append("\t};")
source.append("\n\t// constants")
for name in c["constants"]:
if name not in enum_values:
source.append("\tconst static int " + name + " = " + str(c["constants"][name]) + ";")
if c["instanciable"]:
source.append("")
source.append("")
source.append("\tstatic " + class_name + " *_new();")
source.append("\n\t// methods")
if class_name == "Object":
source.append("#ifndef GODOT_CPP_NO_OBJECT_CAST")
source.append("\ttemplate<class T>")
source.append("\tstatic T *cast_to(const Object *obj);")
source.append("#endif")
source.append("")
for method in c["methods"]:
method_signature = ""
# TODO decide what to do about virtual methods
# method_signature += "virtual " if method["is_virtual"] else ""
method_signature += make_gdnative_type(method["return_type"], ref_allowed)
method_name = escape_cpp(method["name"])
method_signature += method_name + "("
has_default_argument = False
method_arguments = ""
for i, argument in enumerate(method["arguments"]):
method_signature += "const " + make_gdnative_type(argument["type"], ref_allowed)
argument_name = escape_cpp(argument["name"])
method_signature += argument_name
method_arguments += argument_name
# default arguments
def escape_default_arg(_type, default_value):
if _type == "Color":
return "Color(" + default_value + ")"
if _type == "bool" or _type == "int":
return default_value.lower()
if _type == "Array":
return "Array()"
if _type in ["PoolVector2Array", "PoolStringArray", "PoolVector3Array", "PoolColorArray", "PoolIntArray", "PoolRealArray"]:
return _type + "()"
if _type == "Vector2":
return "Vector2" + default_value
if _type == "Vector3":
return "Vector3" + default_value
if _type == "Transform":
return "Transform()"
if _type == "Transform2D":
return "Transform2D()"
if _type == "Rect2":
return "Rect2" + default_value
if _type == "Variant":
return "Variant()" if default_value == "Null" else default_value
if _type == "String":
return "\"" + default_value + "\""
if _type == "RID":
return "RID()"
if default_value == "Null" or default_value == "[Object:null]":
return "nullptr"
return default_value
if argument["has_default_value"] or has_default_argument:
method_signature += " = " + escape_default_arg(argument["type"], argument["default_value"])
has_default_argument = True
if i != len(method["arguments"]) - 1:
method_signature += ", "
method_arguments += ","
if method["has_varargs"]:
if len(method["arguments"]) > 0:
method_signature += ", "
method_arguments += ", "
vararg_templates += "\ttemplate <class... Args> " + method_signature + "Args... args){\n\t\treturn " + method_name + "(" + method_arguments + "Array::make(args...));\n\t}\n"""
method_signature += "const Array& __var_args = Array()"
method_signature += ")" + (" const" if method["is_const"] else "")
source.append("\t" + method_signature + ";")
source.append(vararg_templates)
if use_template_get_node and class_name == "Node":
# Extra definition for template get_node that calls the renamed get_node_internal; has a default template parameter for backwards compatibility.
source.append("\ttemplate <class T = Node>")
source.append("\tT *get_node(const NodePath path) const {")
source.append("\t\treturn Object::cast_to<T>(get_node_internal(path));")
source.append("\t}")
source.append("};")
source.append("")
# ...And a specialized version so we don't unnecessarily cast when using the default.
source.append("template <>")
source.append("inline Node *Node::get_node<Node>(const NodePath path) const {")
source.append("\treturn get_node_internal(path);")
source.append("}")
source.append("")
else:
source.append("};")
source.append("")
source.append("}")
source.append("")
source.append("#endif")
return "\n".join(source)
def generate_class_implementation(icalls, used_classes, c, use_template_get_node):
class_name = strip_name(c["name"])
ref_allowed = class_name != "Object" and class_name != "Reference"
source = []
source.append("#include \"" + class_name + ".hpp\"")
source.append("")
source.append("")
source.append("#include <core/GodotGlobal.hpp>")
source.append("#include <core/CoreTypes.hpp>")
source.append("#include <core/Ref.hpp>")
source.append("#include <core/Godot.hpp>")
source.append("")
source.append("#include \"__icalls.hpp\"")
source.append("")
source.append("")
for used_class in used_classes:
if is_enum(used_class):
continue
else:
source.append("#include \"" + strip_name(used_class) + ".hpp\"")
source.append("")
source.append("")
source.append("namespace godot {")
core_object_name = "this"
source.append("")
source.append("")
if c["singleton"]:
source.append("" + class_name + " *" + class_name + "::_singleton = NULL;")
source.append("")
source.append("")
# FIXME Test if inlining has a huge impact on binary size
source.append(class_name + "::" + class_name + "() {")
source.append("\t_owner = godot::api->godot_global_get_singleton((char *) \"" + strip_name(c["name"]) + "\");")
source.append("}")
source.append("")
source.append("")
# Method table initialization
source.append(class_name + "::___method_bindings " + class_name + "::___mb = {};")
source.append("")
source.append("void *" + class_name + "::_detail_class_tag = nullptr;")
source.append("")
source.append("void " + class_name + "::___init_method_bindings() {")
for method in c["methods"]:
source.append("\t___mb.mb_" + method["name"] + " = godot::api->godot_method_bind_get_method(\"" + c["name"] + "\", \"" + ("get_node" if use_template_get_node and method["name"] == "get_node_internal" else method["name"]) + "\");")
source.append("\tgodot_string_name class_name;")
source.append("\tgodot::api->godot_string_name_new_data(&class_name, \"" + c["name"] + "\");")
source.append("\t_detail_class_tag = godot::core_1_2_api->godot_get_class_tag(&class_name);")
source.append("\tgodot::api->godot_string_name_destroy(&class_name);")
source.append("}")
source.append("")
if c["instanciable"]:
source.append(class_name + " *" + strip_name(c["name"]) + "::_new()")
source.append("{")
source.append("\treturn (" + class_name + " *) godot::nativescript_1_1_api->godot_nativescript_get_instance_binding_data(godot::_RegisterState::language_index, godot::api->godot_get_class_constructor((char *)\"" + c["name"] + "\")());")
source.append("}")
for method in c["methods"]:
method_signature = ""
method_signature += make_gdnative_type(method["return_type"], ref_allowed)
method_signature += strip_name(c["name"]) + "::" + escape_cpp(method["name"]) + "("
for i, argument in enumerate(method["arguments"]):
method_signature += "const " + make_gdnative_type(argument["type"], ref_allowed)
method_signature += escape_cpp(argument["name"])
if i != len(method["arguments"]) - 1:
method_signature += ", "
if method["has_varargs"]:
if len(method["arguments"]) > 0:
method_signature += ", "
method_signature += "const Array& __var_args"
method_signature += ")" + (" const" if method["is_const"] else "")
source.append(method_signature + " {")
if method["name"] == "free":
# dirty hack because Object::free is marked virtual but doesn't actually exist...
source.append("\tgodot::api->godot_object_destroy(_owner);")
source.append("}")
source.append("")
continue
return_statement = ""
return_type_is_ref = is_reference_type(method["return_type"]) and ref_allowed
if method["return_type"] != "void":
if is_class_type(method["return_type"]):
if is_enum(method["return_type"]):
return_statement += "return (" + remove_enum_prefix(method["return_type"]) + ") "
elif return_type_is_ref:
return_statement += "return Ref<" + strip_name(method["return_type"]) + ">::__internal_constructor(";
else:
return_statement += "return " + ("(" + strip_name(method["return_type"]) + " *) " if is_class_type(method["return_type"]) else "")
else:
return_statement += "return "
def get_icall_type_name(name):
if is_enum(name):
return "int"
if is_class_type(name):
return "Object"
return name
if method["has_varargs"]:
if len(method["arguments"]) != 0:
source.append("\tVariant __given_args[" + str(len(method["arguments"])) + "];")
for i, argument in enumerate(method["arguments"]):
source.append("\tgodot::api->godot_variant_new_nil((godot_variant *) &__given_args[" + str(i) + "]);")
source.append("")
for i, argument in enumerate(method["arguments"]):
source.append("\t__given_args[" + str(i) + "] = " + escape_cpp(argument["name"]) + ";")
source.append("")
size = ""
if method["has_varargs"]:
size = "(__var_args.size() + " + str(len(method["arguments"])) + ")"
else:
size = "(" + str(len(method["arguments"])) + ")"
source.append("\tgodot_variant **__args = (godot_variant **) alloca(sizeof(godot_variant *) * " + size + ");")
source.append("")
for i, argument in enumerate(method["arguments"]):
source.append("\t__args[" + str(i) + "] = (godot_variant *) &__given_args[" + str(i) + "];")
source.append("")
if method["has_varargs"]:
source.append("\tfor (int i = 0; i < __var_args.size(); i++) {")
source.append("\t\t__args[i + " + str(len(method["arguments"])) + "] = (godot_variant *) &((Array &) __var_args)[i];")
source.append("\t}")
source.append("")
source.append("\tVariant __result;")
source.append("\t*(godot_variant *) &__result = godot::api->godot_method_bind_call(___mb.mb_" + method["name"] + ", ((const Object *) " + core_object_name + ")->_owner, (const godot_variant **) __args, " + size + ", nullptr);")
source.append("")
if is_class_type(method["return_type"]):
source.append("\tObject *obj = Object::___get_from_variant(__result);")
source.append("\tif (obj->has_method(\"reference\"))")
source.append("\t\tobj->callv(\"reference\", Array());")
source.append("")
for i, argument in enumerate(method["arguments"]):
source.append("\tgodot::api->godot_variant_destroy((godot_variant *) &__given_args[" + str(i) + "]);")
source.append("")
if method["return_type"] != "void":
cast = ""
if is_class_type(method["return_type"]):
if return_type_is_ref:
cast += "Ref<" + strip_name(method["return_type"]) + ">::__internal_constructor(__result);"
else:
cast += "(" + strip_name(method["return_type"]) + " *) " + strip_name(method["return_type"] + "::___get_from_variant(") + "__result);"
else:
cast += "__result;"
source.append("\treturn " + cast)
else:
args = []
for arg in method["arguments"]:
args.append(get_icall_type_name(arg["type"]))
icall_ret_type = get_icall_type_name(method["return_type"])
icall_sig = tuple((icall_ret_type, tuple(args)))
icalls.add(icall_sig)
icall_name = get_icall_name(icall_sig)
return_statement += icall_name + "(___mb.mb_" + method["name"] + ", (const Object *) " + core_object_name
for arg in method["arguments"]:
arg_is_ref = is_reference_type(arg["type"]) and ref_allowed
return_statement += ", " + escape_cpp(arg["name"]) + (".ptr()" if arg_is_ref else "")
return_statement += ")"
if return_type_is_ref:
return_statement += ")"
source.append("\t" + return_statement + ";")
source.append("}")
source.append("")
source.append("}")
return "\n".join(source)
def generate_icall_header(icalls):
source = []
source.append("#ifndef GODOT_CPP__ICALLS_HPP")
source.append("#define GODOT_CPP__ICALLS_HPP")
source.append("")
source.append("#include <gdnative_api_struct.gen.h>")
source.append("#include <stdint.h>")
source.append("")
source.append("#include <core/GodotGlobal.hpp>")
source.append("#include <core/CoreTypes.hpp>")
source.append("#include \"Object.hpp\"")
source.append("")
source.append("")
source.append("namespace godot {")
source.append("")
for icall in icalls:
ret_type = icall[0]
args = icall[1]
method_signature = "static inline "
method_signature += get_icall_return_type(ret_type) + get_icall_name(icall) + "(godot_method_bind *mb, const Object *inst"
for i, arg in enumerate(args):
method_signature += ", const "
if is_core_type(arg):
method_signature += arg + "&"
elif arg == "int":
method_signature += "int64_t "
elif arg == "float":
method_signature += "double "
elif is_primitive(arg):
method_signature += arg + " "
else:
method_signature += "Object *"
method_signature += "arg" + str(i)
method_signature += ")"
source.append(method_signature + " {")
if ret_type != "void":
source.append("\t" + ("godot_object *" if is_class_type(ret_type) else get_icall_return_type(ret_type)) + "ret;")
if is_class_type(ret_type):
source.append("\tret = nullptr;")
source.append("\tconst void *args[" + ("1" if len(args) == 0 else "") + "] = {")
for i, arg in enumerate(args):
wrapped_argument = "\t\t"
if is_primitive(arg) or is_core_type(arg):
wrapped_argument += "(void *) &arg" + str(i)
else:
wrapped_argument += "(void *) (arg" + str(i) + ") ? arg" + str(i) + "->_owner : nullptr"
wrapped_argument += ","
source.append(wrapped_argument)
source.append("\t};")
source.append("")
source.append("\tgodot::api->godot_method_bind_ptrcall(mb, inst->_owner, args, " + ("nullptr" if ret_type == "void" else "&ret") + ");")
if ret_type != "void":
if is_class_type(ret_type):
source.append("\tif (ret) {")
source.append("\t\treturn (Object *) godot::nativescript_1_1_api->godot_nativescript_get_instance_binding_data(godot::_RegisterState::language_index, ret);")
source.append("\t}")
source.append("")
source.append("\treturn (Object *) ret;")
else:
source.append("\treturn ret;")
source.append("}")
source.append("")
source.append("}")
source.append("")
source.append("#endif")
return "\n".join(source)
def generate_type_registry(classes):
source = []
source.append("#include \"TagDB.hpp\"")
source.append("#include <typeinfo>")
source.append("\n")
for c in classes:
source.append("#include <" + strip_name(c["name"]) + ".hpp>")
source.append("")
source.append("")
source.append("namespace godot {")
source.append("void ___register_types()")
source.append("{")
for c in classes:
class_name = strip_name(c["name"])
base_class_name = strip_name(c["base_class"])
class_type_hash = "typeid(" + class_name + ").hash_code()"
base_class_type_hash = "typeid(" + base_class_name + ").hash_code()"
if base_class_name == "":
base_class_type_hash = "0"
source.append("\tgodot::_TagDB::register_global_type(\"" + c["name"] + "\", " + class_type_hash + ", " + base_class_type_hash + ");")
source.append("}")
source.append("")
source.append("}")
return "\n".join(source)
def generate_init_method_bindings(classes):
source = []
for c in classes:
source.append("#include <" + strip_name(c["name"]) + ".hpp>")
source.append("")
source.append("")
source.append("namespace godot {")
source.append("void ___init_method_bindings()")
source.append("{")
for c in classes:
class_name = strip_name(c["name"])
source.append("\t" + strip_name(c["name"]) + "::___init_method_bindings();")
source.append("}")
source.append("")
source.append("}")
return "\n".join(source)
def get_icall_return_type(t):
if is_class_type(t):
return "Object *"
if t == "int":
return "int64_t "
if t == "float" or t == "real":
return "double "
return t + " "
def get_icall_name(sig):
ret_type = sig[0]
args = sig[1]
name = "___godot_icall_"
name += strip_name(ret_type)
for arg in args:
name += "_" + strip_name(arg)
return name
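
# For example, the icall signature ("Object", ("String", "int")) yields the
# name "___godot_icall_Object_String_int".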
def get_used_classes(c):
classes = []
for method in c["methods"]:
if is_class_type(method["return_type"]) and not (method["return_type"] in classes):
classes.append(method["return_type"])
for arg in method["arguments"]:
if is_class_type(arg["type"]) and not (arg["type"] in classes):
classes.append(arg["type"])
return classes
def strip_name(name):
if len(name) == 0:
return name
if name[0] == '_':
return name[1:]
return name
def extract_nested_type(nested_type):
return strip_name(nested_type[:nested_type.find("::")])
def remove_nested_type_prefix(name):
return name if name.find("::") == -1 else strip_name(name[name.find("::") + 2:])
def remove_enum_prefix(name):
return strip_name(name[name.find("enum.") + 5:])
def is_nested_type(name, type = ""):
return name.find(type + "::") != -1
def is_enum(name):
return name.find("enum.") == 0
def is_class_type(name):
return not is_core_type(name) and not is_primitive(name)
def is_core_type(name):
core_types = ["Array",
"Basis",
"Color",
"Dictionary",
"Error",
"NodePath",
"Plane",
"PoolByteArray",
"PoolIntArray",
"PoolRealArray",
"PoolStringArray",
"PoolVector2Array",
"PoolVector3Array",
"PoolColorArray",
"PoolIntArray",
"PoolRealArray",
"Quat",
"Rect2",
"AABB",
"RID",
"String",
"Transform",
"Transform2D",
"Variant",
"Vector2",
"Vector3"]
return name in core_types
def is_primitive(name):
core_types = ["int", "bool", "real", "float", "void"]
return name in core_types
def escape_cpp(name):
escapes = {
"class": "_class",
"char": "_char",
"short": "_short",
"bool": "_bool",
"int": "_int",
"default": "_default",
"case": "_case",
"switch": "_switch",
"export": "_export",
"template": "_template",
"new": "new_",
"operator": "_operator",
"typename": "_typename"
}
if name in escapes:
return escapes[name]
return name
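

# Usage sketch (illustrative, not part of the upstream build): the SCons build
# normally imports this module and calls generate_bindings with the path to a
# Godot api.json dump. A hypothetical standalone run could look like:
#
#   generate_bindings("godot_api.json", use_template_get_node=True)
#
# where "godot_api.json" is an assumed local path.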
| mit | 4,136,963,166,776,560,600 | 31.547059 | 267 | 0.53396 | false |
oliverodaa/cs184-final-proj | dwinelle/video/gen_3d.py | 1 | 1238 | #!/usr/bin/env python3
# This file is part of dwinelle-tools.
# dwinelle-tools is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# dwinelle-tools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with dwinelle-tools. If not, see <http://www.gnu.org/licenses/>.
# This can be used to generate data3d.js for the web frontend.
import utils
edge_lengths = utils.load_edge_lengths()
print('var el = {{{}}};'.format(','.join('"{} {}":{}'.format(k[0], k[1], v) for k, v in edge_lengths.items())))
print('var coords = {{{}}};'.format(','.join('{}:{{x:{},y:{},z:{}}}'.format(k, v[0], v[1], v[2]) for k, v in utils.get_node_coords().items())))
print('var eh = {{{}}};'.format(','.join('"{} {}":{{bot:{},top:{},l:{}}}'.format(k[0], k[1], v[0], v[1], v[2]) for k, v in utils.load_edge_heights().items())))
| mit | 3,032,383,156,941,196,300 | 48.52 | 159 | 0.667205 | false |
iwhiz/Algorist | fibonacci_dp.py | 1 | 1163 | # This function uses the dynamic programming concept, where it stores previously computed values rather
# computing it every time like in normal recursion.
import time
# This version of fibonacci uses the dynamic programming (memoization) concept.
# First check the normal fibonacci.py for better understanding
memo = {} # create the dictionary once, at module level, so cached values survive between calls


def fibo_dp(n: int): # pass 'n' to this function and it should be of type int
    if n in memo: # if 'n' was computed before, memo already has that value stored in it
        return memo[n] # then return that value
    if n <= 2:
        f = 1 # the first two numbers of the series are 1
    else:
        f = fibo_dp(n - 1) + fibo_dp(n - 2) # otherwise call the recursion
    memo[n] = f # store it in memo for future use
    return f # return the value to the calling function
if __name__ == "__main__":
    start_time = time.perf_counter() # starting the timer (time.clock was removed in Python 3.8)
    print(fibo_dp(15)) # calling the function here to find the 15th number in the Fibonacci series
    print(time.perf_counter() - start_time) # print the elapsed time
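    # Expected value: fibo_dp(15) == 610, since the series runs
    # 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610.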
| mit | -8,367,374,644,856,106,000 | 45.52 | 108 | 0.66466 | false |
openweave/openweave-core | src/test-apps/happy/tests/standalone/wdmNext/test_weave_wdm_next_one_way_subscribe_05.py | 1 | 3250 | #!/usr/bin/env python3
#
# Copyright (c) 2016-2017 Nest Labs, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# @file
# Calls Weave WDM one way subscribe between nodes.
# C03: One way Subscribe: Root path. Null Version. Mutate data in Publisher. Client cancels
# L05: Stress One way Subscribe: Root path. Null Version. Mutate data in Publisher. Client cancels
#
from __future__ import absolute_import
from __future__ import print_function
import unittest
import set_test_path
from weave_wdm_next_test_base import weave_wdm_next_test_base
import WeaveUtilities
class test_weave_wdm_next_one_way_subscribe_05(weave_wdm_next_test_base):
def test_weave_wdm_next_one_way_subscribe_05(self):
wdm_next_args = {}
wdm_next_args['wdm_option'] = "one_way_subscribe"
wdm_next_args['total_client_count'] = 2
wdm_next_args['final_client_status'] = 0
wdm_next_args['timer_client_period'] = 5000
wdm_next_args['test_client_iterations'] = 1
wdm_next_args['test_client_delay'] = 2000
wdm_next_args['enable_client_flip'] = 0
wdm_next_args['total_server_count'] = 2
wdm_next_args['final_server_status'] = 4
wdm_next_args['timer_server_period'] = 4000
wdm_next_args['enable_server_flip'] = 1
wdm_next_args['client_clear_state_between_iterations'] = True
wdm_next_args['server_clear_state_between_iterations'] = True
wdm_next_args['client_log_check'] = [('Client\[0\] \[(ALIVE|CONFM)\] EndSubscription Ref\(\d+\)', wdm_next_args['test_client_iterations'] * 1),
('Client->kEvent_OnNotificationProcessed', wdm_next_args['test_client_iterations'] * (wdm_next_args['total_server_count'] + 1)),
('Client\[0\] moving to \[ FREE\] Ref\(0\)', wdm_next_args['test_client_iterations'] * 1)]
wdm_next_args['server_log_check'] = [('Handler\[0\] \[(ALIVE|CONFM)\] CancelRequestHandler Ref\(\d+\)', wdm_next_args['test_client_iterations'] * 1),
('Handler\[0\] Moving to \[ FREE\] Ref\(0\)', wdm_next_args['test_client_iterations'] * 1)]
wdm_next_args['test_tag'] = self.__class__.__name__[19:].upper()
wdm_next_args['test_case_name'] = ['L05: Stress One way Subscribe: Root path, Null Version. Mutate data in Publisher. Client cancels']
print('test file: ' + self.__class__.__name__)
print("weave-wdm-next test C03 and L05")
super(test_weave_wdm_next_one_way_subscribe_05, self).weave_wdm_next_test_base(wdm_next_args)
if __name__ == "__main__":
WeaveUtilities.run_unittest()
| apache-2.0 | -7,014,199,987,476,152,000 | 44.138889 | 173 | 0.635385 | false |
tedye/leetcode | Python/leetcode.037.sudoku-solver.py | 1 | 2045 | class Solution(object):
def solveSudoku(self, board):
"""
:type board: List[List[str]]
:rtype: void Do not return anything, modify board in-place instead.
"""
        # candidate digits still available in each row, column, and 3x3 box
        hset = [{'1','2','3','4','5','6','7','8','9'} for _ in range(9)]
        vset = [{'1','2','3','4','5','6','7','8','9'} for _ in range(9)]
        boxset = [{'1','2','3','4','5','6','7','8','9'} for _ in range(9)]
        temp = self.solver(board, hset, vset, boxset)
        board[:] = temp[:]
    def solver(self, board, h, v, b):
        # queue every empty cell together with its row/column/box candidate sets
        q = []
        for i in range(9):
            for j in range(9):
                if board[i][j] == '.':
                    q.append([(i,j), h[i], v[j], b[(i//3) * 3 + j // 3]])
                else:
                    # a pre-filled cell consumes its digit from row, column and box
                    num = board[i][j]
                    h[i] -= {num}
                    v[j] -= {num}
                    b[(i//3) * 3 + j // 3] -= {num}
        while q:
            # always fill the most constrained cell first (fewest candidates left)
            q.sort(key=lambda x: len(x[1] & x[2] & x[3]))
            cur = q.pop(0)
            avail = cur[1] & cur[2] & cur[3]
            i = cur[0][0]
            j = cur[0][1]
            if len(avail) == 0:
                return [] # contradiction: no digit fits this cell, so backtrack
            elif len(avail) == 1:
num = avail.pop()
h[i] -= {num}
v[j] -= {num}
b[(i//3) * 3 + j // 3] -= {num}
board[i][j] = num
            else:
                # several candidates: try each in turn and backtrack on failure
                l = len(avail)
                for k in range(l):
num = avail.pop()
h[i] -= {num}
v[j] -= {num}
b[(i//3) * 3 + j // 3] -= {num}
board[i][j] = num
temp = self.solver([x[:] for x in board], [set(a) for a in h], [set(a) for a in v], [set(a) for a in b])
if temp:
return temp
board[i][j] = '.'
h[i].add(num)
v[j].add(num)
b[(i//3) * 3 + j // 3].add(num)
return []
return board | mit | -8,874,471,319,951,807,000 | 36.888889 | 124 | 0.326161 | false |
opennode/nodeconductor-paas-oracle | src/nodeconductor_paas_oracle/extension.py | 1 | 3675 | from nodeconductor.core import NodeConductorExtension
class OracleExtension(NodeConductorExtension):
class Settings:
ORACLE_TICKET_TEMPLATES = {
'provision': {
'summary': "Request for a new Oracle instance",
'details': """
Oracle DB purchase details
Customer name: {customer.name}
Project name: {project.project_group.name}
Environment name: {project.name}
Customer UUID: {customer.uuid.hex}
Project UUID: {project.project_group.uuid.hex}
Environment UUID: {project.uuid.hex}
OpenStack tenant id: {deployment.tenant.backend_id}
Hardware Configuration:
Name: {deployment.name}
Flavor: {deployment.flavor_info}
SSH key: {ssh_key.name}
SSH key UUID: {ssh_key.uuid.hex}
Oracle DB Configuration:
Name: {deployment.db_name}
Size: {deployment.db_size} GB / {deployment.db_arch_size} GB
Version: {deployment.db_version_type}
Database type: {deployment.db_template}
Character set: {deployment.db_charset}
Additional data: {deployment.user_data}
""",
},
'undeploy': {
'summary': "Request for removing Oracle DB PaaS instance",
'details': """
Customer name: {customer.name}
Project name: {project.project_group.name}
Environment name: {project.name}
Customer UUID: {customer.uuid.hex}
Project UUID: {project.project_group.uuid.hex}
Environment UUID: {project.uuid.hex}
Oracle DB details:
Name: {deployment.name}
UUID: {deployment.uuid.hex}
""",
},
'resize': {
'summary': "Request for resizing Oracle DB PaaS instance",
'details': """
Customer name: {customer.name}
Project name: {project.project_group.name}
Environment name: {project.name}
Customer UUID: {customer.uuid.hex}
Project UUID: {project.project_group.uuid.hex}
Environment UUID: {project.uuid.hex}
Oracle DB details:
Name: {deployment.name}
UUID: {deployment.uuid.hex}
Hardware Configuration:
Flavor: {deployment.flavor_info}
""",
},
'support': {
'summary': "Custom support request",
'details': """
Customer name: {customer.name}
Project name: {project.project_group.name}
Environment name: {project.name}
Customer UUID: {customer.uuid.hex}
Project UUID: {project.project_group.uuid.hex}
Environment UUID: {project.uuid.hex}
Oracle DB details:
Name: {deployment.name}
UUID: {deployment.uuid.hex}
{message}
""",
},
}
@staticmethod
def django_app():
return 'nodeconductor_paas_oracle'
@staticmethod
def rest_urls():
from .urls import register_in
return register_in
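

# Illustrative note (not part of the extension API): the ticket templates above
# are plain str.format strings, so a hypothetical backend could render one as
#
#   tpl = OracleExtension.Settings.ORACLE_TICKET_TEMPLATES['support']
#   body = tpl['details'].format(customer=customer, project=project,
#                                deployment=deployment, message=message)
#
# where the passed objects expose the attributes referenced in the template.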
| mit | 7,087,574,279,063,508,000 | 37.684211 | 80 | 0.477279 | false |
SheffieldML/TVB | likelihoods.py | 1 | 1637 | # Copyright (c) 2014, James Hensman, Max Zwiessele
# Distributed under the terms of the GNU General public License, see LICENSE.txt
import numpy as np
from scipy.special import gamma, digamma
from scipy import stats
class student_t():
def __init__(self):
self._set_params(np.ones(2))
def _set_params(self, p):
self.nu, self.lamb = p
#compute some constants so that they don't appear in a loop
self._pdf_const = gamma((self.nu + 1)/2.) / gamma(self.nu/2.) * np.sqrt(self.lamb/(self.nu*np.pi) )
self._dnu_const = 0.5*digamma((self.nu + 1.)/2.) - 0.5*digamma(self.nu/2.) - 0.5/self.nu
def _get_params(self):
return np.array([self.nu, self.lamb])
def _get_param_names(self):
return ['nu', 'lambda']
def pdf(self, x, Y):
x2 = np.square(x-Y)
return self._pdf_const * np.power(1 + self.lamb*x2/self.nu, -(self.nu + 1.)/2.)
def dlnpdf_dtheta(self, x, Y):
x2 = np.square(x-Y)
dnu = self._dnu_const - 0.5*np.log(1. + self.lamb*x2/self.nu) + 0.5*(self.nu + 1.)*(self.lamb*x2/self.nu**2)/(1. + self.lamb*x2/self.nu)
dlamb = 0.5/self.lamb - 0.5*(self.nu + 1.)*(x2/self.nu/(1.+self.lamb*x2/self.nu))
return np.vstack((dnu, dlamb))
def predictive_values(self, mu, var, percentiles):
if len(percentiles)==0:
return mu, []
        # draw latent samples f ~ N(mu, var); randn needs integer dimensions,
        # and the noise must be scaled before the mean is added
        samples = np.random.randn(int(40e3), *mu.shape) * np.sqrt(var) + mu
samples = stats.t.rvs(self.nu, loc=samples, scale=np.array(self.lamb).reshape(1,1))
qs = [stats.scoreatpercentile(samples,q,axis=0) for q in percentiles]
return samples.mean(0), qs
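

# Minimal usage sketch (illustrative only; the parameter values are assumptions
# chosen for the demonstration, not defaults of any caller):
if __name__ == "__main__":
    lik = student_t()
    lik._set_params(np.array([4.0, 2.0]))  # nu=4, lambda=2 (assumed values)
    x = np.linspace(-3., 3., 5)
    y = np.zeros_like(x)
    print(lik.pdf(x, y))             # densities of the residuals x - y
    print(lik.dlnpdf_dtheta(x, y))   # gradients of log-density w.r.t. (nu, lambda)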
| gpl-3.0 | 8,509,867,237,343,463,000 | 45.771429 | 144 | 0.602932 | false |
krux/graphite-web | webapp/tests/test_whitelist.py | 1 | 3982 | import errno
import mock
import os
import pickle
from . import DATA_DIR
from django.conf import settings
from django.core.urlresolvers import reverse
from .base import TestCase
from graphite.whitelist.views import load_whitelist, save_whitelist
class WhitelistTester(TestCase):
settings.WHITELIST_FILE = os.path.join(DATA_DIR, 'lists/whitelist')
def wipe_whitelist(self):
try:
os.remove(settings.WHITELIST_FILE)
except OSError:
pass
def create_whitelist(self):
try:
os.makedirs(settings.WHITELIST_FILE.replace('whitelist', ''))
except OSError:
pass
fh = open(settings.WHITELIST_FILE, 'wb')
pickle.dump({'a.b.c.d', 'e.f.g.h'}, fh)
fh.close()
def test_whitelist_show_no_whitelist(self):
url = reverse('whitelist_show')
        with self.assertRaises(IOError):
            self.client.get(url)
def test_whitelist_show(self):
url = reverse('whitelist_show')
self.create_whitelist()
self.addCleanup(self.wipe_whitelist)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, "a.b.c.d\ne.f.g.h")
def test_whitelist_add(self):
self.create_whitelist()
self.addCleanup(self.wipe_whitelist)
url = reverse('whitelist_add')
response = self.client.post(url, {'metrics': ['i.j.k.l']})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, "OK")
url = reverse('whitelist_show')
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, "a.b.c.d\ne.f.g.h\ni.j.k.l")
def test_whitelist_add_existing(self):
self.create_whitelist()
self.addCleanup(self.wipe_whitelist)
url = reverse('whitelist_add')
response = self.client.post(url, {'metrics': ['a.b.c.d']})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, "OK")
url = reverse('whitelist_show')
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, "a.b.c.d\ne.f.g.h")
def test_whitelist_remove(self):
self.create_whitelist()
self.addCleanup(self.wipe_whitelist)
url = reverse('whitelist_remove')
response = self.client.post(url, {'metrics': ['a.b.c.d']})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, "OK")
url = reverse('whitelist_show')
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, "e.f.g.h")
def test_whitelist_remove_missing(self):
self.create_whitelist()
self.addCleanup(self.wipe_whitelist)
url = reverse('whitelist_remove')
response = self.client.post(url, {'metrics': ['i.j.k.l']})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, "OK")
url = reverse('whitelist_show')
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, "a.b.c.d\ne.f.g.h")
def test_save_whitelist(self):
try:
os.makedirs(settings.WHITELIST_FILE.replace('whitelist', ''))
except OSError:
pass
self.addCleanup(self.wipe_whitelist)
self.assertEqual(save_whitelist({'a.b.c.d','e.f.g.h'}), None)
self.assertEqual(load_whitelist(), {'a.b.c.d','e.f.g.h'})
@mock.patch('os.rename')
def test_save_whitelist_rename_failure(self, rename):
self.addCleanup(self.wipe_whitelist)
rename.side_effect = OSError(errno.EPERM, 'Operation not permitted')
with self.assertRaises(OSError):
save_whitelist({'a.b.c.d','e.f.g.h'})
| apache-2.0 | 5,494,387,643,403,174,000 | 33.626087 | 76 | 0.629332 | false |
bulax41/Commands | scripts/mcast_listen.py | 1 | 2340 | #!/bin/python
import socket
import struct
import sys
import signal
import time
import datetime
import argparse
import threading
class McastSocket(socket.socket):
def __init__(self, local_port='', reuse=False):
socket.socket.__init__(self, socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
if(reuse):
self.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if hasattr(socket, "SO_REUSEPORT"):
self.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
self.setsockopt(socket.SOL_SOCKET,socket.SO_RCVBUF,8388608)
self.bind(('', local_port))
def mcast_add(self, addr, iface):
self.setsockopt(
socket.IPPROTO_IP,
socket.IP_ADD_MEMBERSHIP,
socket.inet_aton(addr) + socket.inet_aton(iface))
def signal_handler(signal, frame):
global estop
estop.set()
sys.exit(0)
def join_group(group,args,event):
global count
(mcast_group,mcast_port) = group.split(":")
sock = McastSocket(local_port=int(mcast_port),reuse=1)
sock.mcast_add(mcast_group, args.interface)
stime= datetime.datetime.now()
print "Joining %s:%s at %s" % (mcast_group,mcast_port,stime.strftime("%b %d %Y %X.%f"))
while not event.isSet():
msg,source = sock.recvfrom(1500)
count[group] += 1
print "Exiting Group %s... %s" % (group,datetime.datetime.now().strftime("%b %d %Y %X.%f"))
def main():
parser = argparse.ArgumentParser(description='Subscribe and decode multicast for CME or LMAX')
parser.add_argument('-g', '--group',action="append",required=True,help="Group to join in IP:Port format, may be used more than once")
parser.add_argument('-i','--interface',required=True,help="IP address of the Interface to join on")
parser.add_argument('-q','--quiet',action="count",help="Do not print packet count")
args = parser.parse_args()
global estop, count
count = {}
signal.signal(signal.SIGINT, signal_handler)
estop = threading.Event()
threads = []
for group in args.group:
count[group] = 0
        t = threading.Thread(target=join_group, args=(group,args,estop))
        t.daemon = True # daemon threads let the process exit while blocked in recvfrom
        threads.append(t)
        t.start()
while True:
time.sleep(1)
for c,v in count.items():
print "%s: %s" % (c,v),
print "\r",
if __name__ == '__main__':
main()
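
# Example invocation (group and interface values are hypothetical):
#   ./mcast_listen.py -g 239.1.1.1:20000 -g 239.1.1.2:20001 -i 10.0.0.5
# Each joined group prints a running packet count until interrupted with Ctrl-C.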
| gpl-3.0 | 1,310,441,972,813,679,400 | 30.2 | 137 | 0.641026 | false |
PatrikValkovic/grammpy | tests/transformations_test/RemoveEpsilonRules/Reverse/SimpleTest.py | 1 | 1825 | #!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 20.08.2017 16:01
:Licence MIT
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
from grammpy.parsers import cyk
from grammpy.transforms import ContextFree, InverseContextFree
class S(Nonterminal): pass
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class RuleS0B(Rule): rule = ([S], [0, B])
class RuleA1B(Rule): rule = ([A], [1, B])
class RuleAEps(Rule): rule = ([A], [EPS])
class RuleBEps(Rule): rule = ([B], [EPS])
class RuleB1C(Rule): rule = ([B], [2, C])
class RuleC11(Rule): rule = ([C], [3, 3])
"""
S->1B A->1B A->eps B->eps B->1C C->11
ToEpsilon: A,B
S->1B A->1B A->eps B->eps B->1C C->11 S->1 A->1
------ ------ ++++ ++++
"""
class SimpleTest(TestCase):
def test_simpleTest(self):
g = Grammar(terminals=[0, 1, 2, 3],
nonterminals=[S, A, B, C],
rules=[RuleS0B, RuleA1B, RuleAEps, RuleBEps, RuleB1C, RuleC11],
start_symbol=S)
gr = ContextFree.transform_to_chomsky_normal_form(ContextFree.remove_unit_rules(ContextFree.remove_rules_with_epsilon(g)))
pars = cyk(gr, [0])
s = InverseContextFree.epsilon_rules_restore(InverseContextFree.unit_rules_restore(InverseContextFree.transform_from_chomsky_normal_form(pars)))
self.assertIsInstance(s, S)
self.assertIsInstance(s.to_rule, RuleS0B)
self.assertIsInstance(s.to_rule.to_symbols[0], Terminal)
self.assertEqual(s.to_rule.to_symbols[0].s, 0)
b = s.to_rule.to_symbols[1]
self.assertIsInstance(b, B)
self.assertIsInstance(b.to_rule, RuleBEps)
self.assertIs(b.to_rule.to_symbols[0].s, EPS)
if __name__ == '__main__':
main()
| mit | 7,512,837,500,692,749,000 | 32.181818 | 152 | 0.614795 | false |
McIntyre-Lab/papers | newman_events_2017/python_workflow/programs/build_intron2border_junction_index.py | 1 | 5945 | #!/usr/bin/env python3
#######################################################################################################################
#
# DATE: 2017-12-15
# NAME: build_intron2border_junction_index.py
# AUTHOR: Jeremy R. B. Newman ([email protected])
#
# DESCRIPTION: This script creates an intron-to-border junction index file used by Event Analysis to report
# the read coverage of introns, their associated border junctions and flanking exonic regions (fusions), to aid
# the user in deciding whether there is evidence on intron retention, alternative/novel splice usage, etc.
# It takes the annotation CSVs for junctions, exonic regions and introns to assemble a complete intron/border index,
# where each border junction and intron are assigned to a single intron event, flanked by its neighboring
# exonic regions. Where the exonic regions of intron events can be assigned to multiple genes, then the output of this
# intron event is suppressed, to avoid instances of overlapping intron events.
#
# REQUIRED PACKAGES: pandas (tested with v0.19.2)
# argparse (tested with v1.1)
# logging (tested with v0.5.1.2)
#
#######################################################################################################################
# Import required packages
import pandas as pd
import logging
import argparse
import sqlite3
def getOptions():
# Parse command line arguments
parser = argparse.ArgumentParser(description="Generate an intron-to-border-junction index file for"
"interpreting read coverage of intronic regions")
parser.add_argument('--intron-annotation-file', dest="inIntrons", required=True, help="Input intron annotation CSV")
parser.add_argument("--junction-annotation-file", dest="inJunctions", required=True,
help="Input junction annotation CSV")
parser.add_argument("--output-intron-index", dest="outCSV", required=True,
help="Output event index CSV")
args = parser.parse_args()
return args
def main():
# Connect to SQL database
con = sqlite3.connect(":memory:")
cur = con.cursor()
# Import intron and junction annotations
logger.info("Importing intron and junction annotations")
intronDF = pd.read_csv(args.inIntrons, usecols=('intron_id','chr','intron_start','intron_stop','gene_id',
'exonic_region_id_5prime','exonic_region_id_3prime'))
juncDF = pd.read_csv(args.inJunctions, usecols=('junction_id','chr','donor_stop','acceptor_start','transcript_id',
'gene_id','flag_border_junction'))
# Convert to SQL tables
intronDF.to_sql("intronInfo", con, if_exists="replace")
juncDF.to_sql("juncInfo", con, if_exists="replace")
# So border junctions and introns can be merged, donor_stop and acceptor start need to renamed to intron_start
# and intron_stop respectively. When the "donor exon" is an intron, donor_stop = intron_stop
# When the "acceptor exon" is an intron, acceptor_start = intron_start
# I am going to first map 5' border junctions to the 5' end of introns, then 3'
# border junctions for the 3' end of the introns.
# First, I want to expand concatenated gene IDs. Junctions with multiple gene ID shouldn't be retained in the
# final output, but iterate over these for completeness
cur.execute("""Select junction_id, chr , donor_stop , acceptor_start , gene_id from juncInfo WHERE flag_border_junction = 1""")
allBorders = cur.fetchall()
cur.execute("""CREATE TABLE IF NOT EXISTS borderInfo
(junction_id TEXT, chr TEXT, donor_stop INT, acceptor_start INT, gene_id TEXT)""")
for border in allBorders:
genes = border[4].split("|")
for gn in genes:
cur.execute("INSERT INTO borderInfo VALUES(:junction_id, :chr, :donor_stop, :acceptor_start, :gene_id)",
{"junction_id": border[0], "chr": border[1], "donor_stop": border[2],
"acceptor_start": border[3], "gene_id":gn})
# Merge INNER with intron table on chromosome, gene, and acceptor_start (as intron_start)
cur.execute("CREATE TABLE intronWstart AS SELECT in1.intron_id, in1.chr, in1.intron_start, in1.intron_stop, "
"in1.gene_id, in1.exonic_region_id_5prime, in2.junction_id AS border_junction_id_5prime "
"FROM intronInfo in1 INNER JOIN borderInfo in2 "
"ON in1.chr = in2.chr AND in1.gene_id = in2.gene_id AND in1.intron_start = in2.acceptor_start ;")
# Merge INNER with intron table on chromosome, gene, and donor_stop (as intron_stop)
cur.execute("CREATE TABLE intronWstop AS SELECT in1.intron_id, in1.chr, in1.gene_id, "
"in1.exonic_region_id_3prime, in2.junction_id AS border_junction_id_3prime "
"FROM intronInfo in1 INNER JOIN borderInfo in2 "
"ON in1.chr = in2.chr AND in1.gene_id = in2.gene_id AND in1.intron_stop = in2.donor_stop ;")
cur.execute("CREATE TABLE intronBorderIndex AS SELECT in1.*, in2.exonic_region_id_3prime,"
"in2.border_junction_id_3prime FROM intronWstart in1 "
"INNER JOIN intronWstop in2 ON in1.gene_id = in2.gene_id AND in1.intron_id = in2.intron_id ;")
intronBorderIndexDF = pd.read_sql("SELECT * FROM intronBorderIndex ORDER BY chr, intron_start, intron_stop ;", con)
# Write output index
with open(args.outCSV, 'w') as outIndex:
intronBorderIndexDF.to_csv(outIndex, encoding='utf-8', index=False)
if __name__ == '__main__':
    # Parse command line arguments (bound at module level so main() can see them)
    args = getOptions()
    # Set up the logger; without a basicConfig call, INFO messages are suppressed by default
    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger()
    logger.info('Starting script')
    # Calling main script
    main()
    logger.info('Script complete: index created!') | lgpl-3.0 | 2,977,821,361,398,456,300 | 56.728155 | 131 | 0.639024 | false |
Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_06_01/operations/_virtual_network_gateways_operations.py | 1 | 103438 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class VirtualNetworkGatewaysOperations(object):
"""VirtualNetworkGatewaysOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _create_or_update_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "_models.VirtualNetworkGateway"
**kwargs # type: Any
):
# type: (...) -> "_models.VirtualNetworkGateway"
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VirtualNetworkGateway')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "_models.VirtualNetworkGateway"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.VirtualNetworkGateway"]
"""Creates or updates a virtual network gateway in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param parameters: Parameters supplied to create or update virtual network gateway operation.
:type parameters: ~azure.mgmt.network.v2018_06_01.models.VirtualNetworkGateway
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualNetworkGateway or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_06_01.models.VirtualNetworkGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
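    # Usage sketch (illustrative only, not part of the generated surface). Assuming a
    # subscription ID and credentials, a client could be built and the poller blocked on:
    #
    #     from azure.identity import DefaultAzureCredential
    #     from azure.mgmt.network import NetworkManagementClient
    #     client = NetworkManagementClient(DefaultAzureCredential(), "<subscription-id>")
    #     poller = client.virtual_network_gateways.begin_create_or_update(
    #         "example-rg", "example-gw", gw_model)  # gw_model: a VirtualNetworkGateway instance
    #     gateway = poller.result()  # blocks until the LRO completes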
def get(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.VirtualNetworkGateway"
"""Gets the specified virtual network gateway by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualNetworkGateway, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_06_01.models.VirtualNetworkGateway
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
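    # Usage sketch (illustrative): `get` is synchronous, so no poller is involved.
    # With the hypothetical `client` from the sketch above:
    #
    #     gateway = client.virtual_network_gateways.get("example-rg", "example-gw")
    #     print(gateway.provisioning_state)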
def _delete_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified virtual network gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
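    # Usage sketch (illustrative): deletion is an LRO that resolves to None; calling
    # result() simply waits for completion. With the hypothetical `client`:
    #
    #     client.virtual_network_gateways.begin_delete("example-rg", "example-gw").result()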
def _update_tags_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> "_models.VirtualNetworkGateway"
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._update_tags_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
def begin_update_tags(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.VirtualNetworkGateway"]
"""Updates a virtual network gateway tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param parameters: Parameters supplied to update virtual network gateway tags.
:type parameters: ~azure.mgmt.network.v2018_06_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualNetworkGateway or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_06_01.models.VirtualNetworkGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._update_tags_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
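    # Usage sketch (illustrative): tags are supplied via the TagsObject model.
    # With the hypothetical `client`:
    #
    #     from azure.mgmt.network.v2018_06_01.models import TagsObject
    #     poller = client.virtual_network_gateways.begin_update_tags(
    #         "example-rg", "example-gw", TagsObject(tags={"env": "test"}))
    #     gateway = poller.result()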
def list(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.VirtualNetworkGatewayListResult"]
"""Gets all virtual network gateways by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualNetworkGatewayListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2018_06_01.models.VirtualNetworkGatewayListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGatewayListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGatewayListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways'} # type: ignore
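    # Usage sketch (illustrative): the returned ItemPaged follows next_link paging
    # transparently, so gateways can be consumed as a flat iterator:
    #
    #     for gw in client.virtual_network_gateways.list("example-rg"):
    #         print(gw.name)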
def list_connections(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.VirtualNetworkGatewayListConnectionsResult"]
"""Gets all the connections in a virtual network gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualNetworkGatewayListConnectionsResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2018_06_01.models.VirtualNetworkGatewayListConnectionsResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGatewayListConnectionsResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_connections.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGatewayListConnectionsResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_connections.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/connections'} # type: ignore
def _reset_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
gateway_vip=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> Optional["_models.VirtualNetworkGateway"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.VirtualNetworkGateway"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
# Construct URL
url = self._reset_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if gateway_vip is not None:
query_parameters['gatewayVip'] = self._serialize.query("gateway_vip", gateway_vip, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_reset_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/reset'} # type: ignore
def begin_reset(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
gateway_vip=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.VirtualNetworkGateway"]
"""Resets the primary of the virtual network gateway in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
        :param gateway_vip: Virtual network gateway VIP address supplied to begin reset of an
         active-active feature enabled gateway.
:type gateway_vip: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualNetworkGateway or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_06_01.models.VirtualNetworkGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._reset_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
gateway_vip=gateway_vip,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_reset.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/reset'} # type: ignore
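    # Usage sketch (illustrative): for an active-active gateway, pass the VIP of the
    # instance to reset; for an active-standby gateway, omit gateway_vip to reset the
    # primary. With the hypothetical `client`:
    #
    #     poller = client.virtual_network_gateways.begin_reset("example-rg", "example-gw")
    #     gateway = poller.result()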
def _reset_vpn_client_shared_key_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
# Construct URL
url = self._reset_vpn_client_shared_key_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_reset_vpn_client_shared_key_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/resetvpnclientsharedkey'} # type: ignore
def begin_reset_vpn_client_shared_key(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Resets the VPN client shared key of the virtual network gateway in the specified resource
group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._reset_vpn_client_shared_key_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_reset_vpn_client_shared_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/resetvpnclientsharedkey'} # type: ignore
def _generatevpnclientpackage_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "_models.VpnClientParameters"
**kwargs # type: Any
):
# type: (...) -> Optional[str]
cls = kwargs.pop('cls', None) # type: ClsType[Optional[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._generatevpnclientpackage_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VpnClientParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_generatevpnclientpackage_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnclientpackage'} # type: ignore
def begin_generatevpnclientpackage(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "_models.VpnClientParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller[str]
"""Generates VPN client package for P2S client of the virtual network gateway in the specified
resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param parameters: Parameters supplied to the generate virtual network gateway VPN client
package operation.
:type parameters: ~azure.mgmt.network.v2018_06_01.models.VpnClientParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either str or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[str]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[str]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._generatevpnclientpackage_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_generatevpnclientpackage.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnclientpackage'} # type: ignore
def _generate_vpn_profile_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "_models.VpnClientParameters"
**kwargs # type: Any
):
# type: (...) -> Optional[str]
cls = kwargs.pop('cls', None) # type: ClsType[Optional[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._generate_vpn_profile_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VpnClientParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_generate_vpn_profile_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnprofile'} # type: ignore
def begin_generate_vpn_profile(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "_models.VpnClientParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller[str]
"""Generates VPN profile for P2S client of the virtual network gateway in the specified resource
        group. Used for IKEv2 and RADIUS based authentication.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param parameters: Parameters supplied to the generate virtual network gateway VPN client
package operation.
:type parameters: ~azure.mgmt.network.v2018_06_01.models.VpnClientParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either str or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[str]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[str]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._generate_vpn_profile_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_generate_vpn_profile.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnprofile'} # type: ignore
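    # Usage sketch (illustrative): the poller resolves to a package URL (a plain str).
    # Assuming a VpnClientParameters instance `vpn_params` built elsewhere:
    #
    #     poller = client.virtual_network_gateways.begin_generate_vpn_profile(
    #         "example-rg", "example-gw", vpn_params)
    #     package_url = poller.result()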
def _get_vpn_profile_package_url_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Optional[str]
cls = kwargs.pop('cls', None) # type: ClsType[Optional[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
# Construct URL
url = self._get_vpn_profile_package_url_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_vpn_profile_package_url_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getvpnprofilepackageurl'} # type: ignore
def begin_get_vpn_profile_package_url(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[str]
"""Gets pre-generated VPN profile for P2S client of the virtual network gateway in the specified
resource group. The profile needs to be generated first using generateVpnProfile.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either str or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[str]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[str]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_vpn_profile_package_url_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_vpn_profile_package_url.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getvpnprofilepackageurl'} # type: ignore
def _get_bgp_peer_status_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
peer=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> Optional["_models.BgpPeerStatusListResult"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.BgpPeerStatusListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
# Construct URL
url = self._get_bgp_peer_status_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if peer is not None:
query_parameters['peer'] = self._serialize.query("peer", peer, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('BgpPeerStatusListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_bgp_peer_status_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getBgpPeerStatus'} # type: ignore
def begin_get_bgp_peer_status(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
peer=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.BgpPeerStatusListResult"]
"""The GetBgpPeerStatus operation retrieves the status of all BGP peers.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param peer: The IP address of the peer to retrieve the status of.
:type peer: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either BgpPeerStatusListResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_06_01.models.BgpPeerStatusListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.BgpPeerStatusListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_bgp_peer_status_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
peer=peer,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('BgpPeerStatusListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_bgp_peer_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getBgpPeerStatus'} # type: ignore
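    # A minimal usage sketch (assumptions: ``client`` is an already-constructed
    # NetworkManagementClient and the resource names are placeholders; none of
    # these appear in this module):
    #
    #   poller = client.virtual_network_gateways.begin_get_bgp_peer_status(
    #       resource_group_name="example-rg",
    #       virtual_network_gateway_name="example-gw",
    #       peer="10.0.0.4",            # optional; omit to query every peer
    #   )
    #   bgp_status = poller.result()    # blocks until the LRO reaches a final state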
def supported_vpn_devices(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> str
"""Gets a xml format representation for supported vpn devices.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: str, or the result of cls(response)
:rtype: str
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[str]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
# Construct URL
url = self.supported_vpn_devices.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
supported_vpn_devices.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/supportedvpndevices'} # type: ignore
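    # Usage sketch (plain synchronous call, no poller; the resource names are
    # placeholders):
    #
    #   xml_listing = client.virtual_network_gateways.supported_vpn_devices(
    #       "example-rg", "example-gw")
    #   print(xml_listing)              # str holding the supported-device XML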
def _get_learned_routes_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Optional["_models.GatewayRouteListResult"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.GatewayRouteListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
# Construct URL
url = self._get_learned_routes_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_learned_routes_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getLearnedRoutes'} # type: ignore
def begin_get_learned_routes(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.GatewayRouteListResult"]
"""This operation retrieves a list of routes the virtual network gateway has learned, including
routes learned from BGP peers.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either GatewayRouteListResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_06_01.models.GatewayRouteListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.GatewayRouteListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_learned_routes_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_learned_routes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getLearnedRoutes'} # type: ignore
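    # Sketch of the continuation-token pattern the docstring above mentions
    # (every name except the method itself is a placeholder):
    #
    #   poller = client.virtual_network_gateways.begin_get_learned_routes(
    #       "example-rg", "example-gw")
    #   token = poller.continuation_token()   # persist this string somewhere
    #   ...
    #   resumed = client.virtual_network_gateways.begin_get_learned_routes(
    #       "example-rg", "example-gw", continuation_token=token)
    #   routes = resumed.result()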
def _get_advertised_routes_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
peer, # type: str
**kwargs # type: Any
):
# type: (...) -> Optional["_models.GatewayRouteListResult"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.GatewayRouteListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
# Construct URL
url = self._get_advertised_routes_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['peer'] = self._serialize.query("peer", peer, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_advertised_routes_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getAdvertisedRoutes'} # type: ignore
def begin_get_advertised_routes(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
peer, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.GatewayRouteListResult"]
"""This operation retrieves a list of routes the virtual network gateway is advertising to the
specified peer.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param peer: The IP address of the peer.
:type peer: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either GatewayRouteListResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_06_01.models.GatewayRouteListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.GatewayRouteListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_advertised_routes_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
peer=peer,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_advertised_routes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getAdvertisedRoutes'} # type: ignore
def _set_vpnclient_ipsec_parameters_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
vpnclient_ipsec_params, # type: "_models.VpnClientIPsecParameters"
**kwargs # type: Any
):
# type: (...) -> Optional["_models.VpnClientIPsecParameters"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.VpnClientIPsecParameters"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._set_vpnclient_ipsec_parameters_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(vpnclient_ipsec_params, 'VpnClientIPsecParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VpnClientIPsecParameters', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_set_vpnclient_ipsec_parameters_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/setvpnclientipsecparameters'} # type: ignore
def begin_set_vpnclient_ipsec_parameters(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
vpnclient_ipsec_params, # type: "_models.VpnClientIPsecParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.VpnClientIPsecParameters"]
"""The Set VpnclientIpsecParameters operation sets the vpnclient ipsec policy for P2S client of
virtual network gateway in the specified resource group through Network resource provider.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param vpnclient_ipsec_params: Parameters supplied to the Begin Set vpnclient ipsec parameters
of Virtual Network Gateway P2S client operation through Network resource provider.
:type vpnclient_ipsec_params: ~azure.mgmt.network.v2018_06_01.models.VpnClientIPsecParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VpnClientIPsecParameters or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_06_01.models.VpnClientIPsecParameters]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VpnClientIPsecParameters"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._set_vpnclient_ipsec_parameters_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
vpnclient_ipsec_params=vpnclient_ipsec_params,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VpnClientIPsecParameters', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_set_vpnclient_ipsec_parameters.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/setvpnclientipsecparameters'} # type: ignore
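    # A hedged sketch of invoking this LRO; the VpnClientIPsecParameters field
    # values are illustrative and the field names are assumptions about the
    # model, not verified against this package:
    #
    #   from azure.mgmt.network.v2018_06_01 import models as _models
    #   params = _models.VpnClientIPsecParameters(
    #       sa_life_time_seconds=86472, sa_data_size_kilobytes=429497,
    #       ipsec_encryption="AES256", ipsec_integrity="SHA256",
    #       ike_encryption="AES256", ike_integrity="SHA384",
    #       dh_group="DHGroup24", pfs_group="PFS24")
    #   poller = client.virtual_network_gateways.begin_set_vpnclient_ipsec_parameters(
    #       "example-rg", "example-gw", params)
    #   applied = poller.result()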
def _get_vpnclient_ipsec_parameters_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.VpnClientIPsecParameters"
cls = kwargs.pop('cls', None) # type: ClsType["_models.VpnClientIPsecParameters"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
# Construct URL
url = self._get_vpnclient_ipsec_parameters_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VpnClientIPsecParameters', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_vpnclient_ipsec_parameters_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getvpnclientipsecparameters'} # type: ignore
def begin_get_vpnclient_ipsec_parameters(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.VpnClientIPsecParameters"]
"""The Get VpnclientIpsecParameters operation retrieves information about the vpnclient ipsec
policy for P2S client of virtual network gateway in the specified resource group through
Network resource provider.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The virtual network gateway name.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VpnClientIPsecParameters or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_06_01.models.VpnClientIPsecParameters]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VpnClientIPsecParameters"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_vpnclient_ipsec_parameters_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VpnClientIPsecParameters', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_vpnclient_ipsec_parameters.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getvpnclientipsecparameters'} # type: ignore
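    # Sketch of the polling keywords the docstring describes (values are
    # illustrative): polling=True selects ARMPolling, polling=False returns
    # immediately without waiting, and polling_interval overrides the default
    # wait when the service sends no Retry-After header:
    #
    #   poller = client.virtual_network_gateways.begin_get_vpnclient_ipsec_parameters(
    #       "example-rg", "example-gw", polling_interval=5)
    #   current = poller.result()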
def vpn_device_configuration_script(
self,
resource_group_name, # type: str
virtual_network_gateway_connection_name, # type: str
parameters, # type: "_models.VpnDeviceScriptParameters"
**kwargs # type: Any
):
# type: (...) -> str
"""Gets a xml format representation for vpn device configuration script.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_connection_name: The name of the virtual network gateway
connection for which the configuration script is generated.
:type virtual_network_gateway_connection_name: str
:param parameters: Parameters supplied to the generate vpn device script operation.
:type parameters: ~azure.mgmt.network.v2018_06_01.models.VpnDeviceScriptParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: str, or the result of cls(response)
:rtype: str
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[str]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.vpn_device_configuration_script.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayConnectionName': self._serialize.url("virtual_network_gateway_connection_name", virtual_network_gateway_connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VpnDeviceScriptParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
vpn_device_configuration_script.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}/vpndeviceconfigurationscript'} # type: ignore
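    # Usage sketch; the VpnDeviceScriptParameters field names and values are
    # assumptions about the model, not verified against this package:
    #
    #   script_params = _models.VpnDeviceScriptParameters(
    #       vendor="Cisco", device_family="ISR", firmware_version="15.x")
    #   script = client.virtual_network_gateways.vpn_device_configuration_script(
    #       "example-rg", "example-connection", script_params)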
| mit | -3,029,695,423,026,583,000 | 51.373671 | 255 | 0.652188 | false |
cragusa/cocoma | bin/Logger.py | 1 | 5704 | #!/usr/bin/env python
#Copyright 2012-2013 SAP Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This is part of the COCOMA framework
#
# COCOMA is a framework for COntrolled COntentious and MAlicious patterns
#
import psutil,time,Library,logging,EMQproducer
from datetime import datetime as dt
from logging import handlers
from EMQproducer import Producer
global producer
producer = Producer()
global myName
myName = "Logger"
emulationEndLogger = None
def singleLogger(elementName,level=None,filename=None):
#file writing handler
producer=Producer()
HOMEPATH= Library.getHomepath()
global emulationEndLogger
emulationEndLogger=Library.loggerSet("Logger")
    def logLevelGet():
        # map the configured core log level onto a logging constant;
        # anything other than "debug" falls back to INFO
        LogLevel = Library.readLogLevel("coreloglevel")
        if LogLevel == "info":
            LOG_LEVEL = logging.INFO
        elif LogLevel == "debug":
            LOG_LEVEL = logging.DEBUG
        else:
            LOG_LEVEL = logging.INFO
        return LOG_LEVEL
if level==None:
level=logLevelGet()
fileLogger=logging.getLogger(elementName)
fileLogger.setLevel(level)
#we do not add additional handlers if they are there
if not len(fileLogger.handlers):
#adding producer handler
#bHandler= EMQproducer.BroadcastLogHandler(elementName,producer)
#fileLogger.addHandler(bHandler)
#EMQproducer.StreamAndBroadcastHandler("TEST",producer)
if filename == None:
#setting log rotation for 10 files each up to 10000000 bytes (10MB)
fileHandler = handlers.RotatingFileHandler(HOMEPATH+"/logs/COCOMAlogfile.csv",'a', 10000000, 10)
fileLoggerFormatter=logging.Formatter ('%(asctime)s;%(name)s;%(levelname)s;%(message)s',datefmt='%m/%d/%Y %H:%M:%S')
fileHandler.setFormatter(fileLoggerFormatter)
fileLogger.addHandler(fileHandler)
#cli writing handler
cliLoggerFormatter=logging.Formatter ('%(asctime)s - [%(name)s] - %(levelname)s : %(message)s',datefmt='%m/%d/%Y %H:%M:%S')
cliHandler = logging.StreamHandler()
cliHandler.setFormatter(cliLoggerFormatter)
fileLogger.addHandler(cliHandler)
else:
fileHandler= logging.FileHandler(HOMEPATH+"/logs/"+str(filename))
fileLoggerFormatter=logging.Formatter ('%(asctime)s;%(name)s;%(levelname)s;%(message)s',datefmt='%m/%d/%Y %H:%M:%S')
fileHandler.setFormatter(fileLoggerFormatter)
fileLogger.addHandler(fileHandler)
return fileLogger
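# Usage sketch for the factory above (the element names are arbitrary labels):
#
#   log = singleLogger("Scheduler")                 # rotating COCOMAlogfile.csv + CLI
#   log.info("scheduler started")
#   emuLog = singleLogger("EMU-1", logging.DEBUG, "emu1.log")   # dedicated log file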
#Logger job that collects system stats during emulation , run by scheduler
def emulationEnd(emulationName):
"""
IN: job that executes at the end of emulation
DOING: just producing logger notification
OUT: nothing
"""
try:
print "Emulation Time expired, removing extra jobs and stopping running processes"
global emulationEndLogger
msg = {"Action":"Emulation finished","EmulationName":str(emulationName)}
producer.sendmsg(myName,msg)
emulationEndLogger.info(msg)
#emulationEndLogger.info("Emulation '"+str(emulationName)+"' finished.")
Library.removeExtraJobs(emulationName)
Library.killRemainingProcesses()
Library.deleteFiles("/tmp/stressapptestFile", "*") # Remove any stressappTest files left behind from I/O loading
return True
except:
return False
def loadMon(duration,interval,emulationID,emulationName,emuStartTime):
HOMEPATH= Library.getHomepath()
emulationName=str(emulationName)
interval=int(interval)
    # start cpu monitoring in the loop
iterationsNo=int(duration)/int(interval)
try:
f = open(HOMEPATH+"/logs/"+str(emulationID)+"-"+str(emulationName)+"-res"+"_"+str(emuStartTime)+".csv", 'a')
f.write(emulationName+";\nCountdown;Time;CPU(%);MEM(%);IOread(bytes);IOwrite(bytes);NET(bytes_sent)\n")
#start time
initTime=time.time()
while iterationsNo !=0:
CPU=str(psutil.cpu_percent(interval, False))
#MEM=str(psutil.virtual_memory().percent)
MEM=str(psutil.avail_virtmem())
            IOr=str(psutil.disk_io_counters().read_bytes)   # bytes read, matching the IOread(bytes) header
            IOw=str(psutil.disk_io_counters().write_bytes)  # bytes written, matching the IOwrite(bytes) header
NET=str(psutil.network_io_counters(False).bytes_sent)
#print (emulationName+";\nTime;CPU(%);MEM(%);IOread(bytes);IOwrite(bytes);NET(bytes_sent)\n"+str(time.time())+";"+CPU+";"+MEM+";"+IOr+";"+IOw+";"+NET)
probeTime=time.time()-initTime
timeStamp=dt.now()
f.write(str(int(probeTime))+";"+str(timeStamp.strftime("%Y-%m-%d %H:%M:%S.%f"))+";"+CPU+";"+MEM+";"+IOr+";"+IOw+";"+NET+"\n")
iterationsNo=iterationsNo-1
    except Exception,e:
        print "Unable to create log file\nError: ",e
    else:
        f.close()
if __name__ == '__main__':
    # sample values for a standalone run; loadMon() takes five arguments
    duration = 20
    interval = 1
    emulationID = 1
    emulationName = "Emulation-1"
    emuStartTime = time.strftime("%Y-%m-%d_%H-%M-%S")
    loadMon(duration, interval, emulationID, emulationName, emuStartTime)
| apache-2.0 | -399,721,959,556,241,200 | 32.162791 | 162 | 0.645863 | false |
isc-projects/forge | tests/dhcpv4/ddns/test_ddns_no_tsig_request.py | 1 | 41287 | """DDNS without TSIG"""
# pylint: disable=invalid-name,line-too-long
import pytest
import misc
import srv_control
import srv_msg
@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.forward_reverse_add
def test_ddns4_notsig_forw_and_rev_add_success_Sflag():
misc.test_setup()
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.10-192.168.50.10')
srv_control.add_ddns_server('127.0.0.1', '53001')
srv_control.add_ddns_server_options('enable-updates', True)
srv_control.add_ddns_server_options('generated-prefix', 'four')
srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
srv_control.use_dns_set_number(20)
srv_control.start_srv('DNS', 'started')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
srv_msg.response_check_option_content(81, 'flags', 1)
srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', '192.168.50.10')
srv_msg.dns_option_content('ANSWER', 'rrname', 'aa.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.')
srv_msg.dns_option_content('ANSWER', 'rrname', '10.50.168.192.in-addr.arpa.')
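# The tests below reuse the same skeleton: a DISCOVER/OFFER/REQUEST/ACK exchange
# whose REQUEST carries FQDN option 81 with the S flag set, followed by DNS
# queries asserting whether the expected A and PTR records were created. In
# condensed form (step names as used above):
#
#   srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
#   srv_msg.client_does_include('Client', 'fqdn')
#   srv_msg.client_send_msg('REQUEST')
#   ...
#   srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
#   srv_msg.client_send_dns_query()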
@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.forward_reverse_add
def test_ddns4_notsig_forw_and_rev_add_fail_Sflag():
misc.test_setup()
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.10-192.168.50.10')
srv_control.add_ddns_server('127.0.0.1', '53001')
srv_control.add_ddns_server_options('enable-updates', True)
srv_control.add_ddns_server_options('generated-prefix', 'four')
srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
srv_control.use_dns_set_number(20)
srv_control.start_srv('DNS', 'started')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.exae.com.')
srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
srv_msg.response_check_option_content(81, 'flags', 1)
srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.exae.com.')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.forward_reverse_add
def test_ddns4_notsig_forw_and_rev_notenabled_Sflag():
misc.test_setup()
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.10-192.168.50.10')
srv_control.add_ddns_server('127.0.0.1', '53001')
srv_control.add_ddns_server_options('enable-updates', False)
srv_control.add_ddns_server_options('generated-prefix', 'four')
srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
srv_control.use_dns_set_number(20)
srv_control.start_srv('DNS', 'started')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
    # Response option 81 MUST contain flags 10, i.e. the N and O bits set because
    # server updates are disabled (TODO: assert the S, N and O bits individually).
srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.forward_update
def test_ddns4_notsig_forw_and_rev_update_success_Sflag():
misc.test_setup()
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.10-192.168.50.10')
srv_control.add_ddns_server('127.0.0.1', '53001')
srv_control.add_ddns_server_options('enable-updates', True)
srv_control.add_ddns_server_options('generated-prefix', 'four')
srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
srv_control.use_dns_set_number(20)
srv_control.start_srv('DNS', 'started')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
srv_msg.response_check_option_content(81, 'flags', 1)
srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', '192.168.50.10')
srv_msg.dns_option_content('ANSWER', 'rrname', 'aa.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.')
srv_msg.dns_option_content('ANSWER', 'rrname', '10.50.168.192.in-addr.arpa.')
misc.test_setup()
srv_control.start_srv('DHCP', 'stopped')
srv_control.clear_some_data('leases')
misc.test_setup()
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.11-192.168.50.11')
srv_control.add_ddns_server('127.0.0.1', '53001')
srv_control.add_ddns_server_options('enable-updates', True)
srv_control.add_ddns_server_options('generated-prefix', 'four')
srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', '192.168.50.10')
srv_msg.dns_option_content('ANSWER', 'rrname', 'aa.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.')
srv_msg.dns_option_content('ANSWER', 'rrname', '10.50.168.192.in-addr.arpa.')
misc.test_procedure()
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.11')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.11')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.11')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
srv_msg.response_check_option_content(81, 'flags', 1)
srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', '192.168.50.11')
srv_msg.dns_option_content('ANSWER', 'rrname', 'aa.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('11.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.')
srv_msg.dns_option_content('ANSWER', 'rrname', '11.50.168.192.in-addr.arpa.')
@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.forward_reverse_add
def test_ddns4_notsig_forw_and_rev_two_dhci_Sflag():
misc.test_setup()
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.10-192.168.50.11')
srv_control.add_ddns_server('127.0.0.1', '53001')
srv_control.add_ddns_server_options('enable-updates', True)
srv_control.add_ddns_server_options('generated-prefix', 'four')
srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
srv_control.use_dns_set_number(20)
srv_control.start_srv('DNS', 'started')
misc.test_procedure()
srv_msg.dns_question_record('client1.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.dns_question_record('client2.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:11')
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:11')
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'client1.four.example.com.')
srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
srv_msg.response_check_option_content(81, 'flags', 1)
srv_msg.response_check_option_content(81, 'fqdn', 'client1.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('client1.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', '192.168.50.10')
srv_msg.dns_option_content('ANSWER', 'rrname', 'client1.four.example.com.')
# Client 2 add
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:12')
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.11')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:12')
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.11')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'client2.four.example.com.')
srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.11')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
srv_msg.response_check_option_content(81, 'flags', 1)
srv_msg.response_check_option_content(81, 'fqdn', 'client2.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('client2.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', '192.168.50.11')
srv_msg.dns_option_content('ANSWER', 'rrname', 'client2.four.example.com.')
@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.forward_reverse_add
def test_ddns4_notsig_forw_and_rev_dhci_conflicts_Sflag():
misc.test_setup()
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.10-192.168.50.11')
srv_control.add_ddns_server('127.0.0.1', '53001')
srv_control.add_ddns_server_options('enable-updates', True)
srv_control.add_ddns_server_options('generated-prefix', 'four')
srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
srv_control.use_dns_set_number(20)
srv_control.start_srv('DNS', 'started')
misc.test_procedure()
srv_msg.dns_question_record('client1.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.dns_question_record('client2.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.dns_question_record('11.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:11')
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:11')
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'client1.four.example.com.')
srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
srv_msg.response_check_option_content(81, 'flags', 1)
srv_msg.response_check_option_content(81, 'fqdn', 'client1.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('client1.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', '192.168.50.10')
srv_msg.dns_option_content('ANSWER', 'rrname', 'client1.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', 'client1.four.example.com.')
srv_msg.dns_option_content('ANSWER', 'rrname', '10.50.168.192.in-addr.arpa.')
# Client 2 add
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:12')
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.11')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:12')
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.11')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'client2.four.example.com.')
srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.11')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
srv_msg.response_check_option_content(81, 'flags', 1)
srv_msg.response_check_option_content(81, 'fqdn', 'client2.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('client2.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', '192.168.50.11')
srv_msg.dns_option_content('ANSWER', 'rrname', 'client2.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('11.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', 'client2.four.example.com.')
srv_msg.dns_option_content('ANSWER', 'rrname', '11.50.168.192.in-addr.arpa.')
    # Client 2 tries to update client 1's domain
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:12')
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.11')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:12')
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.11')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'client1.four.example.com.')
srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.11')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
srv_msg.response_check_option_content(81, 'flags', 1)
srv_msg.response_check_option_content(81, 'fqdn', 'client1.four.example.com.')
# address and domain name should not be changed!
misc.test_procedure()
srv_msg.dns_question_record('client1.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', '192.168.50.10')
srv_msg.dns_option_content('ANSWER', 'rrname', 'client1.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', 'client1.four.example.com.')
srv_msg.dns_option_content('ANSWER', 'rrname', '10.50.168.192.in-addr.arpa.')
misc.test_procedure()
srv_msg.dns_question_record('client2.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.dns_question_record('11.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.forward_reverse_add
def test_ddns4_notsig_forw_and_rev_add_success_withoutflag_override_client():
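    # With override-client-update enabled, the server should perform the DDNS
    # updates itself even though the client did not set the S flag, and reply
    # with FQDN flags 3 (S and O bits set), as checked below.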
misc.test_setup()
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.10-192.168.50.10')
srv_control.add_ddns_server('127.0.0.1', '53001')
srv_control.add_ddns_server_options('override-client-update', True)
srv_control.add_ddns_server_options('enable-updates', True)
srv_control.add_ddns_server_options('generated-prefix', 'four')
srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
srv_control.use_dns_set_number(20)
srv_control.start_srv('DNS', 'started')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
srv_msg.response_check_option_content(81, 'flags', 3)
srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', '192.168.50.10')
srv_msg.dns_option_content('ANSWER', 'rrname', 'aa.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.')
srv_msg.dns_option_content('ANSWER', 'rrname', '10.50.168.192.in-addr.arpa.')
@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.reverse_add
def test_ddns4_notsig_rev_success_withoutflag():
misc.test_setup()
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.10-192.168.50.10')
srv_control.add_ddns_server('127.0.0.1', '53001')
srv_control.add_ddns_server_options('enable-updates', True)
srv_control.add_ddns_server_options('generated-prefix', 'four')
srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
srv_control.use_dns_set_number(20)
srv_control.start_srv('DNS', 'started')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
srv_msg.response_check_option_content(81, 'flags', 0)
srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.')
srv_msg.dns_option_content('ANSWER', 'rrname', '10.50.168.192.in-addr.arpa.')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.reverse_add
def test_ddns4_notsig_rev_withoutflag_notenabled():
misc.test_setup()
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.10-192.168.50.10')
srv_control.add_ddns_server('127.0.0.1', '53001')
srv_control.add_ddns_server_options('enable-updates', False)
srv_control.add_ddns_server_options('generated-prefix', 'four')
srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
srv_control.use_dns_set_number(20)
srv_control.start_srv('DNS', 'started')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
    # Response option 81 MUST contain flags 0. TODO: later check the 'S', 'N' and 'O' bits separately ('N' and 'O' should be 10).
srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.reverse_add
def test_ddns4_notsig_rev_Nflag_override_no_update():
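    # With override-no-update enabled, the server should ignore the client's N
    # flag, perform the DDNS updates anyway, and reply with FQDN flags 3
    # (S and O bits set), as checked below.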
misc.test_setup()
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.10-192.168.50.10')
srv_control.add_ddns_server('127.0.0.1', '53001')
srv_control.add_ddns_server_options('override-no-update', True)
srv_control.add_ddns_server_options('enable-updates', True)
srv_control.add_ddns_server_options('generated-prefix', 'four')
srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
srv_control.use_dns_set_number(20)
srv_control.start_srv('DNS', 'started')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_flags', 'N')
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
srv_msg.response_check_option_content(81, 'flags', 3)
srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.')
srv_msg.dns_option_content('ANSWER', 'rrname', '10.50.168.192.in-addr.arpa.')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com.', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', '192.168.50.10')
srv_msg.dns_option_content('ANSWER', 'rrname', 'aa.four.example.com.')
| isc | 4,452,881,845,453,598,000 | 38.546935 | 91 | 0.673263 | false |
qedsoftware/commcare-hq | corehq/apps/settings/tests/test_utils.py | 1 | 1321 | import os
from django.test import SimpleTestCase
from corehq.apps.settings.utils import get_temp_file
class GetTempFileTests(SimpleTestCase):
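    # Assumed contract of get_temp_file(): it yields an (fd, name) pair and
    # removes the file on exit regardless of whether the caller closed,
    # ignored, or already deleted it. The tests below cover each case.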
def test_file_closed(self):
"""
Check that an error is not raised if the file is closed by the caller
"""
try:
with get_temp_file() as (fd, name):
os.close(fd)
except Exception as err:
self.fail('Failed with exception "{}"'.format(err))
else:
file_exists = os.access(name, os.F_OK)
self.assertFalse(file_exists)
def test_file_unused(self):
"""
Check that an error is not raised if the file is unused by the caller
"""
try:
with get_temp_file() as (fd, name):
pass
except Exception as err:
self.fail('Failed with exception "{}"'.format(err))
else:
file_exists = os.access(name, os.F_OK)
self.assertFalse(file_exists)
def test_file_deleted(self):
"""
Check that an error is not raised if the file is deleted by the caller
"""
try:
with get_temp_file() as (fd, name):
os.unlink(name)
except Exception as err:
self.fail('Failed with exception "{}"'.format(err))
| bsd-3-clause | 8,103,971,329,567,022,000 | 30.452381 | 78 | 0.55564 | false |
LooseTerrifyingSpaceMonkey/DecMeg2014 | src/benchmark_pooling.py | 1 | 3692 | """DecMeg2014 example code.
Simple prediction of the class labels of the test set by:
- pooling all the training trials of all subjects in one dataset.
- Extracting the MEG data in the first 500ms from when the
stimulus starts.
- Using a linear classifier (logistic regression).
"""
import numpy as np
from sklearn.linear_model import LogisticRegression
from scipy.io import loadmat
def create_features(XX, tmin, tmax, sfreq, tmin_original=-0.5):
"""Creation of the feature space:
- restricting the time window of MEG data to [tmin, tmax]sec.
- Concatenating the 306 timeseries of each trial in one long
vector.
- Normalizing each feature independently (z-scoring).
"""
print "Applying the desired time window."
beginning = np.round((tmin - tmin_original) * sfreq).astype(np.int)
end = np.round((tmax - tmin_original) * sfreq).astype(np.int)
XX = XX[:, :, beginning:end].copy()
print "2D Reshaping: concatenating all 306 timeseries."
XX = XX.reshape(XX.shape[0], XX.shape[1] * XX.shape[2])
print "Features Normalization."
XX -= XX.mean(0)
XX = np.nan_to_num(XX / XX.std(0))
return XX
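# Illustrative shapes (assuming 250 Hz sampling and a 0.5 s window): an input
# XX of shape (trials, 306, time) becomes a z-scored 2-D array of shape
# (trials, 306 * 125).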
if __name__ == '__main__':
print "DecMeg2014: https://www.kaggle.com/c/decoding-the-human-brain"
print
subjects_train = range(1, 7) # use range(1, 17) for all subjects
print "Training on subjects", subjects_train
# We throw away all the MEG data outside the first 0.5sec from when
# the visual stimulus start:
tmin = 0.0
tmax = 0.500
print "Restricting MEG data to the interval [%s, %s]sec." % (tmin, tmax)
X_train = []
y_train = []
X_test = []
ids_test = []
print
print "Creating the trainset."
for subject in subjects_train:
filename = '../data/mat/train_subject%02d.mat' % subject
print "Loading", filename
data = loadmat(filename, squeeze_me=True)
XX = data['X']
yy = data['y']
sfreq = data['sfreq']
tmin_original = data['tmin']
print "Dataset summary:"
print "XX:", XX.shape
print "yy:", yy.shape
print "sfreq:", sfreq
XX = create_features(XX, tmin, tmax, sfreq)
X_train.append(XX)
y_train.append(yy)
X_train = np.vstack(X_train)
y_train = np.concatenate(y_train)
print "Trainset:", X_train.shape
print
print "Creating the testset."
subjects_test = range(17, 24)
for subject in subjects_test:
filename = '../data/mat/test_subject%02d.mat' % subject
print "Loading", filename
data = loadmat(filename, squeeze_me=True)
XX = data['X']
ids = data['Id']
sfreq = data['sfreq']
tmin_original = data['tmin']
print "Dataset summary:"
print "XX:", XX.shape
print "ids:", ids.shape
print "sfreq:", sfreq
XX = create_features(XX, tmin, tmax, sfreq)
X_test.append(XX)
ids_test.append(ids)
X_test = np.vstack(X_test)
ids_test = np.concatenate(ids_test)
print "Testset:", X_test.shape
print
clf = LogisticRegression(random_state=0) # Beware! You need 10Gb RAM to train LogisticRegression on all 16 subjects!
print "Classifier:"
print clf
print "Training."
clf.fit(X_train, y_train)
print "Predicting."
y_pred = clf.predict(X_test)
print
filename_submission = "../output/submissionBenchmarkPooling25s.csv"
print "Creating submission file", filename_submission
f = open(filename_submission, "w")
print >> f, "Id,Prediction"
for i in range(len(y_pred)):
print >> f, str(ids_test[i]) + "," + str(y_pred[i])
f.close()
print "Done." | gpl-2.0 | 7,230,487,808,953,025,000 | 29.270492 | 120 | 0.623239 | false |
karolciba/playground | markov/baumwelch.py | 1 | 4027 | import numpy as np
# functions and classes go here
def fb_alg(A_mat, O_mat, observ):
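    # Assumed inputs: A_mat is the (n x n) state-transition matrix, O_mat the
    # (n x m) emission matrix, and observ a 1-D array of integer observation
    # symbols in [0, m).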
# set up
k = observ.size
(n,m) = O_mat.shape
prob_mat = np.zeros( (n,k) )
fw = np.zeros( (n,k+1) )
bw = np.zeros( (n,k+1) )
# forward part
fw[:, 0] = 1.0/n
for obs_ind in xrange(k):
f_row_vec = np.matrix(fw[:,obs_ind])
fw[:, obs_ind+1] = f_row_vec * \
np.matrix(A_mat) * \
np.matrix(np.diag(O_mat[:,observ[obs_ind]]))
fw[:,obs_ind+1] = fw[:,obs_ind+1]/np.sum(fw[:,obs_ind+1])
# backward part
bw[:,-1] = 1.0
for obs_ind in xrange(k, 0, -1):
b_col_vec = np.matrix(bw[:,obs_ind]).transpose()
bw[:, obs_ind-1] = (np.matrix(A_mat) * \
np.matrix(np.diag(O_mat[:,observ[obs_ind-1]])) * \
b_col_vec).transpose()
bw[:,obs_ind-1] = bw[:,obs_ind-1]/np.sum(bw[:,obs_ind-1])
# combine it
prob_mat = np.array(fw)*np.array(bw)
prob_mat = prob_mat/np.sum(prob_mat, 0)
# get out
return prob_mat, fw, bw
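# Minimal usage sketch (hypothetical values):
#   A = np.array([[0.9, 0.1], [0.1, 0.9]])
#   O = np.array([[0.8, 0.2], [0.2, 0.8]])
#   P, F, B = fb_alg(A, O, np.array([0, 1, 0]))
# P[:, t] is then the posterior state distribution at step t.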
def baum_welch( num_states, num_obs, observ ):
# allocate
# A_mat = np.ones( (num_states, num_states) )
A_mat = np.random.random( (num_states, num_states) )
A_mat = A_mat / np.sum(A_mat,1)[:,None]
# O_mat = np.ones( (num_states, num_obs) )
O_mat = np.random.random( (num_states, num_obs) )
O_mat = O_mat / np.sum(O_mat,1)[:,None]
theta = np.zeros( (num_states, num_states, observ.size) )
while True:
old_A = A_mat
old_O = O_mat
A_mat = np.ones( (num_states, num_states) )
O_mat = np.ones( (num_states, num_obs) )
# A_mat = np.random.random( (num_states, num_states) )
# A_mat = A_mat / np.sum(A_mat,1)[:,None]
# O_mat = np.random.random( (num_states, num_obs) )
# O_mat = O_mat / np.sum(O_mat,1)[:,None]
# expectation step, forward and backward probs
P,F,B = fb_alg( old_A, old_O, observ)
# need to get transitional probabilities at each time step too
for a_ind in xrange(num_states):
for b_ind in xrange(num_states):
for t_ind in xrange(observ.size):
theta[a_ind,b_ind,t_ind] = \
F[a_ind,t_ind] * \
B[b_ind,t_ind+1] * \
old_A[a_ind,b_ind] * \
old_O[b_ind, observ[t_ind]]
# form A_mat and O_mat
for a_ind in xrange(num_states):
for b_ind in xrange(num_states):
A_mat[a_ind, b_ind] = np.sum( theta[a_ind, b_ind, :] )/ \
np.sum(P[a_ind,:])
        A_mat = A_mat / np.sum(A_mat, 1)[:, None]  # row-normalize, as in the initialization above
for a_ind in xrange(num_states):
for o_ind in xrange(num_obs):
right_obs_ind = np.array(np.where(observ == o_ind))+1
O_mat[a_ind, o_ind] = np.sum(P[a_ind,right_obs_ind])/ \
np.sum( P[a_ind,1:])
        O_mat = O_mat / np.sum(O_mat, 1)[:, None]  # row-normalize, as in the initialization above
# compare
if np.linalg.norm(old_A-A_mat) < .00001 and np.linalg.norm(old_O-O_mat) < .00001:
break
# get out
return A_mat, O_mat
import casino
num_obs = 100
g = casino.casino()
observations1 = [ 1 if g.next()[0].name == 'H' else 0 for x in xrange(num_obs) ]
observations1 = np.array(observations1)
# observations1 = np.random.randn( num_obs )
# observations1[observations1>0] = 1
# observations1[observations1<=0] = 0
# import pdb; pdb.set_trace()
A_mat, O_mat = baum_welch(2,2,observations1)
print "observation 1"
print observations1[:30]
print "trans"
print A_mat
print "emiss"
print O_mat
# observations2 = np.random.random(num_obs)
# observations2[observations2>.15] = 1
# observations2[observations2<=.85] = 0
# A_mat, O_mat = baum_welch(2,2,observations2)
# print "observations2"
# print observations2[:30]
# print A_mat
# print O_mat
# A_mat, O_mat = baum_welch(2,2,np.hstack( (observations1, observations2) ) )
# print A_mat
# print O_mat
| unlicense | -5,241,369,529,496,385,000 | 36.635514 | 89 | 0.534145 | false |
iotile/coretools | transport_plugins/awsiot/iotile_transport_awsiot/device_adapter.py | 1 | 21488 | import os
import binascii
import base64
import datetime
import logging
import queue
import uuid
from iotile.core.exceptions import IOTileException, ArgumentError, HardwareError
from iotile.core.hw.transport.adapter import DeviceAdapter
from iotile.core.hw.reports.parser import IOTileReportParser
from iotile.core.dev.registry import ComponentRegistry
from .mqtt_client import OrderedAWSIOTClient
from .topic_validator import MQTTTopicValidator
from .connection_manager import ConnectionManager
from . import messages
class AWSIOTDeviceAdapter(DeviceAdapter):
"""A device adapter allowing connections to devices over AWS IoT
Args:
        port (string): An optional port string specifying a topic prefix
to use if we are trying to connect to a gateway, otherwise,
we assume that we're connecting directly to a device that
is attached to AWS IoT.
"""
def __init__(self, port):
super(AWSIOTDeviceAdapter, self).__init__()
self.set_config('default_timeout', 5.0)
reg = ComponentRegistry()
endpoint = reg.get_config('awsiot-endpoint')
rootcert = reg.get_config('awsiot-rootcert')
iamuser = reg.get_config('awsiot-iamkey')
iamsecret = reg.get_config('awsiot-iamtoken')
iamsession = reg.get_config('awsiot-session', default=None)
args = {}
args['endpoint'] = endpoint
args['root_certificate'] = rootcert
args['use_websockets'] = True
args['iam_key'] = iamuser
args['iam_secret'] = iamsecret
args['iam_session'] = iamsession
self._logger = logging.getLogger(__name__)
# Port should be a topic prefix that allows us to connect
# only to subset of IOTile devices managed by a gateway
# rather than to directly accessible iotile devices.
if port is None:
port = ""
if len(port) > 0 and port[-1] != '/':
port = port + '/'
self.client = OrderedAWSIOTClient(args)
self.name = str(uuid.uuid4())
self.client.connect(self.name)
self.prefix = port
self.conns = ConnectionManager(self.id)
self.conns.start()
self.client.subscribe(self.prefix + 'devices/+/data/advertisement', self._on_advertisement, ordered=False)
self._deferred = queue.Queue()
self.set_config('minimum_scan_time', 5.0)
self.set_config('probe_supported', True)
self.set_config('probe_required', True)
self.mtu = self.get_config('mtu', 60*1024) # Split script payloads larger than this
self.report_parser = IOTileReportParser()
def connect_async(self, connection_id, connection_string, callback):
"""Connect to a device by its connection_string
        This function looks for the device on AWS IoT using the preconfigured
topic prefix and looking for:
<prefix>/devices/connection_string
It then attempts to lock that device for exclusive access and
returns a callback if successful.
Args:
connection_id (int): A unique integer set by the caller for referring to this connection
once created
connection_string (string): A device id of the form d--XXXX-YYYY-ZZZZ-WWWW
callback (callable): A callback function called when the connection has succeeded or
failed
"""
topics = MQTTTopicValidator(self.prefix + 'devices/{}'.format(connection_string))
key = self._generate_key()
name = self.name
conn_message = {'type': 'command', 'operation': 'connect', 'key': key, 'client': name}
context = {'key': key, 'slug': connection_string, 'topics': topics}
self.conns.begin_connection(connection_id, connection_string, callback, context, self.get_config('default_timeout'))
self._bind_topics(topics)
try:
self.client.publish(topics.connect, conn_message)
except IOTileException:
self._unbind_topics(topics)
self.conns.finish_connection(connection_id, False, 'Failed to send connection message')
def disconnect_async(self, conn_id, callback):
"""Asynchronously disconnect from a device that has previously been connected
Args:
conn_id (int): a unique identifier for this connection on the DeviceManager
that owns this adapter.
callback (callable): A function called as callback(conn_id, adapter_id, success, failure_reason)
when the disconnection finishes. Disconnection can only either succeed or timeout.
"""
try:
context = self.conns.get_context(conn_id)
except ArgumentError:
callback(conn_id, self.id, False, "Could not find connection information")
return
self.conns.begin_disconnection(conn_id, callback, self.get_config('default_timeout'))
topics = context['topics']
disconn_message = {'key': context['key'], 'client': self.name, 'type': 'command', 'operation': 'disconnect'}
self.client.publish(topics.action, disconn_message)
def send_script_async(self, conn_id, data, progress_callback, callback):
"""Asynchronously send a a script to this IOTile device
Args:
            conn_id (int): A unique identifier that will refer to this connection
data (string): the script to send to the device
progress_callback (callable): A function to be called with status on our progress, called as:
progress_callback(done_count, total_count)
callback (callable): A callback for when we have finished sending the script. The callback will be called as
callback(connection_id, adapter_id, success, failure_reason)
'connection_id': the connection id
'adapter_id': this adapter's id
'success': a bool indicating whether we received a response to our attempted RPC
'failure_reason': a string with the reason for the failure if success == False
"""
try:
context = self.conns.get_context(conn_id)
except ArgumentError:
callback(conn_id, self.id, False, "Could not find connection information")
return
topics = context['topics']
context['progress_callback'] = progress_callback
self.conns.begin_operation(conn_id, 'script', callback, 60.0)
chunks = 1
if len(data) > self.mtu:
chunks = len(data) // self.mtu
if len(data) % self.mtu != 0:
chunks += 1
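        # e.g. with the default 60 KB MTU, a 150 KB script is sent as 3 fragments.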
# Send the script out possibly in multiple chunks if it's larger than our maximum transmit unit
for i in range(0, chunks):
start = i*self.mtu
chunk = data[start:start + self.mtu]
encoded = base64.standard_b64encode(chunk)
script_message = {'key': context['key'], 'client': self.name, 'type': 'command', 'operation': 'send_script',
'script': encoded, 'fragment_count': chunks, 'fragment_index': i}
self.client.publish(topics.action, script_message)
def send_rpc_async(self, conn_id, address, rpc_id, payload, timeout, callback):
"""Asynchronously send an RPC to this IOTile device
Args:
conn_id (int): A unique identifier that will refer to this connection
address (int): the address of the tile that we wish to send the RPC to
rpc_id (int): the 16-bit id of the RPC we want to call
payload (bytearray): the payload of the command
timeout (float): the number of seconds to wait for the RPC to execute
            callback (callable): A callback for when we have finished the RPC. The callback will be called as:
callback(connection_id, adapter_id, success, failure_reason, status, payload)
'connection_id': the connection id
'adapter_id': this adapter's id
'success': a bool indicating whether we received a response to our attempted RPC
'failure_reason': a string with the reason for the failure if success == False
'status': the one byte status code returned for the RPC if success == True else None
'payload': a bytearray with the payload returned by RPC if success == True else None
"""
try:
context = self.conns.get_context(conn_id)
except ArgumentError:
callback(conn_id, self.id, False, "Could not find connection information", 0xFF, bytearray())
return
self.conns.begin_operation(conn_id, 'rpc', callback, timeout)
topics = context['topics']
encoded_payload = binascii.hexlify(payload)
rpc_message = {'key': context['key'], 'client': self.name, 'type': 'command', 'operation': 'rpc',
'address': address, 'rpc_id': rpc_id, 'payload': encoded_payload, 'timeout': timeout}
self.client.publish(topics.action, rpc_message)
def _open_rpc_interface(self, conn_id, callback):
"""Enable RPC interface for this IOTile device
Args:
conn_id (int): the unique identifier for the connection
callback (callback): Callback to be called when this command finishes
callback(conn_id, adapter_id, success, failure_reason)
"""
self._open_interface(conn_id, 'rpc', callback)
def _open_streaming_interface(self, conn_id, callback):
"""Enable streaming interface for this IOTile device
Args:
conn_id (int): the unique identifier for the connection
callback (callback): Callback to be called when this command finishes
callback(conn_id, adapter_id, success, failure_reason)
"""
self._open_interface(conn_id, 'streaming', callback)
def _open_tracing_interface(self, conn_id, callback):
"""Enable tracing interface for this IOTile device
Args:
conn_id (int): the unique identifier for the connection
callback (callback): Callback to be called when this command finishes
callback(conn_id, adapter_id, success, failure_reason)
"""
self._open_interface(conn_id, 'tracing', callback)
def _open_script_interface(self, conn_id, callback):
"""Enable script interface for this IOTile device
Args:
conn_id (int): the unique identifier for the connection
callback (callback): Callback to be called when this command finishes
callback(conn_id, adapter_id, success, failure_reason)
"""
self._open_interface(conn_id, 'script', callback)
def _open_interface(self, conn_id, iface, callback):
"""Open an interface on this device
Args:
conn_id (int): the unique identifier for the connection
iface (string): the interface name to open
callback (callback): Callback to be called when this command finishes
callback(conn_id, adapter_id, success, failure_reason)
"""
try:
context = self.conns.get_context(conn_id)
except ArgumentError:
callback(conn_id, self.id, False, "Could not find connection information")
return
self.conns.begin_operation(conn_id, 'open_interface', callback, self.get_config('default_timeout'))
topics = context['topics']
open_iface_message = {'key': context['key'], 'type': 'command', 'operation': 'open_interface', 'client': self.name, 'interface': iface}
self.client.publish(topics.action, open_iface_message)
def stop_sync(self):
"""Synchronously stop this adapter
"""
conn_ids = self.conns.get_connections()
# If we have any open connections, try to close them here before shutting down
for conn in list(conn_ids):
try:
self.disconnect_sync(conn)
except HardwareError:
pass
self.client.disconnect()
self.conns.stop()
def probe_async(self, callback):
"""Probe for visible devices connected to this DeviceAdapter.
Args:
callback (callable): A callback for when the probe operation has completed.
callback should have signature callback(adapter_id, success, failure_reason) where:
success: bool
failure_reason: None if success is True, otherwise a reason for why we could not probe
"""
topics = MQTTTopicValidator(self.prefix)
self.client.publish(topics.probe, {'type': 'command', 'operation': 'probe', 'client': self.name})
callback(self.id, True, None)
def periodic_callback(self):
"""Periodically help maintain adapter internal state
"""
while True:
try:
action = self._deferred.get(False)
action()
except queue.Empty:
break
except Exception:
self._logger.exception('Exception in periodic callback')
def _bind_topics(self, topics):
"""Subscribe to all the topics we need to communication with this device
Args:
topics (MQTTTopicValidator): The topic validator for this device that
we are connecting to.
"""
# FIXME: Allow for these subscriptions to fail and clean up the previous ones
# so that this function is atomic
self.client.subscribe(topics.status, self._on_status_message)
self.client.subscribe(topics.tracing, self._on_trace)
self.client.subscribe(topics.streaming, self._on_report)
self.client.subscribe(topics.response, self._on_response_message)
def _unbind_topics(self, topics):
"""Unsubscribe to all of the topics we needed for communication with device
Args:
topics (MQTTTopicValidator): The topic validator for this device that
we have connected to.
"""
self.client.unsubscribe(topics.status)
self.client.unsubscribe(topics.tracing)
self.client.unsubscribe(topics.streaming)
self.client.unsubscribe(topics.response)
def _generate_key(self):
"""Generate a random 32 byte key and encode it in hex
Returns:
string: Cryptographically random 64 character string
"""
key = os.urandom(32)
return binascii.hexlify(key)
def _find_connection(self, topic):
"""Attempt to find a connection id corresponding with a topic
The device is found by assuming the topic ends in <slug>/[control|data]/channel
Args:
topic (string): The topic we received a message on
Returns:
            string: The internal connection key (device slug) associated with this topic
"""
parts = topic.split('/')
if len(parts) < 3:
return None
slug = parts[-3]
return slug
def _on_advertisement(self, sequence, topic, message):
try:
# FIXME: We need a global topic validator to validate these messages
# message = self.topics.validate_message(['advertisement'], message_type, message)
del message['operation']
del message['type']
self._trigger_callback('on_scan', self.id, message, 60.) # FIXME: Get the timeout from somewhere
except IOTileException as exc:
pass
def _on_report(self, sequence, topic, message):
"""Process a report received from a device.
Args:
sequence (int): The sequence number of the packet received
topic (string): The topic this message was received on
message (dict): The message itself
"""
try:
conn_key = self._find_connection(topic)
conn_id = self.conns.get_connection_id(conn_key)
except ArgumentError:
self._logger.warn("Dropping report message that does not correspond with a known connection, topic=%s", topic)
return
try:
rep_msg = messages.ReportNotification.verify(message)
serialized_report = {}
serialized_report['report_format'] = rep_msg['report_format']
serialized_report['encoded_report'] = rep_msg['report']
serialized_report['received_time'] = datetime.datetime.strptime(rep_msg['received_time'].encode().decode(), "%Y%m%dT%H:%M:%S.%fZ")
report = self.report_parser.deserialize_report(serialized_report)
self._trigger_callback('on_report', conn_id, report)
except Exception:
self._logger.exception("Error processing report conn_id=%d", conn_id)
def _on_trace(self, sequence, topic, message):
"""Process a trace received from a device.
Args:
sequence (int): The sequence number of the packet received
topic (string): The topic this message was received on
message (dict): The message itself
"""
try:
conn_key = self._find_connection(topic)
conn_id = self.conns.get_connection_id(conn_key)
except ArgumentError:
self._logger.warn("Dropping trace message that does not correspond with a known connection, topic=%s", topic)
return
try:
tracing = messages.TracingNotification.verify(message)
self._trigger_callback('on_trace', conn_id, tracing['trace'])
except Exception:
self._logger.exception("Error processing trace conn_id=%d", conn_id)
def _on_status_message(self, sequence, topic, message):
"""Process a status message received
Args:
sequence (int): The sequence number of the packet received
topic (string): The topic this message was received on
message (dict): The message itself
"""
self._logger.debug("Received message on (topic=%s): %s" % (topic, message))
try:
conn_key = self._find_connection(topic)
except ArgumentError:
self._logger.warn("Dropping message that does not correspond with a known connection, message=%s", message)
return
if messages.ConnectionResponse.matches(message):
if self.name != message['client']:
self._logger.debug("Connection response received for a different client, client=%s, name=%s", message['client'], self.name)
return
self.conns.finish_connection(conn_key, message['success'], message.get('failure_reason', None))
else:
self._logger.warn("Dropping message that did not correspond with a known schema, message=%s", message)
def _on_response_message(self, sequence, topic, message):
"""Process a response message received
Args:
sequence (int): The sequence number of the packet received
topic (string): The topic this message was received on
message (dict): The message itself
"""
try:
conn_key = self._find_connection(topic)
context = self.conns.get_context(conn_key)
except ArgumentError:
self._logger.warn("Dropping message that does not correspond with a known connection, message=%s", message)
return
if 'client' in message and message['client'] != self.name:
self._logger.debug("Dropping message that is for another client %s, we are %s", message['client'], self.name)
if messages.DisconnectionResponse.matches(message):
self.conns.finish_disconnection(conn_key, message['success'], message.get('failure_reason', None))
elif messages.OpenInterfaceResponse.matches(message):
self.conns.finish_operation(conn_key, message['success'], message.get('failure_reason', None))
elif messages.RPCResponse.matches(message):
rpc_message = messages.RPCResponse.verify(message)
self.conns.finish_operation(conn_key, rpc_message['success'], rpc_message.get('failure_reason', None), rpc_message.get('status', None), rpc_message.get('payload', None))
elif messages.ProgressNotification.matches(message):
progress_callback = context.get('progress_callback', None)
if progress_callback is not None:
progress_callback(message['done_count'], message['total_count'])
elif messages.ScriptResponse.matches(message):
if 'progress_callback' in context:
del context['progress_callback']
self.conns.finish_operation(conn_key, message['success'], message.get('failure_reason', None))
elif messages.DisconnectionNotification.matches(message):
try:
conn_key = self._find_connection(topic)
conn_id = self.conns.get_connection_id(conn_key)
except ArgumentError:
self._logger.warn("Dropping disconnect notification that does not correspond with a known connection, topic=%s", topic)
return
self.conns.unexpected_disconnect(conn_key)
self._trigger_callback('on_disconnect', self.id, conn_id)
else:
self._logger.warn("Invalid response message received, message=%s", message)
| gpl-3.0 | 8,618,029,404,942,219,000 | 40.562863 | 181 | 0.624721 | false |
schef/schef.github.io | source/14/mc-14-04-whf-pid.py | 1 | 2581 | #!/usr/bin/python
# Written by Stjepan Horvat
# ( [email protected] )
# by the exercises from David Lucas Burge - Perfect Pitch Ear Training Supercourse
# Thanks to Wojciech M. Zabolotny ( [email protected] ) for snd-virmidi example
# ( [email protected] )
import random
import sys
sys.path.append("/home/schef/github/schef.github.io/source/")
from pptraning import *
print ("Exercise: 14-03")
print ("White harmonic fours. Pitch indentification drill. OVR.")
runda = 0
try:
while True:
runda += 1
print ("Possible commands: 1-again, 2-play, 3-next, 4-compare-to-c:")
while True:
notes = []
for i in range(0, 4):
notes.append(random.choice(whiteNotes[7:28]))
# if len(list(set(notes))) == 4:
# break;
if (len(list(set(notes))) == 4 \
and (notes[0]%12 != notes[1]%12) \
and (notes[0]%12 != notes[2]%12) \
and (notes[0]%12 != notes[3]%12) \
and (notes[1]%12 != notes[2]%12) \
and (notes[1]%12 != notes[3]%12) \
and (notes[2]%12 != notes[3]%12) \
):
break;
#notes.sort()
match = False
noteError = None
while not match: #here starts the practice
done = False
#playFourNotes(notes)
playNote(notes[0])
playNote(notes[1])
playNote(notes[2])
playNote(notes[3])
while not done:
n = input("? ")
if n =="1":
#playFourNotes(notes)
playNote(notes[0])
playNote(notes[1])
playNote(notes[2])
playNote(notes[3])
elif n == "3":
print ("Next")
print (str(runda) + ". round.")
done = True
match = True
elif n =="5":
print (num2Name(notes[0]), num2Name(notes[1]), num2Name(notes[2]), num2Name(notes[3]))
elif n =="4":
print ("C the comparrer")
playNote(name2Num("c"))
elif n =="2":
print(num2Name(notes[0]), num2Name(notes[1]), num2Name(notes[2]), num2Name(notes[3]))
elif re.compile("^[0-3] [0-3]$").match(n):
splited = n.split()
playTwoNotes(notes[int(splited[0])], notes[int(splited[1])])
elif re.compile("^[0-3] [0-3] [0-3]$").match(n):
splited = n.split()
playThreeNotes(notes[int(splited[0])], notes[int(splited[1])], notes[int(splited[2])])
elif splitFour.match(n):
splitNote = n.split()
if splitNote[0] == num2Name(notes[0]).lower() and splitNote[1] == num2Name(notes[1]).lower() and splitNote[2] == num2Name(notes[2]).lower() and splitNote[3] == num2Name(notes[3]).lower():
print ("Next")
print (str(runda) + ". round.")
done = True
match = True
except KeyboardInterrupt:
pass
| mit | 1,993,438,828,375,399,400 | 30.47561 | 193 | 0.590469 | false |
Zephor5/zspider | zspider/crawler.py | 1 | 5759 | # coding=utf-8
import json
import logging
from queue import Queue
from pooled_pika import PooledConn
from scrapy.crawler import CrawlerProcess
from scrapy.settings import Settings
from scrapy.utils.log import log_scrapy_info
from scrapy.utils.ossignal import install_shutdown_handlers
from twisted.internet import defer
from twisted.internet.error import ConnectionDone
from zspider.confs.conf import AMQP_PARAM
from zspider.confs.conf import EXCHANGE_PARAMS
from zspider.confs.conf import TASK_BIND_PARAMS
from zspider.confs.conf import TASK_Q_PARAMS
__author__ = "zephor"
logger = logging.getLogger("crawler")
class TestCrawler(CrawlerProcess):
def __init__(self):
from zspider.confs import crawl_conf as p_settings
settings = Settings()
settings.setmodule(p_settings)
super(CrawlerProcess, self).__init__(settings)
self.task_q = defer.DeferredQueue()
self.res_q = Queue()
self.task_q.get().addCallback(self.crawl)
def crawl(self, kwargs):
spider_name = kwargs.pop("spider_name", "")
crawler = self._create_crawler(spider_name)
self.crawlers.add(crawler)
d = crawler.crawl(**kwargs)
self._active.add(d)
def _done(_):
self.crawlers.discard(crawler)
self._active.discard(d)
try:
result = crawler.spider.test_result
del crawler.spider.test_result
except AttributeError:
result = None # spider may be None in case Failure
self.res_q.put(result)
return _
d.addBoth(_done)
d.addErrback(lambda _: logger.error(_))
d.addCallback(lambda _: self.task_q.get().addCallback(self.crawl))
return d
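# Rough usage sketch (task keys assumed from CrawlerDaemon._on_msg below):
#   c = TestCrawler()
#   c.task_q.put({'spider_name': 'some_spider', 'parser': parser, 'task_id': 1})
#   result = c.res_q.get()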
def debug(_=None):
"""
for debug use
"""
import objgraph
# with open('logs/test', 'w') as f:
# objs = objgraph.get_leaking_objects()
# for o in objs:
# f.write('%s\n' % o.encode('utf-8') if isinstance(o, unicode) else str(o))
leak_ref = objgraph.by_type("Newspaper")
objgraph.show_backrefs(leak_ref, max_depth=10, filename="my_leak.png")
class CrawlerDaemon(CrawlerProcess):
def __init__(self):
from zspider.confs import crawl_conf as p_settings
settings = Settings()
settings.setmodule(p_settings)
super(CrawlerProcess, self).__init__(
settings
        )  # skip CrawlerProcess's initial logging setup; init.py handles it
install_shutdown_handlers(self._signal_shutdown)
log_scrapy_info(self.settings)
self.__task_queue = None
self._pconn = PooledConn(AMQP_PARAM)
self._set_up()
def _set_up(self, _=None):
d = self._pconn.acquire()
d.addCallbacks(self._on_conn, self._on_err_conn)
d.addErrback(self._on_err)
@defer.inlineCallbacks
def _on_conn(self, conn):
# in case the connection is lost; mostly closed by the mq server
conn.ready.addErrback(self.__clear)
conn.ready.addCallback(self._set_up)
self._conn = conn
channel = self._channel = yield conn.channel()
# do some setup
yield channel.exchange_declare(**EXCHANGE_PARAMS)
yield channel.queue_declare(**TASK_Q_PARAMS)
yield channel.queue_bind(**TASK_BIND_PARAMS)
self.__task_queue, consumer_tag = yield channel.basic_consume(
queue=TASK_Q_PARAMS["queue"], auto_ack=False
)
yield self._on_get()
@staticmethod
def _on_err_conn(err):
logger.fatal(err)
@staticmethod
def _on_err(err):
if err.type is ConnectionDone:
logger.info("connection lost when waiting, handled..")
else:
logger.error(err)
@defer.inlineCallbacks
def _on_get(self):
ch, method, properties, body = yield self.__task_queue.get()
d = self._on_msg(body)
yield ch.basic_ack(delivery_tag=method.delivery_tag)
if isinstance(d, defer.Deferred):
self._channel.close()
self._pconn.release(self._conn)
d.addCallback(self._set_up)
else:
d = self._on_get()
yield d
def _on_msg(self, body):
logger.info("_on_msg %s" % body)
try:
msg = json.loads(body)
self.settings.set("COOKIES_ENABLED", msg["is_login"], "spider")
d = self.crawl(
msg["spider"],
parser=msg["parser"],
task_id=msg["id"],
task_name=msg["name"],
)
# d.addCallback(lambda som: reactor.callLater(2, debug))
d.addErrback(lambda err: logger.error(err))
except Exception as e:
logger.error(repr(e))
if len(self._active) > 1:
return self.join()
def __clear(self, _=None):
if self.__task_queue is not None:
self.__task_queue.close(ConnectionDone("done"))
def crawl(self, spider_name, *args, **kwargs):
crawler = self._create_crawler(spider_name)
self.crawlers.add(crawler)
d = crawler.crawl(*args, **kwargs)
self._active.add(d)
def _done(result):
self.crawlers.discard(crawler)
self._active.discard(d)
# parser may hold large memory, release it manually
try:
del crawler.spider.parser
except AttributeError:
pass # spider may be None in case Failure
return result
return d.addBoth(_done)
def main():
from zspider import init
init.init("crawler")
if init.done:
p = CrawlerDaemon()
p.start(stop_after_crawl=False)
if __name__ == "__main__":
main()
| mit | -4,324,731,660,645,133,300 | 29.333333 | 87 | 0.593232 | false |
andymckay/zamboni | mkt/inapp/views.py | 1 | 3169 | import json
from django.db import transaction
from django.shortcuts import get_object_or_404
from rest_framework.permissions import AllowAny
from rest_framework.viewsets import ModelViewSet
import commonware.log
from mkt.api.authentication import (RestAnonymousAuthentication,
RestOAuthAuthentication,
RestSharedSecretAuthentication)
from mkt.api.authorization import AllowAuthor, ByHttpMethod
from mkt.api.base import CORSMixin, MarketplaceView
from mkt.inapp.models import InAppProduct
from mkt.inapp.serializers import InAppProductSerializer
from mkt.prices.models import Price
from mkt.webapps.models import Webapp
log = commonware.log.getLogger('z.inapp')
class InAppProductViewSet(CORSMixin, MarketplaceView, ModelViewSet):
serializer_class = InAppProductSerializer
cors_allowed_methods = ('get', 'post', 'put', 'patch', 'delete')
lookup_field = 'guid'
permission_classes = [ByHttpMethod({
'options': AllowAny, # Needed for CORS.
'get': AllowAny,
'post': AllowAuthor,
'put': AllowAuthor,
'patch': AllowAuthor,
})]
authentication_classes = [RestOAuthAuthentication,
RestSharedSecretAuthentication,
RestAnonymousAuthentication]
    def destroy(self, request, *args, **kwargs):
        raise NotImplementedError('destroy is not allowed')
def pre_save(self, in_app_product):
in_app_product.webapp = self.get_app()
def get_queryset(self):
return InAppProduct.objects.filter(webapp=self.get_app())
def get_app(self):
if not hasattr(self, 'app'):
self.app = get_object_or_404(Webapp,
app_domain=self.kwargs['origin'])
return self.app
def get_authors(self):
return self.get_app().authors.all()
class StubInAppProductViewSet(CORSMixin, MarketplaceView, ModelViewSet):
serializer_class = InAppProductSerializer
lookup_field = 'guid'
cors_allowed_methods = ('get',)
allowed_methods = ('GET',)
permission_classes = [AllowAny]
authentication_classes = []
def _queryset(self):
return InAppProduct.objects.filter(stub=True)
def get_queryset(self):
qs = self._queryset()
# Since caching count() is unreliable, this optimizes for the case of
# having already created stub products.
if not len(qs):
with transaction.atomic():
self._create_stub_products()
qs = self._queryset()
return qs
def _create_stub_products(self):
for name, amount in (('Kiwi', '0.99'),
('Unicorn', '1.99')):
log.info('Creating stub in-app product {n} {p}'
.format(n=name, p=amount))
# TODO: make this adjustable.
simulate = json.dumps({'result': 'postback'})
InAppProduct.objects.create(stub=True,
simulate=simulate,
name=name,
price=Price.objects.get(price=amount))
| bsd-3-clause | -7,194,169,135,948,572,000 | 35.011364 | 78 | 0.61218 | false |
do-mpc/do-mpc | documentation/source/release_overview.py | 1 | 1138 | import requests
import os
def get_overview():
# Use Github Rest API to get releases:
release_dict = requests.get('https://api.github.com/repos/do-mpc/do-mpc/releases').json()
text = ''
text += '# Release notes'
text += '\n'
    text += 'This content is autogenerated from our Github [release notes](https://github.com/do-mpc/do-mpc/releases).'
text += '\n'
for release_i in release_dict:
name_i = release_i['name']
body_i = release_i['body']
body_i = body_i.replace('# ', '### ')
print(name_i)
text += '## {}'.format(name_i)
text += '\n'
text += body_i
text += '\n'
try:
if release_i['assets']:
text += '### Example files'.format(name_i)
text += '\n'
text += 'Please download the example files for release {} [here]({}).'.format(name_i, release_i['assets'][0]['browser_download_url'])
text += '\n'
except:
            print("Couldn't provide download link for example files.")
with open('release_notes.md', 'w') as f:
f.write(text)
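# Assumed entry point: the docs build presumably calls get_overview() (e.g.
# from Sphinx's conf.py) to regenerate release_notes.md before building.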
| lgpl-3.0 | 2,997,779,202,889,332,700 | 28.179487 | 149 | 0.529877 | false |
PetterS/easy-IP | examples/run_nurses.py | 1 | 1028 | #!/usr/bin/env python3
from glob import glob
import os
# Set this to the location of NSPLib.
nsplib = r"C:\Users\Petter\Dropbox\Datasets\NSPLib"
def run_solver(data_set, case):
case_file = os.path.join(nsplib, "Cases", str(case) + ".gen")
log_file = data_set + "." + str(case) + ".output.log"
files = glob(os.path.join(nsplib, data_set, "*.nsp"))
names = [f.split(".")[0] for f in files]
names = [n.split(os.path.sep)[-1] for n in names]
nums = sorted([int(n) for n in names])
files = [os.path.join(nsplib, data_set, str(n) + ".nsp") for n in nums]
try:
os.unlink(log_file)
except FileNotFoundError:
pass
for f in files:
print(case_file)
print(f)
print(log_file)
# This may need to change depending on shell.
os.system("nurses " + f + " " + case_file + " >> " + log_file)
for data_set in ["N25", "N50", "N75", "N100"]:
for case in [1, 2, 3, 4, 5, 6, 7, 8]:
run_solver(data_set, case)
for data_set in ["N30", "N60"]:
for case in [9, 10, 11, 12, 13, 14, 15, 16]:
run_solver(data_set, case)
| bsd-2-clause | 2,813,238,958,239,927,000 | 26.052632 | 72 | 0.620623 | false |
mxOBS/deb-pkg_trusty_chromium-browser | native_client/src/trusted/validator_ragel/proof_tools.py | 1 | 15125 | # Copyright (c) 2014 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Tools and utilities for creating proofs about tries."""
import itertools
import multiprocessing
import optparse
import spec
import trie
import validator
class Operands(object):
"""Contains parts of the disassembly of a single instruction.
Also holds the implied restriction state.
input_rr means that register must have the MSB 32 bits 0 before the
instruction executes. Such a register can be used by this instruction
as the index register for a memory operation in x86_64. There can
only be one memory operand per instruction. Some AVX instructions allow
a vector register to be used as an index register, impling multiple
index values. However, we currently have no way to sandbox such instructions.
output_rr means that the instruction produces a restricted register, i.e
zeroes out the top 32 bits of a register.
Can also hold partial information about an instruction while incrementally
building up a full instruction.
e.g. vaddpd 0x0(%r15,%r11,8),%ymm3,%ymm2 in ATT syntax is represented as:
-> disasms: ('vaddpd', '0x0(%r15,%r11,8)', '%ymm3', '%ymm2')
-> input_rr: r11 (for x86_64) (or None for x86_32)
-> output_rr: None
When building up partial state, could be:
  e.g. just (disasms: ('0x0(%r15,%r11,8)', '%ymm3'),
  input_rr: '%r11', output_rr: None) from the example above.
"""
__slots__ = ('disasms', 'input_rr', 'output_rr')
def __init__(self, disasms=(), input_rr=None, output_rr=None):
assert isinstance(disasms, tuple), disasms
self.disasms = disasms
self.input_rr = input_rr
self.output_rr = output_rr
def __repr__(self):
return str((self.disasms, self.input_rr, self.output_rr))
def __eq__(self, other):
return (self.disasms == other.disasms and
self.input_rr == other.input_rr and
self.output_rr == other.output_rr)
def __hash__(self):
return hash((self.disasms,
self.input_rr,
self.output_rr))
def MergeOperands(ops1, ops2):
"""Combine two different Operands (disassembly parts and implications)."""
assert ops1.input_rr is None or ops2.input_rr is None
assert ops1.output_rr is None or ops2.output_rr is None
return Operands(ops1.disasms + ops2.disasms,
ops1.input_rr if ops1.input_rr else ops2.input_rr,
ops1.output_rr if ops1.output_rr else ops2.output_rr)
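# Illustrative sketch (editor's example, mirroring the Operands docstring
# above; not part of the original module):
#   mem = Operands(disasms=('0x0(%r15,%r11,8)',), input_rr='%r11')
#   reg = Operands(disasms=('%ymm3',))
#   merged = MergeOperands(mem, reg)
#   # merged.disasms == ('0x0(%r15,%r11,8)', '%ymm3')
#   # merged.input_rr == '%r11' and merged.output_rr is None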
def AllXMMOperands(bitness):
"""Returns the set of all XMM registers as individual Operands objects."""
assert bitness in (32, 64), bitness
return set([Operands(disasms=('%xmm{}'.format(i),))
for i in xrange(8 if bitness == 32 else 16)])
def AllYMMOperands(bitness):
"""Returns the set of all YMM registers as individual Operands objects."""
assert bitness in (32, 64), bitness
return set([Operands(disasms=('%ymm{}'.format(i),))
for i in xrange(8 if bitness == 32 else 16)])
def GprOperands(bitness, operand_size, is_write_for_64_bit=True,
can_restrict=False):
"""Returns all gpr operands as an operand set.
Args:
bitness: architecture bitness to distinguish x86_32/x86_64: (32, 64)
operand_size: size of register to be used in write.
is_write_for_64_bit: if bitness == 64, and operand_size == 64,
exclude special registers rsp, rbp, r15 for sandbox
reasons. If bitness == 64 and operand_size == 32,
                         exclude 'esp', 'ebp', and 'r15d' unless
                         can_restrict is set; if can_restrict is set,
                         exclude only 'r15d'.
can_restrict: if true and bitness == 64, and operand_size == 32, and
is_write_for_64_bit == True, disallow r15 write, and
produce restricted register.
"""
regs = []
operand_to_restriction_map = {
'%eax': '%rax', '%ebx' : '%rbx', '%ecx' : '%rcx', '%edx': '%rdx',
'%ebp': '%rbp', '%edi': '%rdi', '%esi': '%rsi', '%esp': '%rsp',
'%r8d': '%r8', '%r9d': '%r9', '%r10d' : '%r10', '%r11d': '%r11',
'%r12d': '%r12', '%r13d': '%r13', '%r14d' : '%r14',
}
restricts = False
if operand_size == 16 and bitness == 32:
regs = ['%ax', '%bx', '%cx', '%dx', '%bp', '%sp', '%di', '%si']
elif operand_size == 32 and bitness == 32:
regs = ['%eax', '%ebp', '%ebx', '%ecx', '%edi', '%edx', '%esi', '%esp']
elif bitness == 64 and operand_size == 32:
regs = ['%eax', '%ebx', '%ecx', '%edi', '%edx', '%esi',
'%r8d', '%r9d', '%r10d', '%r11d', '%r12d', '%r13d', '%r14d']
# Don't include '%ebp', '%esp', '%r15d' in allowed registers when
# is_write_for_64_bit == True.
if is_write_for_64_bit == False:
regs += ['%esp', '%ebp', '%r15d']
elif can_restrict == True:
regs += ['%esp', '%ebp']
restricts = True
elif bitness == 64 and operand_size == 64:
regs = ['%rax', '%rbx', '%rcx', '%rdi', '%rdx', '%rsi',
'%r8', '%r9', '%r10', '%r11', '%r12', '%r13', '%r14']
    # Don't include '%rbp', '%rsp', '%r15' in allowed registers when
# is_write_for_64_bit == True.
if is_write_for_64_bit == False:
regs += ['%rsp', '%rbp', '%r15']
else:
raise AssertionError("Unimplemented")
if restricts:
return set([
Operands(disasms=(reg,), output_rr=operand_to_restriction_map[reg])
for reg in regs])
else:
return set([Operands(disasms=(reg,)) for reg in regs])
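# Example (editor's note, derived from the restriction map above): with
# bitness=64, operand_size=32 and can_restrict=True, a write to '%eax'
# yields an entry whose output_rr is the restricted register '%rax':
#   ops = GprOperands(64, 32, is_write_for_64_bit=True, can_restrict=True)
#   assert Operands(disasms=('%eax',), output_rr='%rax') in ops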
def MnemonicOp(name):
"""Returns the mnemonic as an operand set."""
assert isinstance(name, str)
return set([Operands(disasms=(name,))])
def ImmOp():
"""Returns an immediate as an operand set."""
# When walking the DFA, immediates are currently returned as 0x0.
return set([Operands(disasms=('$0x0',))])
def LockPrefix():
"""Returns the lock prefix as an operand set."""
return set([Operands(disasms=('lock',))])
def MemoryOperandsTemplate(disp, base, index, scale, bitness):
"""Returns all the possible different memory operands using given parameters.
Returns list of Operands instances.
e.g. for disp='0x0', base='%eax', index='%ebx', scale=2
[ '(%ebx)', # Base Register Only
'0x0', # Displacement Only
    '(%ebx,%eax,2)', # Base Register + Index Register * scale.
'0x0(,%eax,2)', # Displacement + Index Register * scale.
'0x0(%ebx)', # Displacement + Base Register.
    '0x0(%ebx,%eax,2)', # Displacement + Base Register + Index Register * scale
]
Note that Base register must be used for x86_64.
Within the returned Operands objects, the input RR is set to the
index register if the index is used for x86_64.
Args:
disp: displacement to use in memory operand.
base: string register name to use for base register in addressing.
index: string register name to use for index register in addressing.
scale: integer scale to use to multiply index register by in addressing.
bitness: 32 or 64
Returns:
list of Operands instances representing all ways to use the parameters.
"""
assert bitness in (32, 64), bitness
input_rr = None
# Note: %riz is a fake register that always reads 0. It is allowed as an
# index register (though it is redundant). However, because it is always
# 0, we don't encode that it needs to be restricted.
if bitness == 64 and index != '%riz':
input_rr = index
base_only_encoding = []
# There is no way to encode base without displacement with ebp/rbp.
# Have to use 0x0+%ebp.
if base not in ('%ebp', '%rbp'):
base_only_encoding = [Operands(disasms=('({})'.format(base),))]
base_plus_index_scale_encoding = []
# There is no way to encode base without displacement with ebp/rbp.
# Have to use 0x0+%ebp.
if base not in ('%ebp', '%rbp'):
base_plus_index_scale_encoding = [
Operands(disasms=('({},{},{})'.format(base, index, scale),),
input_rr=input_rr)]
disp_only_encoding = [Operands(disasms=(disp,))]
disp_plus_index_scale_encoding = [
Operands(disasms=('{}(,{},{})'.format(disp, index, scale),),
input_rr=input_rr)]
disp_plus_base_encoding = [
Operands(disasms=('{}({})'.format(disp, base),))]
disp_plus_base_plus_index_scale_encoding = [
Operands(
disasms=('{}({},{},{})'.format(disp, base, index, scale),),
input_rr=input_rr)]
# Redundant %eiz/%riz encoding isn't available with scale == 1.
if (base in ('%esp', '%rsp') and
index in ('%eiz', '%riz') and
scale == 1):
return []
if bitness == 32:
return (base_only_encoding +
disp_only_encoding +
base_plus_index_scale_encoding +
disp_plus_index_scale_encoding +
disp_plus_base_encoding +
disp_plus_base_plus_index_scale_encoding)
else:
    # Note: x86_64 allows rip relative addressing (x86_32 doesn't).
# However, not all of the different addressing modes are available
# for rip relative addressing (only disp + rip). This is
# MOD==b'00, RM==b'101
if base == '%rip':
return disp_plus_base_encoding
else:
      # x86_64 memory disasms must always include a base register, so the
      # disp-only and disp + index * scale forms available for x86_32
      # aren't permitted.
return (base_only_encoding +
disp_plus_base_encoding +
base_plus_index_scale_encoding +
disp_plus_base_plus_index_scale_encoding)
def AllMemoryOperands(bitness):
"""The set of all possible memory operands as individual Operands objects."""
assert bitness in (32, 64), bitness
displacements = ['0x0']
scales = [1, 2, 4, 8]
if bitness == 32:
bases = set(['%eax', '%ebp', '%ebx', '%ecx',
'%edi', '%edx', '%esi', '%esp'])
indexes = (bases | set(['%eiz'])) - set(['%esp'])
elif bitness == 64:
indexes = set(['%rax', '%rbx', '%rcx', '%rdi', '%rdx',
'%rsi', '%r8', '%r9', '%r10', '%r11',
'%r12', '%r13', '%r14', '%r15', '%riz'])
bases = set(['%rsp', '%rbp', '%r15', '%rip'])
result = set()
for (d, b, i, s) in itertools.product(displacements, bases, indexes, scales):
result.update(MemoryOperandsTemplate(disp=d, base=b, index=i, scale=s,
bitness=bitness))
return result
def OpsProd(*args):
"""A version of itertools.product that builds Operands.
e.g.
XMM = (Operands(disasms=('%xmm1',)), Operands(disasms=('%xmm2',)))
REG = (Operands(disasms=('%rax',)), Operands(disasms=('%rbx',)))
OpsProd(XMM, REG) ->
set([Operands(disasms=('%xmm1', '%rax')),
Operands(disasms=('%xmm1', '%rbx')),
Operands(disasms=('%xmm2', '%rax')),
Operands(disasms=('%xmm2', '%rbx'))])
Args:
*args: each input is a collection of Operands.
Returns:
set of Operands instances, where each instance is a merge of Operands
objects, one taken from each input iterator.
"""
result = set([Operands()])
for pool in args:
result = set([MergeOperands(x, y) for (x,y) in
itertools.product(result, pool)])
return result
def GetRRInfoFromTrie(trie_state, bitness):
"""Convert rr info from trie to format suitable for Operands instance."""
input_rr = trie_state.input_rr
output_rr = trie_state.output_rr
if bitness == 32:
assert not input_rr, input_rr
assert not output_rr, output_rr
if input_rr == 'any_nonspecial' or not input_rr:
input_rr = None
if output_rr == 'None' or not output_rr:
output_rr = None
return input_rr, output_rr
def Disassemble((bitness, (byte_tuple, accept_info1, accept_info2))):
"""Disassembles byte sequence and returns it in old or new trie."""
global the_validator
old_trie_set = set()
new_trie_set = set()
disassembly = the_validator.DisassembleChunk(
''.join([chr(int(x)) for x in byte_tuple]),
bitness=bitness)
assert len(disassembly) == 1
prefixes, mnemonic, operands = (spec.ParseInstruction(disassembly[0]))
full_operands = tuple(prefixes + [mnemonic] + operands)
if accept_info1 is not None:
input_rr, output_rr = GetRRInfoFromTrie(accept_info1, bitness)
old_trie_set.add(Operands(disasms=full_operands,
input_rr=input_rr,
output_rr=output_rr))
if accept_info2 is not None:
input_rr, output_rr = GetRRInfoFromTrie(accept_info2, bitness)
new_trie_set.add(Operands(disasms=full_operands,
input_rr=input_rr,
output_rr=output_rr))
return old_trie_set, new_trie_set
def ParseStandardOpts():
"""Parses a standard set of options for validator proofs from command line."""
parser = optparse.OptionParser(
usage='%prog --bitness=[32,64] --old=path1 --new=path2')
parser.add_option('--old', help='Path of the old trie')
parser.add_option('--new', help='Path of the new trie')
parser.add_option('--bitness', choices=['32', '64'])
parser.add_option('--validator_dll', help='Path of the validator library')
parser.add_option('--decoder_dll', help='Path of the decoder library')
options, _ = parser.parse_args()
return options
def RunProof(standard_opts, proof_func):
"""Validates that trie diffs conform to to a proof.
Args:
standard_opts: command line options describing the two tries to be diffed,
arch type, etc. (as returned by ParseStandardOpts)
proof_func: Callback of (TrieDiffSet, bitness) to run to prove the diff.
Returns:
None
"""
  # The validator itself must be passed to the other processes as a global,
  # as it is a C object that must be shared via forking rather than passed
  # as an argument, which would require it to support pickling.
global the_validator
the_validator = validator.Validator(
validator_dll=standard_opts.validator_dll,
decoder_dll=standard_opts.decoder_dll)
bitness = int(standard_opts.bitness)
adds = set()
removes = set()
tasks = itertools.izip(itertools.repeat(bitness),
trie.DiffTrieFiles(standard_opts.new,
standard_opts.old))
pool = multiprocessing.Pool()
results = pool.imap_unordered(Disassemble, tasks, chunksize=10000)
for new, old in results:
adds |= new
removes |= old
proof_func((adds, removes), bitness)
def AssertDiffSetEquals((adds, removes),
expected_adds, expected_removes):
"""Assert that diffs is composed of expected_adds and expected_removes."""
if adds != expected_adds:
raise AssertionError('falsely added instructions: ',
adds - expected_adds,
'unadded instructions: ',
expected_adds - adds)
if removes != expected_removes:
raise AssertionError('falsely removed instructions: ',
removes - expected_removes,
'missing instructions: ',
expected_removes - removes)
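# Typical driver (editor's sketch; a real proof script would assert its own
# expected diff sets rather than empty ones):
#   opts = ParseStandardOpts()
#   RunProof(opts, lambda diffs, bitness: AssertDiffSetEquals(
#       diffs, expected_adds=set(), expected_removes=set()))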
| bsd-3-clause | -8,105,469,482,561,294,000 | 37.388325 | 80 | 0.618182 | false |
NicoSantangelo/sublime-gulp | status_bar.py | 1 | 1362 | import sublime
is_sublime_text_3 = int(sublime.version()) >= 3000
if is_sublime_text_3:
from .settings import Settings
from .caches import ProcessCache
from .timeout import defer_sync
else:
from settings import Settings
from caches import ProcessCache
from timeout import defer_sync
class StatusBar():
def __init__(self, window):
self.window = window
self.settings = Settings()
def update(self):
if ProcessCache.empty():
return self.erase()
status_bar_tasks = self.settings.get('status_bar_tasks', False)
if status_bar_tasks:
task_names = set([process.get_task_name() for process in ProcessCache.get()])
if status_bar_tasks != True:
if not isinstance(status_bar_tasks, list):
status_bar_tasks = [status_bar_tasks]
task_names = task_names.intersection(set(status_bar_tasks))
if task_names:
defer_sync(lambda: self.set(', '.join(task_names)))
def set(self, text):
text_format = self.settings.get('status_bar_format', '{task_name}')
status = text_format.format(task_name=text)
self.window.active_view().set_status(Settings.PACKAGE_NAME, status)
def erase(self):
self.window.active_view().erase_status(Settings.PACKAGE_NAME)
| mit | 7,182,725,561,763,034,000 | 29.266667 | 89 | 0.623348 | false |
fedora-conary/rbuild | plugins/buildpackages.py | 1 | 4531 | #
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from rbuild import errors
from rbuild import pluginapi
from rbuild.pluginapi import command
from rbuild_plugins.build import packages
from rbuild_plugins.build import refresh
class BuildPackagesCommand(command.BaseCommand):
"""
Builds or rebuilds specified packages, or all checked-out packages
if none are specified.
Additionally, rebuilds any other packages in the product group that
depend on the built packages.
"""
help = 'Build edited packages for this stage'
paramHelp = '[package]*'
docs = {'refresh' : 'refreshes the source of specified packages, or all '
'checked-out packages if none are specified',
'message' : 'message describing why the commit was performed',
'no-watch' : 'do not watch the job after starting the build',
'no-commit' : 'do not automatically commit successful builds',
'no-recurse' : 'default behavior left for backwards compatibility',
'recurse' : 'build every package listed on the '
'command line plus all of its dependencies',
}
def addLocalParameters(self, argDef):
argDef['no-watch'] = command.NO_PARAM
argDef['no-commit'] = command.NO_PARAM
argDef['no-recurse'] = command.NO_PARAM
argDef['recurse'] = command.NO_PARAM
argDef['refresh'] = command.NO_PARAM
argDef['message'] = '-m', command.ONE_PARAM
#pylint: disable-msg=R0201,R0903
# could be a function, and too few public methods
def runCommand(self, handle, argSet, args):
watch = not argSet.pop('no-watch', False)
commit = not argSet.pop('no-commit', False)
recurse = argSet.pop('recurse', False)
argSet.pop('no-recurse', False) # ignored, now the default
refreshArg = argSet.pop('refresh', False)
message = argSet.pop('message', None)
success = True
_, packageList, = self.requireParameters(args, allowExtra=True)
if not packageList:
if refreshArg:
handle.BuildPackages.refreshAllPackages()
jobId = handle.BuildPackages.buildAllPackages()
else:
if refreshArg:
handle.BuildPackages.refreshPackages(packageList)
jobId = handle.BuildPackages.buildPackages(packageList, recurse)
if watch and commit:
success = handle.Build.watchAndCommitJob(jobId, message)
elif watch:
success = handle.Build.watchJob(jobId)
if not success:
raise errors.PluginError('Package build failed')
class BuildPackages(pluginapi.Plugin):
def initialize(self):
self.handle.Commands.getCommandClass('build').registerSubCommand(
'packages', BuildPackagesCommand,
aliases=['package', ])
def buildAllPackages(self):
self.handle.Build.warnIfOldProductDefinition('building all packages')
job = self.createJobForAllPackages()
jobId = self.handle.facade.rmake.buildJob(job)
self.handle.productStore.setPackageJobId(jobId)
return jobId
def buildPackages(self, packageList, recurse=True):
self.handle.Build.warnIfOldProductDefinition('building packages')
job = self.createJobForPackages(packageList, recurse)
jobId = self.handle.facade.rmake.buildJob(job)
self.handle.productStore.setPackageJobId(jobId)
return jobId
def createJobForAllPackages(self):
return packages.createRmakeJobForAllPackages(self.handle)
def createJobForPackages(self, packageList, recurse=True):
return packages.createRmakeJobForPackages(self.handle, packageList,
recurse)
def refreshPackages(self, packageList=None):
return refresh.refreshPackages(self.handle, packageList)
def refreshAllPackages(self):
return refresh.refreshAllPackages(self.handle)
| apache-2.0 | -3,516,606,443,660,765,000 | 38.4 | 79 | 0.672699 | false |
Autodesk/molecular-design-toolkit | moldesign/helpers/qmmm.py | 1 | 3277 | # Copyright 2016 Autodesk Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import moldesign as mdt
LINKBONDRATIO = 0.709  # fixed ratio of C-H to C-C bond length for link atoms
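# (Sanity check, editor's note, assuming textbook bond lengths: a C-H bond of
#  ~1.09 Angstrom over a C-C bond of ~1.54 Angstrom gives 1.09/1.54 ~= 0.708,
#  consistent with the constant above.)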
def create_link_atoms(mol, qmatoms):
""" Create hydrogen caps for bonds between QM and MM regions.
Each link atom will have ``metadata.mmatom``, ``metadata.mmpartner`` attributes to identify the
atom it replaces and the atom it's bonded to in the MM system.
Raises:
ValueError: if any MM/QM atom is bonded to more than one QM/MM atom, or the bond
order is not one
Returns:
List[mdt.Atom]: list of link atoms
"""
linkatoms = []
qmset = set(qmatoms)
for qmatom in qmatoms:
mmatom = _get_mm_nbr(mol, qmatom, qmset)
if mmatom is None:
continue
la = mdt.Atom(atnum=1, name='HL%d' % len(linkatoms),
metadata={'mmatom': mmatom, 'mmpartner': qmatom})
linkatoms.append(la)
set_link_atom_positions(linkatoms)
return linkatoms
def _get_mm_nbr(mol, qmatom, qmset):
mm_nbrs = [nbr for nbr in qmatom.bonded_atoms
if nbr not in qmset]
if len(mm_nbrs) == 0:
return None
# everything below is sanity checks
mmatom = mm_nbrs[0]
if len(mm_nbrs) != 1:
raise ValueError('QM atom %s is bonded to more than one MM atom' % qmatom)
if mol.bond_graph[qmatom][mmatom] != 1:
raise ValueError('Bond crossing QM/MM boundary (%s - %s) does not have order 1'
% (qmatom, mmatom))
if qmatom.atnum != 6 or mmatom.atnum != 6:
print ('WARNING: QM/MM bond involving non-carbon atoms: %s - %s' %
(qmatom, mmatom))
mm_qm_nbrs = [qmnbr for qmnbr in mmatom.bonded_atoms
if qmnbr in qmset]
if len(mm_qm_nbrs) != 1:
raise ValueError('MM atom %s is bonded to more than one QM atom'%mmatom)
return mmatom
def set_link_atom_positions(linkatoms):
"""
Set link atom positions using a fixed ratio of MM bond length to QM bond length
Warnings:
        - This is only valid for carbon-carbon single bonds capped by a
          hydrogen link atom (LINKBONDRATIO assumes C-H over C-C).
- Presumably, the most "correct" way to do this is to place the hydrogen in order to
match the force exterted on the QM atom by the MM atom. This is not currently supported.
Args:
linkatoms (List[mdt.Atom]): list of link atoms to set positions for
References:
http://www.nwchem-sw.org/index.php/Qmmm_link_atoms
"""
for atom in linkatoms:
nbr = atom.metadata.mmpartner
proxy = atom.metadata.mmatom
dist = LINKBONDRATIO * nbr.distance(proxy)
atom.position = (nbr.position +
dist * mdt.mathutils.normalized(proxy.position - nbr.position))
| apache-2.0 | 3,375,231,192,028,924,000 | 34.619565 | 99 | 0.641745 | false |
USGSDenverPychron/pychron | pychron/hardware/fusions/fusions_motor_configurer.py | 1 | 1639 | # ===============================================================================
# Copyright 2011 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
'''
@author: Jake Ross
@copyright: 2009
@license: Educational Community License 1.0
'''
# =============enthought library imports=======================
from traits.api import HasTraits, List
from traitsui.api import View, Item, Group
# =============standard library imports ========================
# =============local library imports ==========================
class FusionsMotorConfigurer(HasTraits):
'''
G{classtree}
'''
motors = List
def traits_view(self):
'''
        Build a tabbed view containing one tab (and one trait) per motor.
        '''
motorgroup = Group(layout='tabbed')
for m in self.motors:
n = m.name
self.add_trait(n, m)
i = Item(n, style='custom', show_label=False)
motorgroup.content.append(i)
return View(motorgroup, resizable=True, title='Configure Motors',
buttons=['OK', 'Cancel', 'Revert'],
)
| apache-2.0 | 3,609,808,186,760,802,300 | 29.351852 | 81 | 0.546065 | false |
bccp/nbodykit | nbodykit/source/catalog/subvolumes.py | 1 | 2079 | from nbodykit.base.catalog import CatalogSource
from pmesh.domain import GridND
from nbodykit.utils import split_size_3d
import numpy
class SubVolumesCatalog(CatalogSource):
""" A catalog that distributes the particles spatially into subvolumes per
MPI rank.
Attributes
----------
    domain : :class:`pmesh.domain.GridND`
The domain objects for decomposition. If None, generate
a domain to decompose the catalog into a 3d grid.
layout : A large object that holds which particle belongs to which rank.
source : the original source object
    Parameters
    ----------
    source : CatalogSource
        the catalog whose particles are redistributed across ranks.
    domain : :class:`pmesh.domain.GridND`, optional
        the domain decomposition to use; if None, a 3d grid is generated.
    position : str, optional
        the name of the column holding particle positions.
    columns : list, optional
        a list of columns to already exchange
"""
def __init__(self, source, domain=None, position='Position', columns=None):
comm = source.comm
if domain is None:
# determine processor division for domain decomposition
np = split_size_3d(comm.size)
if comm.rank == 0:
self.logger.info("using cpu grid decomposition: %s" %str(np))
grid = [
numpy.linspace(0, source.attrs['BoxSize'][0], np[0] + 1, endpoint=True),
numpy.linspace(0, source.attrs['BoxSize'][1], np[1] + 1, endpoint=True),
numpy.linspace(0, source.attrs['BoxSize'][2], np[2] + 1, endpoint=True),
]
domain = GridND(grid, comm=comm)
self.domain = domain
self.source = source
layout = domain.decompose(source[position].compute())
self._size = layout.recvlength
CatalogSource.__init__(self, comm=comm)
self.attrs.update(source.attrs)
self._frozen = {}
if columns is None: columns = source.columns
for column in columns:
data = source[column].compute()
self._frozen[column] = self.make_column(layout.exchange(data))
@property
def hardcolumns(self):
return sorted(list(self._frozen.keys()))
def get_hardcolumn(self, col):
return self._frozen[col]
| gpl-3.0 | 3,959,098,747,508,798,500 | 31.484375 | 88 | 0.599327 | false |
DANS-KNAW/dariah-contribute | dariah_static_data/migrations/0003_auto__del_field_country_iso3166_2__del_field_country_uri__add_field_co.py | 1 | 4571 | # -*- coding: utf-8 -*-
"""
DARIAH Contribute - DARIAH-EU Contribute: edit your DARIAH contributions.
Copyright 2014 Data Archiving and Networked Services
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'Country.iso3166_2'
db.delete_column(u'dariah_static_data_country', 'iso3166_2')
# Deleting field 'Country.uri'
db.delete_column(u'dariah_static_data_country', 'uri')
# Adding field 'Country.geonameid'
db.add_column(u'dariah_static_data_country', 'geonameid',
self.gf('django.db.models.fields.PositiveIntegerField')(default=0),
keep_default=False)
def backwards(self, orm):
# Adding field 'Country.iso3166_2'
db.add_column(u'dariah_static_data_country', 'iso3166_2',
self.gf('django.db.models.fields.CharField')(default='', max_length=2),
keep_default=False)
# Adding field 'Country.uri'
db.add_column(u'dariah_static_data_country', 'uri',
self.gf('django.db.models.fields.URLField')(default='', max_length=200),
keep_default=False)
# Deleting field 'Country.geonameid'
db.delete_column(u'dariah_static_data_country', 'geonameid')
models = {
u'dariah_static_data.activitygroupname': {
'Meta': {'object_name': 'ActivityGroupName'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'dariah_static_data.country': {
'Meta': {'object_name': 'Country'},
'geonameid': ('django.db.models.fields.PositiveIntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'dariah_static_data.tadirahactivity': {
'Meta': {'object_name': 'TADIRAHActivity'},
'activity_group_name': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tadirah_activities'", 'to': u"orm['dariah_static_data.ActivityGroupName']"}),
'activity_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'uri': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
u'dariah_static_data.tadirahobject': {
'Meta': {'object_name': 'TADIRAHObject'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'uri': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
u'dariah_static_data.tadirahtechnique': {
'Meta': {'object_name': 'TADIRAHTechnique'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'uri': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
u'dariah_static_data.vcc': {
'Meta': {'object_name': 'VCC'},
'description': ('django.db.models.fields.TextField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'uri': ('django.db.models.fields.URLField', [], {'max_length': '200'})
}
}
complete_apps = ['dariah_static_data'] | apache-2.0 | 5,867,931,267,306,300,000 | 46.134021 | 182 | 0.589805 | false |
GNOME/pygobject | tests/test_cairo.py | 1 | 11911 | # -*- Mode: Python; py-indent-offset: 4 -*-
# vim: tabstop=4 shiftwidth=4 expandtab
import unittest
import pytest
import gi
try:
gi.require_foreign('cairo')
import cairo
has_cairo = True
except ImportError:
has_cairo = False
has_region = has_cairo and hasattr(cairo, "Region")
try:
from gi.repository import Gtk, Gdk
Gtk, Gdk # pyflakes
except:
Gtk = None
Gdk = None
from gi.repository import GObject, Regress
@unittest.skipUnless(has_cairo, 'built without cairo support')
class Test(unittest.TestCase):
def test_gvalue_converters(self):
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10)
context = cairo.Context(surface)
matrix = cairo.Matrix()
objects = {
'CairoContext': context,
'CairoSurface': surface,
'CairoFontFace': context.get_font_face(),
'CairoScaledFont': context.get_scaled_font(),
'CairoPattern': context.get_source(),
'CairoMatrix': matrix,
}
for type_name, cairo_obj in objects.items():
gtype = GObject.type_from_name(type_name)
v = GObject.Value()
assert v.init(gtype) is None
assert v.get_value() is None
v.set_value(None)
assert v.get_value() is None
v.set_value(cairo_obj)
assert v.get_value() == cairo_obj
def test_cairo_context(self):
context = Regress.test_cairo_context_full_return()
self.assertTrue(isinstance(context, cairo.Context))
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10)
context = cairo.Context(surface)
Regress.test_cairo_context_none_in(context)
def test_cairo_context_full_in(self):
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10)
context = cairo.Context(surface)
Regress.test_cairo_context_full_in(context)
with pytest.raises(TypeError):
Regress.test_cairo_context_full_in(object())
def test_cairo_context_none_return(self):
context = Regress.test_cairo_context_none_return()
self.assertTrue(isinstance(context, cairo.Context))
def test_cairo_path_full_return(self):
path = Regress.test_cairo_path_full_return()
if hasattr(cairo, "Path"): # pycairo 1.15.1+
assert isinstance(path, cairo.Path)
def test_cairo_path_none_in(self):
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10)
context = cairo.Context(surface)
path = context.copy_path()
Regress.test_cairo_path_none_in(path)
surface.finish()
with pytest.raises(TypeError):
Regress.test_cairo_path_none_in(object())
def test_cairo_path_full_in_full_return(self):
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10)
context = cairo.Context(surface)
context.move_to(10, 10)
context.curve_to(10, 10, 3, 4, 5, 6)
path = context.copy_path()
new_path = Regress.test_cairo_path_full_in_full_return(path)
assert list(path) == list(new_path)
surface.finish()
def test_cairo_font_options_full_return(self):
options = Regress.test_cairo_font_options_full_return()
assert isinstance(options, cairo.FontOptions)
def test_cairo_font_options_none_return(self):
options = Regress.test_cairo_font_options_none_return()
assert isinstance(options, cairo.FontOptions)
def test_cairo_font_options_full_in(self):
options = cairo.FontOptions()
Regress.test_cairo_font_options_full_in(options)
with pytest.raises(TypeError):
Regress.test_cairo_font_options_full_in(object())
def test_cairo_font_options_none_in(self):
options = cairo.FontOptions()
Regress.test_cairo_font_options_none_in(options)
def test_cairo_pattern_full_in(self):
pattern = cairo.SolidPattern(1, 1, 1, 1)
Regress.test_cairo_pattern_full_in(pattern)
with pytest.raises(TypeError):
Regress.test_cairo_pattern_full_in(object())
def test_cairo_pattern_none_in(self):
pattern = cairo.SolidPattern(1, 1, 1, 1)
Regress.test_cairo_pattern_none_in(pattern)
def test_cairo_pattern_full_return(self):
pattern = Regress.test_cairo_pattern_full_return()
self.assertTrue(isinstance(pattern, cairo.Pattern))
self.assertTrue(isinstance(pattern, cairo.SolidPattern))
def test_cairo_pattern_none_return(self):
pattern = Regress.test_cairo_pattern_none_return()
self.assertTrue(isinstance(pattern, cairo.Pattern))
self.assertTrue(isinstance(pattern, cairo.SolidPattern))
def test_cairo_region_full_in(self):
region = cairo.Region()
Regress.test_cairo_region_full_in(region)
with pytest.raises(TypeError):
Regress.test_cairo_region_full_in(object())
def test_cairo_matrix_none_in(self):
matrix = cairo.Matrix()
Regress.test_cairo_matrix_none_in(matrix)
with pytest.raises(TypeError):
Regress.test_cairo_matrix_none_in(object())
def test_cairo_matrix_none_return(self):
matrix = Regress.test_cairo_matrix_none_return()
assert matrix == cairo.Matrix()
def test_cairo_matrix_out_caller_allocates(self):
matrix = Regress.test_cairo_matrix_out_caller_allocates()
assert matrix == cairo.Matrix()
def test_cairo_surface(self):
surface = Regress.test_cairo_surface_none_return()
self.assertTrue(isinstance(surface, cairo.ImageSurface))
self.assertTrue(isinstance(surface, cairo.Surface))
self.assertEqual(surface.get_format(), cairo.FORMAT_ARGB32)
self.assertEqual(surface.get_width(), 10)
self.assertEqual(surface.get_height(), 10)
surface = Regress.test_cairo_surface_full_return()
self.assertTrue(isinstance(surface, cairo.ImageSurface))
self.assertTrue(isinstance(surface, cairo.Surface))
self.assertEqual(surface.get_format(), cairo.FORMAT_ARGB32)
self.assertEqual(surface.get_width(), 10)
self.assertEqual(surface.get_height(), 10)
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10)
Regress.test_cairo_surface_none_in(surface)
surface = Regress.test_cairo_surface_full_out()
self.assertTrue(isinstance(surface, cairo.ImageSurface))
self.assertTrue(isinstance(surface, cairo.Surface))
self.assertEqual(surface.get_format(), cairo.FORMAT_ARGB32)
self.assertEqual(surface.get_width(), 10)
self.assertEqual(surface.get_height(), 10)
def test_cairo_surface_full_in(self):
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10)
Regress.test_cairo_surface_full_in(surface)
with pytest.raises(TypeError):
Regress.test_cairo_surface_full_in(object())
def test_require_foreign(self):
self.assertEqual(gi.require_foreign('cairo'), None)
self.assertEqual(gi.require_foreign('cairo', 'Context'), None)
self.assertRaises(ImportError, gi.require_foreign, 'invalid_module')
self.assertRaises(ImportError, gi.require_foreign, 'invalid_module', 'invalid_symbol')
self.assertRaises(ImportError, gi.require_foreign, 'cairo', 'invalid_symbol')
@unittest.skipUnless(has_cairo, 'built without cairo support')
@unittest.skipUnless(has_region, 'built without cairo.Region support')
@unittest.skipUnless(Gdk, 'Gdk not available')
class TestRegion(unittest.TestCase):
def test_region_to_py(self):
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10)
context = cairo.Context(surface)
context.paint()
region = Gdk.cairo_region_create_from_surface(surface)
r = region.get_extents()
self.assertEqual((r.height, r.width), (10, 10))
def test_region_from_py(self):
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10)
context = cairo.Context(surface)
region = cairo.Region(cairo.RectangleInt(0, 0, 42, 42))
Gdk.cairo_region(context, region)
self.assertTrue("42" in repr(list(context.copy_path())))
@unittest.skipUnless(has_cairo, 'built without cairo support')
@unittest.skipUnless(Gtk, 'Gtk not available')
class TestPango(unittest.TestCase):
def test_cairo_font_options(self):
window = Gtk.Window()
if Gtk._version == "4.0":
window.set_font_options(cairo.FontOptions())
font_opts = window.get_font_options()
else:
screen = window.get_screen()
font_opts = screen.get_font_options()
assert font_opts is not None
self.assertTrue(isinstance(font_opts.get_subpixel_order(), int))
if has_cairo:
from gi.repository import cairo as CairoGObject
# Use PyGI signals to test non-introspected foreign marshaling.
class CairoSignalTester(GObject.Object):
sig_context = GObject.Signal(arg_types=[CairoGObject.Context])
sig_surface = GObject.Signal(arg_types=[CairoGObject.Surface])
sig_font_face = GObject.Signal(arg_types=[CairoGObject.FontFace])
sig_scaled_font = GObject.Signal(arg_types=[CairoGObject.ScaledFont])
sig_pattern = GObject.Signal(arg_types=[CairoGObject.Pattern])
@unittest.skipUnless(has_cairo, 'built without cairo support')
class TestSignalMarshaling(unittest.TestCase):
# Tests round tripping of cairo objects through non-introspected signals.
def setUp(self):
self.surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10)
self.context = cairo.Context(self.surface)
self.tester = CairoSignalTester()
def pass_object_through_signal(self, obj, signal):
"""Pass the given `obj` through the `signal` emission storing the
`obj` passed through the signal and returning it."""
passthrough_result = []
def callback(instance, passthrough):
passthrough_result.append(passthrough)
signal.connect(callback)
signal.emit(obj)
return passthrough_result[0]
def test_context(self):
result = self.pass_object_through_signal(self.context, self.tester.sig_context)
self.assertTrue(isinstance(result, cairo.Context))
with pytest.raises(TypeError):
self.pass_object_through_signal(object(), self.tester.sig_context)
def test_surface(self):
result = self.pass_object_through_signal(self.surface, self.tester.sig_surface)
self.assertTrue(isinstance(result, cairo.Surface))
def test_font_face(self):
font_face = self.context.get_font_face()
result = self.pass_object_through_signal(font_face, self.tester.sig_font_face)
self.assertTrue(isinstance(result, cairo.FontFace))
with pytest.raises(TypeError):
self.pass_object_through_signal(object(), self.tester.sig_font_face)
def test_scaled_font(self):
scaled_font = cairo.ScaledFont(self.context.get_font_face(),
cairo.Matrix(),
cairo.Matrix(),
self.context.get_font_options())
result = self.pass_object_through_signal(scaled_font, self.tester.sig_scaled_font)
self.assertTrue(isinstance(result, cairo.ScaledFont))
with pytest.raises(TypeError):
result = self.pass_object_through_signal(object(), self.tester.sig_scaled_font)
def test_pattern(self):
pattern = cairo.SolidPattern(1, 1, 1, 1)
result = self.pass_object_through_signal(pattern, self.tester.sig_pattern)
self.assertTrue(isinstance(result, cairo.Pattern))
self.assertTrue(isinstance(result, cairo.SolidPattern))
with pytest.raises(TypeError):
result = self.pass_object_through_signal(object(), self.tester.sig_pattern)
| lgpl-2.1 | -7,246,807,414,580,883,000 | 37.672078 | 94 | 0.657543 | false |
gsarma/PyOpenWorm | tests/EvidenceQualityTest.py | 1 | 2772 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from .DataTestTemplate import _DataTest
import PyOpenWorm
from PyOpenWorm.evidence import Evidence
from PyOpenWorm.document import Document
from PyOpenWorm.website import Website
from PyOpenWorm.context import Context
from six.moves.urllib.parse import urlparse
import pytest
import re
# Regular expressions copied from:
# https://www.crossref.org/blog/dois-and-matching-regular-expressions/
DOI_REGEXEN = [re.compile(x, re.I) for x in (r'^10.\d{4,9}/[-._;()/:A-Z0-9]+$',
r'^10.1002/\S+$')]
@pytest.mark.inttest
class EvidenceQualityTests(_DataTest):
'''
Tests for the quality of evidence. As distinct from coverage, these test things like whether accession information
is included and usable, whether certain fields are properly formatted, etc.
'''
def setUp(self):
PyOpenWorm.connect(configFile='tests/data_integrity_test.conf')
self.g = PyOpenWorm.config("rdf.graph")
self.context = Context()
self.qctx = self.context.stored
def tearDown(self):
PyOpenWorm.disconnect()
def test_has_valid_resource(self):
"""Checks if the object has either a valid DOI or URL"""
ev = self.qctx(Evidence)()
allEvidence = set(ev.load())
qualityEvidence = set()
for evobj in allEvidence:
ref = evobj.reference()
if isinstance(ref, Document):
doi = ref.doi()
if doi:
for pat in DOI_REGEXEN:
if pat.match(doi):
qualityEvidence.add(evobj)
break
else: # no break
continue
urls = ref.uri.get()
good_uris = True
for uri in urls:
parsed = urlparse(uri)
if not parsed.scheme or not parsed.netloc:
good_uris = False
break
if not good_uris:
continue
elif isinstance(ref, Website):
urls = ref.url.get()
urls = list(urls)
print(urls)
good_uris = True
for uri in urls:
parsed = urlparse(uri)
if not parsed.scheme or not parsed.netloc:
good_uris = False
break
if not good_uris:
continue
qualityEvidence.add(evobj)
self.assertSetEqual(allEvidence, qualityEvidence,
msg='\n'.join(str(x.reference()) for x in (allEvidence - qualityEvidence)))
| mit | -8,552,154,659,828,182,000 | 33.65 | 118 | 0.5386 | false |
puttarajubr/commcare-hq | custom/succeed/reports/patient_task_list.py | 1 | 14071 | from datetime import datetime
import logging
from django.core.urlresolvers import reverse
from django.utils import html
from django.utils.translation import ugettext as _, ugettext_noop
import json
from corehq.apps.api.es import ReportCaseES
from corehq.apps.cloudcare.api import get_cloudcare_app, get_cloudcare_form_url
from corehq.apps.reports.datatables import DataTablesHeader, DataTablesColumn
from corehq.apps.reports.filters.search import SearchFilter
from corehq.apps.reports.generic import ElasticProjectInspectionReport
from corehq.apps.reports.standard import CustomProjectReport, ProjectReportParametersMixin
from corehq.apps.reports.standard.cases.data_sources import CaseDisplay
from corehq.elastic import es_query
from corehq.pillows.base import restore_property_dict
from corehq.pillows.mappings.reportcase_mapping import REPORT_CASE_INDEX
from custom.succeed.reports.patient_Info import PatientInfoReport
from custom.succeed.reports import VISIT_SCHEDULE, LAST_INTERACTION_LIST, EMPTY_FIELD, \
INPUT_DATE_FORMAT, OUTPUT_DATE_FORMAT, CM_APP_UPDATE_VIEW_TASK_MODULE, CM_UPDATE_TASK, TASK_RISK_FACTOR, TASK_ACTIVITY
from custom.succeed.utils import is_succeed_admin, has_any_role, SUCCEED_CM_APPNAME, get_app_build
from casexml.apps.case.models import CommCareCase
from dimagi.utils.decorators.memoized import memoized
class PatientTaskListReportDisplay(CaseDisplay):
def __init__(self, report, case_dict):
next_visit = VISIT_SCHEDULE[0]
last_inter = None
for action in case_dict['actions']:
if action['xform_xmlns'] in LAST_INTERACTION_LIST:
last_inter = action
for visit_key, visit in enumerate(VISIT_SCHEDULE):
for key, action in enumerate(case_dict['actions']):
if visit['xmlns'] == action['xform_xmlns']:
try:
next_visit = VISIT_SCHEDULE[visit_key + 1]
del case_dict['actions'][key]
break
except IndexError:
next_visit = 'last'
self.next_visit = next_visit
if last_inter:
self.last_interaction = last_inter['date']
self.domain = report.domain
self.app_dict = get_cloudcare_app(self.domain, SUCCEED_CM_APPNAME)
self.latest_build = get_app_build(self.app_dict)
super(PatientTaskListReportDisplay, self).__init__(report, case_dict)
def get_property(self, key):
if key in self.case:
return self.case[key]
else:
return EMPTY_FIELD
def get_link(self, url, field):
if url:
return html.mark_safe("<a class='ajax_dialog' href='%s' target='_blank'>%s</a>" % (url, html.escape(field)))
else:
return "%s (bad ID format)" % self.case["indices"][0]["referenced_id"]
def get_form_url(self, app_dict, app_build_id, module_idx, form, case_id=None):
try:
module = app_dict['modules'][module_idx]
form_idx = [ix for (ix, f) in enumerate(module['forms']) if f['xmlns'] == form][0]
except IndexError:
form_idx = None
return html.escape(get_cloudcare_form_url(domain=self.domain,
app_build_id=app_build_id,
module_id=module_idx,
form_id=form_idx,
case_id=case_id) + '/enter/')
@property
@memoized
def full_name(self):
return CommCareCase.get(self.get_property("indices")[0]["referenced_id"])["full_name"]
@property
def full_name_url(self):
return html.escape(
PatientInfoReport.get_url(*[self.case["domain"]]) + "?patient_id=%s" % self.case["indices"][0]["referenced_id"])
@property
def full_name_link(self):
return self.get_link(self.full_name_url, self.full_name)
@property
def name(self):
return self.get_property("name")
@property
def name_url(self):
if self.status == "Closed":
url = reverse('case_details', args=[self.domain, self.get_property("_id")])
return url + '#!history'
else:
return self.get_form_url(self.app_dict, self.latest_build, CM_APP_UPDATE_VIEW_TASK_MODULE, CM_UPDATE_TASK, self.get_property("_id"))
@property
def name_link(self):
return self.get_link(self.name_url, self.name)
@property
def task_responsible(self):
return self.get_property("task_responsible")
@property
def case_filter(self):
filters = []
care_site = self.request_params.get('task_responsible', '')
if care_site != '':
filters.append({'term': {'task_responsible.#value': care_site.lower()}})
return {'and': filters} if filters else {}
@property
def status(self):
return self.get_property("closed") and "Closed" or "Open"
@property
def task_due(self):
rand_date = self.get_property("task_due")
if rand_date and rand_date != EMPTY_FIELD:
date = datetime.strptime(rand_date, INPUT_DATE_FORMAT)
return date.strftime(OUTPUT_DATE_FORMAT)
else:
return EMPTY_FIELD
@property
def last_modified(self):
rand_date = self.get_property("last_updated")
if rand_date and rand_date != EMPTY_FIELD:
date = datetime.strptime(rand_date, INPUT_DATE_FORMAT)
return date.strftime(OUTPUT_DATE_FORMAT)
else:
return EMPTY_FIELD
@property
def task_activity(self):
key = self.case.get("task_activity", EMPTY_FIELD)
return TASK_ACTIVITY.get(key, key)
@property
def task_risk_factor(self):
key = self.case.get("task_risk_factor", EMPTY_FIELD)
return TASK_RISK_FACTOR.get(key, key)
@property
def task_details(self):
return self.get_property("task_details")
class PatientTaskListReport(CustomProjectReport, ElasticProjectInspectionReport, ProjectReportParametersMixin):
ajax_pagination = True
name = ugettext_noop('Patient Tasks')
slug = 'patient_task_list'
default_sort = {'task_due.#value': 'asc'}
base_template_filters = 'succeed/report.html'
case_type = 'task'
fields = ['custom.succeed.fields.ResponsibleParty',
'custom.succeed.fields.PatientName',
'custom.succeed.fields.TaskStatus',
'corehq.apps.reports.standard.cases.filters.CaseSearchFilter']
@classmethod
def show_in_navigation(cls, domain=None, project=None, user=None):
return True
@property
@memoized
def rendered_report_title(self):
return self.name
@property
@memoized
def case_es(self):
return ReportCaseES(self.domain)
@property
def case_filter(self):
filters = []
care_site = self.request_params.get('care_site', '')
if care_site != '':
filters.append({'term': {'care_site.#value': care_site.lower()}})
return {'and': filters} if filters else {}
@property
def headers(self):
headers = DataTablesHeader(
DataTablesColumn(_("Patient Name"), sortable=False),
DataTablesColumn(_("Task Name"), prop_name="name"),
DataTablesColumn(_("Responsible Party"), prop_name="task_responsible", sortable=False),
DataTablesColumn(_("Status"), prop_name='status', sortable=False),
DataTablesColumn(_("Action Due"), prop_name="task_due.#value"),
DataTablesColumn(_("Last Update"), prop_name='last_updated.#value'),
DataTablesColumn(_("Task Type"), prop_name="task_activity.#value"),
DataTablesColumn(_("Associated Risk Factor"), prop_name="task_risk_factor.#value"),
DataTablesColumn(_("Details"), prop_name="task_details", sortable=False),
)
return headers
@property
@memoized
def es_results(self):
q = { "query": {
"filtered": {
"query": {
"match_all": {}
},
"filter": {
"and": [
{"term": { "domain.exact": "succeed" }},
]
}
}
},
'sort': self.get_sorting_block(),
'from': self.pagination.start if self.pagination else None,
'size': self.pagination.count if self.pagination else None,
}
search_string = SearchFilter.get_value(self.request, self.domain)
es_filters = q["query"]["filtered"]["filter"]
responsible_party = self.request_params.get('responsible_party', '')
if responsible_party != '':
if responsible_party == 'Care Manager':
es_filters["and"].append({"term": {"task_responsible.#value": "cm"}})
else:
es_filters["and"].append({"term": {"task_responsible.#value": "chw"}})
task_status = self.request_params.get('task_status', '')
if task_status != '':
if task_status == 'closed':
es_filters["and"].append({"term": {"closed": True}})
else:
es_filters["and"].append({"term": {"closed": False}})
patient_id = self.request_params.get('patient_id', '')
if patient_id != '':
es_filters["and"].append({"term": {"indices.referenced_id": patient_id}})
def _filter_gen(key, flist):
return {"terms": {
key: [item.lower() for item in flist if item]
}}
user = self.request.couch_user
if not user.is_web_user():
owner_ids = user.get_group_ids()
user_ids = [user._id]
owner_filters = _filter_gen('owner_id', owner_ids)
user_filters = _filter_gen('user_id', user_ids)
filters = filter(None, [owner_filters, user_filters])
subterms = []
subterms.append({'or': filters})
es_filters["and"].append({'and': subterms} if subterms else {})
if self.case_type:
es_filters["and"].append({"term": {"type.exact": 'task'}})
if search_string:
query_block = {"queryString": {"query": "*" + search_string + "*"}}
q["query"]["filtered"]["query"] = query_block
sorting_block = self.get_sorting_block()[0].keys()[0] if len(self.get_sorting_block()) != 0 else None
order = self.get_sorting_block()[0].values()[0] if len(self.get_sorting_block()) != 0 else None
if sorting_block == 'task_risk_factor.#value':
sort = {
"_script": {
"script":
"""
foreach(String key : task_risk_factor_list.keySet()) {
String value = _source.task_risk_factor.get('#value');
if (value == null) {
return '';
} else {
return task_risk_factor_list.get(value);
}
}
return ''
""",
"type": "string",
"params": {
"task_risk_factor_list": TASK_RISK_FACTOR
},
"order": order
}
}
q['sort'] = sort
if sorting_block == 'task_activity.#value':
sort = {
"_script": {
"script":
"""
foreach(String key : task_activity_list.keySet()) {
String value = _source.task_activity.get('#value');
if (value == null) {
return value;
} else {
return task_activity_list.get(value);
}
}
return ''
""",
"type": "string",
"params": {
"task_activity_list": TASK_ACTIVITY
},
"order": order
}
}
q['sort'] = sort
logging.info("ESlog: [%s.%s] ESquery: %s" % (self.__class__.__name__, self.domain, json.dumps(q)))
if self.pagination:
return es_query(q=q, es_url=REPORT_CASE_INDEX + '/_search', dict_only=False, start_at=self.pagination.start)
else:
return es_query(q=q, es_url=REPORT_CASE_INDEX + '/_search', dict_only=False)
@property
def get_all_rows(self):
return self.rows
@property
def rows(self):
case_displays = (PatientTaskListReportDisplay(self, restore_property_dict(self.get_case(case)))
for case in self.es_results['hits'].get('hits', []))
for disp in case_displays:
yield [
disp.full_name_link,
disp.name_link,
disp.task_responsible,
disp.status,
disp.task_due,
disp.last_modified,
disp.task_activity,
disp.task_risk_factor,
disp.task_details
]
@property
def user_filter(self):
return super(PatientTaskListReport, self).user_filter
def get_case(self, row):
if '_source' in row:
case_dict = row['_source']
else:
raise ValueError("Case object is not in search result %s" % row)
if case_dict['domain'] != self.domain:
raise Exception("case.domain != self.domain; %r and %r, respectively" % (case_dict['domain'], self.domain))
return case_dict
| bsd-3-clause | -7,832,998,788,708,410,000 | 37.763085 | 144 | 0.542108 | false |
mahmoudShaheen/PyMedox | packages/arduino.py | 1 | 1286 | #!/usr/bin/env python
#################################
# @author: Mahmoud Shaheen #
# MedicalBox IOT Project #
# Arduino #
#################################
#functions for serial communication with Arduino
#called from controlHardware module
import serial
import data
import time
ser = serial.Serial(data.arduinoPort)
ser.baudrate = data.baudRate
time.sleep(5) #wait for serial communication to start
#encodes string and sends it on serial port for Arduino
def sendSerial(serialString): #re-opens the port if it was closed, then writes
	if not ser.isOpen():
		ser.open()
		time.sleep(5)
	serialString = str(serialString) #makes sure that the data is a string "convert any to string"
	serialString = serialString.encode() #encodes the string "converts string to byte array"
	print "serial to write: " + serialString
	ser.write(serialString)
#gets a line from serial port from Arduino
def getSerial():
	if not ser.isOpen(): #checks if the port is closed to re-open it
		ser.open()
		time.sleep(5)
	line = ser.readline() #get a line from serial terminated by \n
	line = line.strip() #removes \r\n at the end of the string
	line = line.decode("utf-8") #removes b at the start of the string "converts byte to string"
	print "serial received: ", line
	return line
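#Example usage (editor's sketch; the payload "1" and the echoed reply are
#hypothetical and depend on the sketch running on the Arduino):
#	sendSerial("1")       #request an action from the Arduino
#	reply = getSerial()   #read its acknowledgement line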
| mit | -8,242,292,681,083,054,000 | 31.974359 | 93 | 0.691291 | false |
Ladeia/ProjectEuler | Problem145/Python/solution_1.py | 1 | 3513 | #!/usr/bin/env python3
#coding=utf-8
"""
How many reversible numbers are there below one-billion?
Problem 145
Some positive integers n have the property that the sum [ n + reverse(n) ] consists entirely of odd (decimal) digits.
For instance, 36 + 63 = 99 and 409 + 904 = 1313. We will call such numbers reversible; so 36, 63, 409, and 904 are reversible. Leading zeroes are not allowed in either n or reverse(n).
There are 120 reversible numbers below one-thousand.
How many reversible numbers are there below one-billion (10^9)?
"""
from functools import reduce
from itertools import permutations, combinations_with_replacement
def other_function(n):
	for i in range(11, n):
		if str(i)[0] == '0' or str(i)[-1] == '0':
			continue
		yield i
# combs = reduce(list.__add__, [[(x, y) for y in range(0, 10) if (x ^ y) & 1 and not (x == 0 or y == 0)] for x in range(0, 10)])
# for comb in combs:
# start, end = comb
# num = int(str(start) + str(end))
# yield num
# for i in range(n):
# for comb in combs:
# start, end = comb
# num = int(str(start) + str(i) + str(end))
# if num > n:
# break
# yield num
def gen_filtred(n, start = 1): #n - power of 10
	#symmetric digit-pair combinations (one digit even, one odd)
	combs = reduce(list.__add__, [[(x, y) for y in range(0, 10) if (x ^ y) & 1] for x in range(0, 10)])
	exp = start
	while exp < n:
		tamanho = len(str(10 ** exp))//2
		if exp & 1 == 1: #odd base-10 exponent -> even number of digits
			for comb in combinations_with_replacement(combs, tamanho):
				for perm in set(permutations(comb)):
					first = perm[0]
					head, tail = first
					if head == 0 or tail == 0:
						continue
					index = exp
					newnum = 0
					for mostnum, lessnum in perm:
						newnum += mostnum * 10 ** index + lessnum * 10 ** abs(index - exp)
						index -= 1
					yield newnum
		else: #even base-10 exponent -> odd number of digits
			for comb in combinations_with_replacement(combs, tamanho):
				for perm in set(permutations(comb)):
					first = perm[0]
					head, tail = first
					if head == 0 or tail == 0:
						continue
					for middle in range(10):
						#print('Comb: {}| Perm: {}'.format(comb, perm))
						index = exp
						newnum = middle * 10 ** (exp // 2)
						for mostnum, lessnum in perm:
							newnum += mostnum * 10 ** index + lessnum * 10 ** abs(index - exp)
							index -= 1
						yield newnum
		exp += 1
def sumreverse(num):
return str((int(str(num)[::-1]) + num))
def reversible(num):
return reduce(lambda a, b: a*b, [int(y) for y in sumreverse(num)]) & 1
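# Quick check (editor's note): 36 + 63 = 99, all digits odd, so
# reversible(36) == 1; 55 + 55 = 110 contains even digits, so
# reversible(55) == 0 (the digit product is even, masking to 0).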
range_x = lambda x: gen_filtred(len(str(x)) - 1)
range_y = lambda y: other_function(y)
test = 10 ** 9
print('Testing up to: %s ' %test)
gen_list = []
other_list = [] #only filled when the commented test block below is enabled
total = 0
for i in range_x(test):
#print(i)
if reversible(i) == 1:
#print('%d + %s = %s' %(i, str(i)[::-1], sumreverse(i)))
total += 1
#gen_list.append(i)
#print(total)
#else:
	#print('Not reversible: %s' %i)
print('Total range_x: %d' %total)
#Used for tests
# other_list = []
# total = 0
# for i in range_y(test):
# if reversible(i) == 1:
# #print('%d + %s = %s' %(i, str(i)[::-1], sumreverse(i)))
# total += 1
# other_list.append(i)
# #print(total)
# #else:
# 		#print('Not reversible: %s' %i)
# print('Total range_y: %d' %total)
for gen, other in zip(gen_list, other_list):
	if gen not in other_list:
		print('other_function is not generating the reversible number: %s' % gen)
	if other not in gen_list:
		print('gen_filtred is not generating the reversible number: %s' % other)
a3qz/networked_platformer | editor/editor.py | 1 | 4284 | import random
import time
import data
import sprite
import sys
import pygame
import constants
import collectable
import client
import wall
from struct import *
import board
class Game:
def __init__(self, s):
self.objects = [] #start with a list of no objects
self.screen = s #get the screen surface
#make a player ship to use to control the view
self.player = Ship(self, 100, 100, "91913")
#load a font for drawing our typed string
self.bigfont = pygame.font.Font("./fonts/megaman_2.ttf", 32)
#make a board and load in the level editor level
self.board = board.Board(self)
self.board.parse("./levels/beta.lvl")
def tick(self): #handle just our player for the editor
self.player.tick()
def draw(self):
#draw the background
self.screen.fill(constants.GREEN)
#draw the objects in reversed order, for depth reasons
for b in reversed(self.objects):
b.draw()
#handle player actions
def handleKeyDown(self, k):
self.player.handleKeyDown(k)
def handleKeyUp(self, k):
self.player.handleKeyUp(k)
#no longer used
def handleMUP(self, xxt, yyt):
pass
def handleMDOWN(self, xxt, yyt, event):
#figure out which grid space the player clicked on
x = int((16 + xxt - self.player.view[0])/32)*32
y = int((16 + yyt - self.player.view[1])/32)*32
#check if they are left clicking or not
if event.button == 1:
#if left click, add a thing to the board based off
#where you clicked and what the user typed
self.board.ref[int(self.player.toadd)](self, x, y,
int(self.player.toadd))
else:
#otherwise, make a rectangle and figure out who you clicked on
rect = pygame.Rect(0, 0, 1, 1)
l1 = self.objects
l2 = [w.rect for w in l1]
#check the objects for collision
i = rect.move(x, y).collidelist(l2)
#if we clicked on a valid thing to remove, remove it
if i != -1 and not isinstance(l1[i], Ship):
self.objects = [o for o in self.objects if o != l1[i]]
class Ship(sprite.Sprite):
def __init__(self, game, x, y, descriptor):
super(Ship, self).__init__(game)
self.rect.move_ip(x, y) #move to the correct coordinates
#load an image
self.img = pygame.image.load('imgs/cards/smaller_pngs/{}'.format(data.num_as_key[descriptor])).convert_alpha()
#set up our game's viewport
self.view = (0, 0)
#start a string for typing
self.toadd = ''
#make us our correct size
self.rect.inflate_ip(100, 145)
#we aren't pressing anything
self.keys = 0
def tick(self):
#move us based off our velocity
self.rect.move_ip(self.vx, self.vy)
#move our view to the right place
self.view = (constants.WIDTH/2 - self.rect.x,
(constants.HEIGHT*3)/4 - self.rect.y)
#handle keys
self.fly()
def draw(self):
self.game.screen.blit(self.img, self.rect.move(*self.view))
label = self.game.bigfont.render(self.toadd, 1, (255, 255, 255))
self.game.screen.blit(label, (10, 10))
def handleKeyDown(self, k):
#asdw control flight
if k == 'a':
self.keys |= 1
elif k == 'd':
self.keys |= 2
elif k == 'w':
self.keys |= 4
elif k == 's':
self.keys |= 8
elif k.isdigit() and len(k) == 1: #if we did a single digit, type it
self.toadd = self.toadd + k
elif k == 'backspace': #if we backspaced, delete a char from our string
self.toadd = self.toadd[:-1]
#stop flying when releasing keys
def handleKeyUp(self, k):
if k == 'a':
self.keys &= ~1
elif k == 'd':
self.keys &= ~2
elif k == 'w':
self.keys &= ~4
elif k == 's':
self.keys &= ~8
def fly(self):
#handle our velocities
self.vx = (((self.keys & 2)>>1) - ((self.keys & 1)>>0)) * 7
self.vy = (((self.keys & 4)>>2) - ((self.keys & 8)>>3)) * -7
| gpl-3.0 | -2,789,428,229,882,804,700 | 33.272 | 118 | 0.558123 | false |
bwhite/hadoopy | examples/l4-vision-and-image-processing-with-hadoop/ex0-face-finder/face_finder.py | 1 | 3141 | #!/usr/bin/env python
# (C) Copyright 2011 Brandyn A. White
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Hadoopy Face Finding Demo"""
__author__ = 'Brandyn A. White <[email protected]>'
__license__ = 'GPL V3'
import hadoopy
import Image
import imfeat
import cStringIO as StringIO
import os
import cv
class Mapper(object):
def __init__(self):
path = 'haarcascade_frontalface_default.xml'
if os.path.exists(path):
self._cascade = cv.Load(path)
else:
path = 'fixtures/haarcascade_frontalface_default.xml'
if os.path.exists(path):
self._cascade = cv.Load(path)
else:
raise ValueError("Can't find .xml file!")
def _detect_faces(self, img):
min_size = (20, 20)
image_scale = 2
haar_scale = 1.2
min_neighbors = 2
haar_flags = 0
if img.nChannels == 3:
gray = cv.CreateImage((img.width, img.height), 8, 1)
cv.CvtColor(img, gray, cv.CV_BGR2GRAY)
else:
gray = img
small_img = cv.CreateImage((cv.Round(img.width / image_scale),
cv.Round(img.height / image_scale)), 8, 1)
cv.Resize(gray, small_img, cv.CV_INTER_LINEAR)
cv.EqualizeHist(small_img, small_img)
faces = cv.HaarDetectObjects(small_img, self._cascade,
cv.CreateMemStorage(0),
haar_scale, min_neighbors, haar_flags,
min_size)
return [((x * image_scale, y * image_scale,
w * image_scale, h * image_scale), n)
for (x, y, w, h), n in faces]
def _load_cv_image(self, value):
return imfeat.convert_image(Image.open(StringIO.StringIO(value)),
[('opencv', 'rgb', 8)])
def map(self, key, value):
"""
Args:
key: Image name
value: Image as jpeg byte data
Yields:
A tuple in the form of (key, value)
key: Image name
value: (image, faces) where image is the input value and faces is
a list of ((x, y, w, h), n)
"""
try:
image = self._load_cv_image(value)
except:
hadoopy.counter('DATA_ERRORS', 'ImageLoadError')
return
faces = self._detect_faces(image)
if faces:
yield key, (value, faces)
if __name__ == "__main__":
hadoopy.run(Mapper, doc=__doc__)
| gpl-3.0 | -3,343,407,461,399,199,000 | 32.774194 | 78 | 0.562241 | false |
ctools/ctools | cscripts/csphagen.py | 1 | 44281 | #! /usr/bin/env python
# ==========================================================================
# Computes the PHA spectra for source/background and ARF/RMF files using the
# reflected region method
#
# Copyright (C) 2017-2021 Luigi Tibaldo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# ==========================================================================
import gammalib
import ctools
import math
import sys
from cscripts import mputils
# =============== #
# csfindobs class #
# =============== #
class csphagen(ctools.csobservation):
"""
Generate PHA, ARF and RMF files for classical IACT spectral analysis
"""
# Constructor
def __init__(self, *argv):
"""
Constructor
"""
# Initialise application by calling the appropriate class constructor
self._init_csobservation(self.__class__.__name__, ctools.__version__, argv)
# Initialise other variables
self._obs_off = gammalib.GObservations()
self._ebounds = gammalib.GEbounds()
self._etruebounds = gammalib.GEbounds()
self._src_dir = gammalib.GSkyDir()
self._src_reg = gammalib.GSkyRegions()
self._models = gammalib.GModels()
self._srcname = ''
self._bkg_regs = []
self._excl_reg = None
self._has_exclusion = False
self._srcshape = ''
self._rad = 0.0
self._reg_width = 0.0
self._reg_height = 0.0
self._reg_posang = 0.0
self._nthreads = 0
# Return
return
# State methods por pickling
def __getstate__(self):
"""
Extend ctools.csobservation getstate method to include some members
Returns
-------
state : dict
Pickled instance
"""
# Set pickled dictionary
state = {'base' : ctools.csobservation.__getstate__(self),
'obs_off' : self._obs_off,
'ebounds' : self._ebounds,
'etruebounds' : self._etruebounds,
'src_dir' : self._src_dir,
'src_reg' : self._src_reg,
'models' : self._models,
'srcname' : self._srcname,
'bkg_regs' : self._bkg_regs,
'excl_reg' : self._excl_reg,
'has_exclusion' : self._has_exclusion,
'srcshape' : self._srcshape,
'rad' : self._rad,
'reg_width' : self._reg_width,
'reg_height' : self._reg_height,
'reg_posang' : self._reg_posang,
'nthreads' : self._nthreads}
# Return pickled dictionary
return state
def __setstate__(self, state):
"""
Extend ctools.csobservation setstate method to include some members
Parameters
----------
state : dict
Pickled instance
"""
ctools.csobservation.__setstate__(self, state['base'])
self._obs_off = state['obs_off']
self._ebounds = state['ebounds']
self._etruebounds = state['etruebounds']
self._src_dir = state['src_dir']
self._src_reg = state['src_reg']
self._models = state['models']
self._srcname = state['srcname']
self._bkg_regs = state['bkg_regs']
self._excl_reg = state['excl_reg']
self._has_exclusion = state['has_exclusion']
self._srcshape = state['srcshape']
self._rad = state['rad']
self._reg_width = state['reg_width']
self._reg_height = state['reg_height']
self._reg_posang = state['reg_posang']
self._nthreads = state['nthreads']
# Return
return
# Private methods
def _query_src_direction(self):
"""
Set up the source direction parameter
"""
# Initialise source direction
self._src_dir = gammalib.GSkyDir()
# Get coordinate systel
coordsys = self['coordsys'].string()
# If coordinate system is celestial then query "ra" and "dec"
if coordsys == 'CEL':
ra = self['ra'].real()
dec = self['dec'].real()
self._src_dir.radec_deg(ra, dec)
# ... otherwise, if coordinate system is galactic then query "glon"
# and "glat"
elif coordsys == 'GAL':
glon = self['glon'].real()
glat = self['glat'].real()
self._src_dir.lb_deg(glon, glat)
# Return
return
def _compute_posang(self, pnt_dir, a, b):
"""
Compute the difference in position angle wrt the pointing in degrees
Parameters
----------
pnt_dir : `~gammalib.GSkyDir`
Pointing direction
a : `~gammalib.GSkyDir`
First sky direction
a : `~gammalib.GSkyDir`
Second sky direction
Returns
-------
posang : float
Position angle (degrees)
"""
# Compute position angles
posang_a = pnt_dir.posang_deg(a) % 360
posang_b = pnt_dir.posang_deg(b) % 360
# Compute difference
posang = abs(posang_a - posang_b)
# Return position angle
return posang
def _get_regions(self, filename):
"""
Get regions from DS9 file or FITS file
Parameters
----------
filename : `~gammalib.GFilename`
Filename
Returns
-------
regs : `~gammalib.GSkyRegions`
Region container
"""
# If filename is a FITS file then load region map and append to
# list of regions
if filename.is_fits():
map = gammalib.GSkyRegionMap(filename)
regs = gammalib.GSkyRegions()
regs.append(map)
# ... otherwise load DS9 file
else:
regs = gammalib.GSkyRegions(filename)
# Return region container
return regs
def _get_source_parameters(self):
"""
Get parameters to define source/On region
"""
# Get source shape
self._srcshape = self['srcshape'].string()
# Query source direction
self._query_src_direction()
# If source shape is a circle the append GSkyRegionCircle
if self._srcshape == 'CIRCLE':
# Set circular source region
self._rad = self['rad'].real()
self._src_reg.append(gammalib.GSkyRegionCircle(self._src_dir, self._rad))
# ... otherwise if source shape is a rectangle then append
# GSkyRegionRectangle
elif self._srcshape == 'RECT':
# Set rectangular source region
self._reg_width = self['width'].real()
self._reg_height = self['height'].real()
self._reg_posang = self['posang'].real()
self._src_reg.append(gammalib.GSkyRegionRectangle(self._src_dir,
self._reg_width,
self._reg_height,
self._reg_posang))
# Return
return
def _get_parameters_bkgmethod_reflected(self):
"""
Get parameters for REFLECTED background method
"""
# Query parameters for source/On region definition
self._get_source_parameters()
# Query minimum number of background regions and
# number of background regions to skip next to On region
self['bkgregmin'].integer()
self['bkgregskip'].integer()
# Return
return
def _get_parameters_bkgmethod_custom(self):
"""
Get parameters for CUSTOM background method
Raises
------
RuntimeError
Only one On region is allowed
"""
# Set up source region
filename = self['srcregfile'].filename()
self._src_reg = self._get_regions(filename)
# Raise an exception if there is more than one source region
if len(self._src_reg) != 1:
raise RuntimeError('Only one On region is allowed')
# Set up source direction. Query parameters if neccessary.
if self._models.is_empty():
if isinstance(self._src_reg[0], gammalib.GSkyRegionCircle):
self._src_dir = self._src_reg[0].centre()
self._rad = self._src_reg[0].radius()
else:
self._query_src_direction()
# Make sure that all CTA observations have an Off region by loading the
# Off region region the parameter 'bkgregfile' for all CTA observations
# without Off region
for obs in self.obs():
if obs.classname() == 'GCTAObservation':
if obs.off_regions().is_empty():
filename = self['bkgregfile'].filename()
regions = self._get_regions(filename)
obs.off_regions(regions)
# Return
return
def _get_parameters_bkgmethod_off(self):
"""
Get parameters for OFF background method
Raises
------
RuntimeError
On and Off observations must have same size
RuntimeError
Off observations must be event lists
"""
# Set up Off observations. If there are no Off observations in the
# container then load them via user parameters
if self.obs_off().is_empty():
# Get Off observation file name
filename = self['inobsoff'].filename()
# If Off observation is a FITS file then load observation and
# append it to the Off observation container
if gammalib.GFilename(filename).is_fits():
self._obs_off.append(gammalib.GCTAObservation(filename))
# ... otherwise load XML file into Off observation container
else:
self._obs_off.load(filename)
# Check that size of On and Off observations are the same, otherwise
# raise error
if self.obs().size() != self.obs_off().size():
raise RuntimeError('On and Off observations must have the same size')
# Loop through observations
for obs in self.obs_off():
# Check that observation is event list, otherwise throw error
if obs.eventtype() != "EventList":
raise RuntimeError('Off observations must be event lists')
# Check that they have response, otherwise assign based on user parameter
if obs.has_response() == False:
# Get database and IRF
database = self["caldb"].string()
irf = self["irf"].string()
# Create an XML element for response
parameter = "parameter name=\"Calibration\"" +\
" database=\"" + database + "\"" +\
" response=\"" + irf + "\""
xml = gammalib.GXmlElement()
xml.append(parameter)
# Create CTA response
response = gammalib.GCTAResponseIrf(xml)
# Attach response to observation
obs.response(response)
# Add models from Off observations to model container
for model in self.obs_off().models():
self._models.append(model)
# Query parameters for source/On region definition
self._get_source_parameters()
# Return
return
def _get_parameters_bkgmethod(self):
"""
Get background method parameters
"""
# Get background method
bkgmethod = self['bkgmethod'].string()
# Get background method dependent parameters
if bkgmethod == 'REFLECTED':
self._get_parameters_bkgmethod_reflected()
elif bkgmethod == 'CUSTOM':
self._get_parameters_bkgmethod_custom()
elif bkgmethod == 'OFF':
self._get_parameters_bkgmethod_off()
# Query parameters that are needed for all background methods
self['maxoffset'].real()
self['use_model_bkg'].boolean()
# Return
return
def _get_parameters(self):
"""
Get parameters from parfile and setup observations
"""
# Clear source models
self._models.clear()
# Setup observations (require response and allow event list, don't
# allow counts cube)
self._setup_observations(self.obs(), True, True, False)
# Get source model and source name. First try to extract models from
# observation container. If this does not work then try creating
# model from the inmodel parameter
if self.obs().models().size() > 0:
self._models = self.obs().models().clone()
self._srcname = self['srcname'].string()
elif self['inmodel'].is_valid():
inmodel = self['inmodel'].filename()
self._models = gammalib.GModels(inmodel)
self._srcname = self['srcname'].string()
# Set energy bounds
self._ebounds = self._create_ebounds()
# Initialize empty src regions container
self._src_reg = gammalib.GSkyRegions()
# Exclusion map
if (self._excl_reg is not None) and (self._excl_reg.map().npix() > 0):
# Exclusion map set and is not empty
self._has_exclusion = True
elif self['inexclusion'].is_valid():
inexclusion = self['inexclusion'].filename()
# If the user has not specified the extension to use
# and there is an extension called 'EXCLUSION' ...
if not inexclusion.has_extname()\
and not inexclusion.has_extno()\
and gammalib.GFits(inexclusion).contains('EXCLUSION'):
# ... choose it for the exclusion map
extname = inexclusion.url() + '[EXCLUSION]'
inexclusion = gammalib.GFilename(extname)
# Otherwise will pick the default (primary) HDU
self._excl_reg = gammalib.GSkyRegionMap(inexclusion)
self._has_exclusion = True
else:
self._has_exclusion = False
# Get background method parameters (have to come after setting up of
# observations and models)
self._get_parameters_bkgmethod()
# If there are multiple observations query whether to stack them
if self.obs().size() > 1:
self['stack'].boolean()
# Query ahead output parameters
if (self._read_ahead()):
self['outobs'].filename()
self['outmodel'].filename()
self['prefix'].string()
# Write input parameters into logger
self._log_parameters(gammalib.TERSE)
# Set number of processes for multiprocessing
self._nthreads = mputils.nthreads(self)
# If we have no model then create now a dummy model
if self._models.is_empty():
spatial = gammalib.GModelSpatialPointSource(self._src_dir)
spectral = gammalib.GModelSpectralPlaw(1.0e-18, -2.0,
gammalib.GEnergy(1.0, 'TeV'))
model = gammalib.GModelSky(spatial, spectral)
model.name('Dummy')
self._models.append(model)
self._srcname = 'Dummy'
self['use_model_bkg'].boolean(False)
# Return
return
def _compute_region_separation(self, pnt_dir):
"""
Compute the separation angle for reflected off regions in radians
Returns
-------
angle : float
Separation angle of two off regions (radians)
"""
# Initialise the result
separation = -1.0
# Compute offset of reflected regions to pointing position
offset = pnt_dir.dist_deg(self._src_dir)
# If shape is a circle then compute apparent diameter of the circle
# as separation
if self._srcshape == 'CIRCLE':
separation = 2.0 * self._rad / offset
# ... otherwise if shape is a rectangle then compute the opening angle
# towards combinations of rectangle corners. This method overestimates
# the real need of space between the ectangles, so the method may be
# optimised to gain more off regions! Anyway, it is assured that the
# off regions will never overlap.
elif self._srcshape == 'RECT':
# Get the sky directions of the corners of the rectangle
cs = [self._src_reg[0].corner(icorner) for icorner in range(4)]
# Compute the 6 opening angles
combinations = [[0,1], [0,2], [0,3], [1,2], [1,3], [2,3]]
angles = [self._compute_posang(pnt_dir, cs[i], cs[j]) \
for i,j in combinations]
# The desired separation is the maximum opening angle
separation = max(angles) * gammalib.deg2rad
# Return
return separation
def _reflected_regions(self, obs):
"""
Calculate list of reflected regions for a single observation (pointing)
Parameters
----------
obs : `~gammalib.GCTAObservation()`
CTA observation
Returns
-------
regions : `~gammalib.GSkyRegions`
List of reflected regions
"""
# Initialise list of reflected regions
regions = gammalib.GSkyRegions()
# Get offset angle of source
pnt_dir = obs.pointing().dir()
offset = pnt_dir.dist_deg(self._src_dir)
# Skip observation if it is too close to source
if self._src_reg.contains(pnt_dir):
msg = ' Skip because observation is pointed at %.3f deg from source'\
% (offset)
if self._srcshape == 'CIRCLE':
msg += ' (circle rad=%.3f).' % (self._rad)
self._log_string(gammalib.NORMAL, msg)
# ... otherwise
else:
posang = pnt_dir.posang_deg(self._src_dir)
if (self._srcshape == 'CIRCLE') or (self._srcshape == 'RECT'):
# Determine number of background regions to skip
N_skip = self['bkgregskip'].integer()
N_lim = 1 + 2 * N_skip
# Compute the angular separation of reflected regions wrt
# camera center. The factor 1.05 ensures background regions
# do not overlap due to numerical precision issues
alpha = 1.05 * self._compute_region_separation(pnt_dir)
# Compute number of reflected regions by dividing the angular
# separation by 2 pi.
N = int(2.0 * math.pi / alpha)
# If there are not enough reflected regions then skip the
# observation ...
if N < self['bkgregmin'].integer() + N_lim:
msg = ' Skip because the number %d of reflected regions '\
'for background estimation is smaller than '\
'"bkgregmin"=%d.' % (N-N_lim, self['bkgregmin'].integer())
self._log_string(gammalib.NORMAL, msg)
# ... otherwise loop over position angle to create reflected
# regions
else:
# Append reflected regions
alpha = 360.0 / N
dphi_max = 360.0 - alpha * (1 + N_skip)
dphi = alpha * (1 + N_skip)
while dphi <= dphi_max:
ctr_dir = pnt_dir.clone()
ctr_dir.rotate_deg(posang + dphi, offset)
if self._srcshape == 'CIRCLE':
region = gammalib.GSkyRegionCircle(ctr_dir, self._rad)
elif self._srcshape == 'RECT':
# Adjust the posang of the rectangle correspondingly
region = gammalib.GSkyRegionRectangle(ctr_dir,
self._reg_width,
self._reg_height,
self._reg_posang + dphi)
if self._has_exclusion:
if self._excl_reg.overlaps(region):
# Signal region overlap
msg = ' Reflected region overlaps with '\
'exclusion region.'
self._log_string(gammalib.EXPLICIT, msg)
# If region overlaps with exclusion region
# try to increment by 10% of angular step
dphi += 0.1 * alpha
else:
regions.append(region)
dphi += alpha
else:
regions.append(region)
dphi += alpha
# Check again that we have enough background regions
# now that we have checked for overlap with exclusion region
if regions.size() >= self['bkgregmin'].integer():
# Log number of reflected regions
msg = ' Use %d reflected regions.' % (regions.size())
self._log_string(gammalib.NORMAL, msg)
# Otherwise log observation skipped and return empty region container
else:
msg = ' Skip because the number %d of regions' \
'for background estimation not overlapping ' \
'with the exclusion region is smaller than ' \
'"bkgregmin"=%d.' % \
(regions.size(), self['bkgregmin'].integer())
self._log_string(gammalib.NORMAL, msg)
regions = gammalib.GSkyRegions()
# Return reflected regions
return regions
def _instrument_regions(self, obs, obs_off):
"""
Compute background regions for Off observation
Calculate background region in Off observation that corresponds to the
source region in the On observation in instrument coordinates
Parameters
----------
obs : `~gammalib.GCTAObservation()`
On CTA observation
obs_off : `~gammalib.GCTAObservation()`
Off CTA observation
Returns
-------
regions : `~gammalib.GSkyRegions`
Container with background region
"""
# Initialise region container
regions = gammalib.GSkyRegions()
# Convert source position in On observation to instrument coordinates
instdir = obs.pointing().instdir(self._src_dir)
# Convert instrument position to sky direction for Off observation
off_dir = obs_off.pointing().skydir(instdir)
# Build region according to shape specified by user
# If circle
if self._srcshape == 'CIRCLE':
region = gammalib.GSkyRegionCircle(off_dir, self._rad)
# ... otherwise if rectangle
elif self._srcshape == 'RECT':
# Instrument coordinates take sky direction as reference
# so no need to change the position angle
region = gammalib.GSkyRegionRectangle(off_dir,
self._reg_width,
self._reg_height,
self._reg_posang)
# Check if background region overlaps with exclusion region
is_valid = True
if self._has_exclusion:
if self._excl_reg.overlaps(region):
# Signal region overlap
msg = ' Background region overlaps with exclusion region.'
self._log_string(gammalib.EXPLICIT, msg)
is_valid = False
# If region is valid then append it to container
if is_valid:
regions.append(region)
# Return
return regions
def _set_models(self, results):
"""
Set models for On/Off fitting
The method does the following
- append "OnOff" to the instrument name of all background models
- fix all spatial and temporal parameters
Parameters
----------
results : list of dict
Result dictionaries
Returns
-------
models : `~gammalib.GModels()`
Model container
"""
# Write header
self._log_header1(gammalib.NORMAL, 'Set models')
# Initialise model container
models = gammalib.GModels()
# Initialies stacked model flag
has_stacked_model = False
# Loop over all models in observation and append "OnOff" to instrument
# names
for model in self._models:
# Initialise model usage
use_model = False
# If model is a background model then check if it will be
# used
if 'GCTA' in model.classname():
# Skip model if background model should not be used
if not self['use_model_bkg'].boolean():
self._log_string(gammalib.NORMAL, ' Skip "%s" model "%s" (%s)' % \
(model.instruments(), model.name(), model.ids()))
continue
# Check if model corresponds to one of the relevant
# observations
for result in results:
if model.is_valid(result['instrument'], result['id']):
if result['bkg_reg'].size() > 0:
use_model = True
break
# If stacked analysis is requested then just use for model
# and remove instrument ID
if self['stack'].boolean():
# If there is already a model for stacked analysis then
# skip this one
if has_stacked_model:
msg = ' Skip "%s" model "%s" (%s). There is already ' \
'a model for stacked analysis.' % \
(model.instruments(), model.name(), model.ids())
self._log_string(gammalib.NORMAL, msg)
continue
# ... otherwise use model for stacked analysis
else:
has_stacked_model = True
use_model = True
model.ids('')
# Append "OnOff" to instrument name
model.instruments(model.instruments()+'OnOff')
# ... otherwise, if model is not a background model then use it
else:
use_model = True
# If model is relevant then append it now to the model
# container
if use_model:
# Log model usage
self._log_string(gammalib.NORMAL, ' Use "%s" model "%s" (%s)' % \
(model.instruments(), model.name(), model.ids()))
# Append model to container
models.append(model)
# ... otherwise signal that model is skipped
else:
self._log_string(gammalib.NORMAL, ' Skip "%s" model "%s" (%s)' % \
(model.instruments(), model.name(), model.ids()))
# Return model container
return models
def _set_statistic(self, obs):
"""
Set statistic for observation
If the "use_model_bkg" is true then set statistic to "cstat",
otherwise set it to "wstat"
Parameters
----------
obs : `~gammalib.GObservation()`
Observation
Returns
-------
obs : `~gammalib.GObservation()`
Observation
"""
# Set statistic according to background model usage
if self['use_model_bkg'].boolean():
obs.statistic('cstat')
else:
obs.statistic('wstat')
# Return observation
return obs
def _etrue_ebounds(self):
"""
Set true energy boundaries
Returns
-------
ebounds : `~gammalib.GEbounds()`
True energy boundaries
"""
# Determine minimum and maximum energies
emin = self._ebounds.emin() * 0.5
emax = self._ebounds.emax() * 1.2
if emin.TeV() < self['etruemin'].real():
emin = gammalib.GEnergy(self['etruemin'].real(), 'TeV')
if emax.TeV() > self['etruemax'].real():
emax = gammalib.GEnergy(self['etruemax'].real(), 'TeV')
# Determine number of energy bins
n_decades = (emax.log10TeV() - emin.log10TeV())
n_bins = int(n_decades * float(self['etruebins'].integer()) + 0.5)
if n_bins < 1:
n_bins = 1
# Set energy boundaries
ebounds = gammalib.GEbounds(n_bins, emin, emax)
# Write header
self._log_header1(gammalib.TERSE, 'True energy binning')
# Log true energy bins
for i in range(ebounds.size()):
value = '%s - %s' % (str(ebounds.emin(i)), str(ebounds.emax(i)))
self._log_value(gammalib.TERSE, 'Bin %d' % (i+1), value)
# Return energy boundaries
return ebounds
def _set_background_regions(self, obs, obs_off=None):
"""
Set background regions for an observation
Parameters
----------
obs : `~gammalib.GCTAObservation()`
CTA observation
Returns
-------
regions : `~gammalib.GSkyRegions()`
Background regions
"""
# Initialise empty background regions for this observation
bkg_reg = gammalib.GSkyRegions()
# If reflected background is requested then create reflected
# background regions
if self['bkgmethod'].string() == 'REFLECTED':
bkg_reg = self._reflected_regions(obs)
# ... otherwise if custom background is requested then get the
# background regions from the observation. We use a copy here since
# otherwise the background regions go out of scope once the observations
# are replaced by the On/Off observations.
elif self['bkgmethod'].string() == 'CUSTOM':
bkg_reg = obs.off_regions().copy()
# ... otherwise if dedicated Off runs are use then
# use background region that correspond to the same instrument coordinates
if self['bkgmethod'].string() == 'OFF':
bkg_reg = self._instrument_regions(obs,obs_off)
# Return background regions
return bkg_reg
def _process_observation(self,i):
"""
Generate On/Off spectra for individual observation
Parameters
----------
i : int
Observation number
Returns
-------
result : dict
On/Off spectra, background regions, observation id
"""
# Retrieve observation from container
onoff = None
bkg_reg = None
obs = self.obs()[i]
# Retrieve dedicated Off observation if it exists
if not self.obs_off().is_empty():
obs_off = self.obs_off()[i]
# Otherwise use the same as On
else:
obs_off = self.obs()[i]
# Log header
self._log_header3(gammalib.NORMAL,'%s observation "%s"' % \
(obs.instrument(), obs.id()))
# Skip non CTA observations
if obs.classname() != 'GCTAObservation':
self._log_string(gammalib.NORMAL, ' Skip because not a "GCTAObservation"')
# Otherwise calculate On/Off spectra
else:
# Get background model usage flag and log flag
use_model_bkg = self['use_model_bkg'].boolean()
if use_model_bkg:
msg = ' Use background model.'
else:
msg = ' Background model not used, assume constant backround rate.'
self._log_string(gammalib.NORMAL, msg)
# Get offset angle of source
pnt_dir = obs.pointing().dir()
offset = pnt_dir.dist_deg(self._src_dir)
# Skip observation if it is pointed too far from the source
if offset >= self['maxoffset'].real():
msg = ' Skip because observation is pointed at %.3f deg >= ' \
'"maxoffset=%.3f" from source.' \
% (offset, self['maxoffset'].real())
self._log_string(gammalib.NORMAL, msg)
# ... otherwise continue to process
else:
# Set background regions for this observation
bkg_reg = self._set_background_regions(obs,obs_off)
# If there are any background regions then create On/Off observation
# and append it to the output container
if bkg_reg.size() >= 0:
# Create On/Off observation
onoff = gammalib.GCTAOnOffObservation(obs, obs_off,
self._models,
self._srcname,
self._etruebounds,
self._ebounds,
self._src_reg,
bkg_reg,
use_model_bkg)
# Set On/Off observation ID
onoff.id(obs.id())
# Otherwise log observation skipped
else:
msg = ' Skip because no valid Off regions could be determined'
self._log_string(gammalib.NORMAL, msg)
# Construct dictionary with results
result = {'onoff' : onoff,
'bkg_reg' : bkg_reg,
'instrument': obs.instrument(),
'id' : obs.id()}
# Return results
return result
def _unpack_result(self, outobs, result):
"""
Unpack result from calculation of On/Off regions
Parameters
----------
outobs : `~gammalib.GObservations`
Observation container
result : dict
On/Off spectra, background regions, observation id
Returns
-------
outobs : `~gammalib.GObservations`
Observation container with result appended
"""
# Continue only if result is valid
if result['onoff'] != None:
# If the results contain an On/Off observation
if result['onoff'].classname() == 'GCTAOnOffObservation':
# Set statistic according to background model usage
obs = self._set_statistic(result['onoff'])
# Append observation to observation container
outobs.append(obs)
# Append background regions
self._bkg_regs.append({'regions': result['bkg_reg'],
'id': result['id']})
# Return observation container
return outobs
# Public methods
def run(self):
"""
Run the script
"""
# Switch screen logging on in debug mode
if self._logDebug():
self._log.cout(True)
# Get parameters
self._get_parameters()
# Write observation into logger
self._log_observations(gammalib.NORMAL, self.obs(), 'Observation')
if not self.obs_off().is_empty():
self._log_observations(gammalib.NORMAL, self._obs_off, 'Off Observation')
# Set true energy bins
self._etruebounds = self._etrue_ebounds()
# Write header
self._log_header1(gammalib.TERSE, 'Spectral binning')
# Log reconstructed energy bins
for i in range(self._ebounds.size()):
value = '%s - %s' % (str(self._ebounds.emin(i)),
str(self._ebounds.emax(i)))
self._log_value(gammalib.TERSE, 'Bin %d' % (i+1), value)
# Write header
self._log_header1(gammalib.NORMAL,
'Generation of source and background spectra')
# Initialise run variables
outobs = gammalib.GObservations()
self._bkg_regs = []
results = []
# If there is more than one observation and we use multiprocessing
if self._nthreads > 1 and self.obs().size() > 1:
# Compute observations
args = [(self, '_process_observation', i)
for i in range(self.obs().size())]
poolresults = mputils.process(self._nthreads, mputils.mpfunc, args)
# Construct results
for i in range(self.obs().size()):
result = poolresults[i][0]
outobs = self._unpack_result(outobs, result)
results.append(result)
self._log_string(gammalib.TERSE, poolresults[i][1]['log'], False)
# Otherwise, loop through observations and generate pha, arf, rmf files
else:
for i in range(self.obs().size()):
# Process individual observation
result = self._process_observation(i)
outobs = self._unpack_result(outobs, result)
results.append(result)
# Stack observations
if outobs.size() > 1 and self['stack'].boolean():
# Write header
self._log_header1(gammalib.NORMAL, 'Stacking %d observations' %
(outobs.size()))
# Stack observations
stacked_obs = gammalib.GCTAOnOffObservation(outobs)
# Set statistic according to background model usage
stacked_obs = self._set_statistic(stacked_obs)
# Put stacked observations in output container
outobs = gammalib.GObservations()
outobs.append(stacked_obs)
# Create models that allow On/Off fitting
models = self._set_models(results)
# Set models in output container
outobs.models(models)
# Set observation container
self.obs(outobs)
# Return
return
def save(self):
"""
Save data
"""
# Write header
self._log_header1(gammalib.TERSE, 'Save data')
# Get XML output filename, prefix and clobber
outobs = self['outobs'].filename()
outmodel = self['outmodel'].filename()
prefix = self['prefix'].string()
clobber = self['clobber'].boolean()
# Loop over all observation in container
for obs in self.obs():
# Set filenames
if self['stack'].boolean():
onname = prefix + '_stacked_pha_on.fits'
offname = prefix + '_stacked_pha_off.fits'
arfname = prefix + '_stacked_arf.fits'
rmfname = prefix + '_stacked_rmf.fits'
elif self.obs().size() > 1:
onname = prefix + '_%s_pha_on.fits' % (obs.id())
offname = prefix + '_%s_pha_off.fits' % (obs.id())
arfname = prefix + '_%s_arf.fits' % (obs.id())
rmfname = prefix + '_%s_rmf.fits' % (obs.id())
else:
onname = prefix + '_pha_on.fits'
offname = prefix + '_pha_off.fits'
arfname = prefix + '_arf.fits'
rmfname = prefix + '_rmf.fits'
# Set background and response file names in On spectrum
obs.on_spec().backfile(offname)
obs.on_spec().respfile(rmfname)
obs.on_spec().ancrfile(arfname)
# Save files
obs.on_spec().save(onname, clobber)
obs.off_spec().save(offname, clobber)
obs.arf().save(arfname, clobber)
obs.rmf().save(rmfname, clobber)
# Stamp files
self._stamp(onname)
self._stamp(offname)
self._stamp(arfname)
self._stamp(rmfname)
# Log file names
self._log_value(gammalib.NORMAL, 'PHA on file', onname)
self._log_value(gammalib.NORMAL, 'PHA off file', offname)
self._log_value(gammalib.NORMAL, 'ARF file', arfname)
self._log_value(gammalib.NORMAL, 'RMF file', rmfname)
# Save observation definition XML file
self.obs().save(outobs)
# Save model definition XML file
self.obs().models().save(outmodel)
# Log file names
self._log_value(gammalib.NORMAL, 'Obs. definition XML file', outobs.url())
self._log_value(gammalib.NORMAL, 'Model definition XML file', outmodel.url())
# Save ds9 On region file
regname = prefix + '_on.reg'
self._src_reg.save(regname)
self._log_value(gammalib.NORMAL, 'On region file', regname)
# Save ds9 Off region files
for bkg_reg in self._bkg_regs:
# Set filename
if len(self._bkg_regs) > 1:
regname = prefix + '_%s_off.reg' % (bkg_reg['id'])
else:
regname = prefix + '_off.reg'
# Save ds9 region file
bkg_reg['regions'].save(regname)
# Log file name
self._log_value(gammalib.NORMAL, 'Off region file', regname)
# Return
return
def exclusion_map(self, object=None):
"""
Return and optionally set the exclusion regions map
Parameters
----------
object : `~gammalib.GSkyRegion` or `~gammalib.GSkyMap` or `~gammalib.GFilename`
Exclusion regions object
Returns
-------
region : `~gammalib.GSkyRegionMap`
Exclusion regions map
"""
# If a regions object is provided then set the regions ...
if object is not None:
self._excl_reg = gammalib.GSkyRegionMap(object)
# Return
return self._excl_reg
def obs_off(self, obs=None):
"""
Return and optionally set the Off observations
Parameters
----------
obs : `~gammalib.GCTAObservations`
Off observations container
Returns
-------
observation container : `~gammalib.GCTAObservations`
Off observations container
"""
# If an observation container is provided then set the Off observations ...
if obs is not None:
self._obs_off = obs
# Return
return self._obs_off
# ======================== #
# Main routine entry point #
# ======================== #
if __name__ == '__main__':
# Create instance of application
app = csphagen(sys.argv)
# Execute application
app.execute()
| gpl-3.0 | -5,513,716,661,560,983,000 | 34.768174 | 99 | 0.522549 | false |
uclouvain/osis | base/tests/views/test_learning_unit_proposal.py | 1 | 55572 | ##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2019 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
import datetime
from unittest import mock
from django.contrib import messages
from django.contrib.messages import get_messages
from django.contrib.messages.storage.fallback import FallbackStorage
from django.http import HttpResponseNotFound, HttpResponse, HttpResponseForbidden
from django.test import TestCase, RequestFactory
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from waffle.testutils import override_flag
from attribution.tests.factories.attribution_charge_new import AttributionChargeNewFactory
from attribution.tests.factories.attribution_new import AttributionNewFactory
from base.business import learning_unit_proposal as proposal_business
from base.business.learning_unit_proposal import INITIAL_DATA_FIELDS, copy_learning_unit_data
from base.forms.learning_unit.edition import LearningUnitProposalEndDateForm
from base.forms.learning_unit_proposal import ProposalLearningUnitForm
from base.models import proposal_learning_unit
from base.models.academic_year import AcademicYear
from base.models.enums import groups
from base.models.enums import learning_component_year_type
from base.models.enums import learning_unit_year_periodicity
from base.models.enums import organization_type, entity_type, \
learning_unit_year_subtypes, proposal_type, learning_container_year_types, proposal_state
from base.models.enums.proposal_state import ProposalState, LimitedProposalState
from base.models.enums.proposal_type import ProposalType
from base.tests.factories import campus as campus_factory, organization as organization_factory, \
person as person_factory
from base.tests.factories.academic_calendar import generate_proposal_calendars, \
generate_proposal_calendars_without_start_and_end_date
from base.tests.factories.academic_year import create_current_academic_year, \
AcademicYearFactory
from base.tests.factories.business.learning_units import GenerateContainer
from base.tests.factories.campus import CampusFactory
from base.tests.factories.entity import EntityFactory
from base.tests.factories.entity_version import EntityVersionFactory
from base.tests.factories.group import CentralManagerGroupFactory, FacultyManagerGroupFactory
from base.tests.factories.learning_component_year import LearningComponentYearFactory
from base.tests.factories.learning_container_year import LearningContainerYearFactory
from base.tests.factories.learning_unit import LearningUnitFactory
from base.tests.factories.learning_unit_year import LearningUnitYearFactory
from base.tests.factories.learning_unit_year import LearningUnitYearFakerFactory
from base.tests.factories.organization import OrganizationFactory
from base.tests.factories.person import PersonFactory
from base.tests.factories.proposal_learning_unit import ProposalLearningUnitFactory
from base.tests.factories.tutor import TutorFactory
from base.tests.factories.user import UserFactory
from base.views.learning_units.proposal.update import update_learning_unit_proposal, \
learning_unit_modification_proposal, learning_unit_suppression_proposal
from base.views.learning_units.search.proposal import ACTION_CONSOLIDATE, ACTION_BACK_TO_INITIAL, ACTION_FORCE_STATE
from learning_unit.tests.factories.central_manager import CentralManagerFactory
from learning_unit.tests.factories.faculty_manager import FacultyManagerFactory
from reference.tests.factories.language import LanguageFactory, FrenchLanguageFactory
LABEL_VALUE_BEFORE_PROPOSAL = _('Value before proposal')
@override_flag('learning_unit_proposal_update', active=True)
class TestLearningUnitModificationProposal(TestCase):
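    # Exercises the learning unit modification proposal view: access control,
    # form rendering pre-filled from the existing learning unit year, and
    # proposal creation on a valid POST.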
@classmethod
def setUpTestData(cls):
academic_years = AcademicYearFactory.produce(number_past=3, number_future=10)
an_organization = OrganizationFactory(type=organization_type.MAIN)
current_academic_year = create_current_academic_year()
generate_proposal_calendars_without_start_and_end_date(academic_years)
cls.entity_version = EntityVersionFactory(
entity__organization=an_organization,
entity_type=entity_type.FACULTY,
start_date=current_academic_year.start_date,
end_date=current_academic_year.end_date
)
learning_container_year = LearningContainerYearFactory(
acronym="LOSIS1212",
academic_year=current_academic_year,
container_type=learning_container_year_types.COURSE,
requirement_entity=cls.entity_version.entity,
allocation_entity=cls.entity_version.entity,
additional_entity_1=cls.entity_version.entity,
additional_entity_2=cls.entity_version.entity,
)
cls.learning_unit_year = LearningUnitYearFakerFactory(
acronym=learning_container_year.acronym,
subtype=learning_unit_year_subtypes.FULL,
academic_year=current_academic_year,
learning_container_year=learning_container_year,
quadrimester=None,
specific_title_english="title english",
campus=CampusFactory(organization=an_organization, is_administration=True),
internship_subtype=None
)
cls.person = FacultyManagerFactory(entity=cls.entity_version.entity).person
cls.url = reverse(learning_unit_modification_proposal, args=[cls.learning_unit_year.id])
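        # Valid POST payload mirroring the current state of the learning unit
        # year, plus the management data required by the component formset.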
cls.form_data = {
"academic_year": cls.learning_unit_year.academic_year.id,
"acronym_0": cls.learning_unit_year.acronym[0],
"acronym_1": cls.learning_unit_year.acronym[1:],
"common_title": cls.learning_unit_year.learning_container_year.common_title,
"common_title_english": cls.learning_unit_year.learning_container_year.common_title_english,
"specific_title": cls.learning_unit_year.specific_title,
"specific_title_english": cls.learning_unit_year.specific_title_english,
"container_type": cls.learning_unit_year.learning_container_year.container_type,
"internship_subtype": "",
"credits": cls.learning_unit_year.credits,
"periodicity": cls.learning_unit_year.periodicity,
"status": cls.learning_unit_year.status,
"language": cls.learning_unit_year.language.pk,
"quadrimester": "",
"campus": cls.learning_unit_year.campus.id,
"session": cls.learning_unit_year.session,
"entity": cls.entity_version.id,
"folder_id": "1",
"state": proposal_state.ProposalState.FACULTY.name,
'requirement_entity': cls.entity_version.id,
'allocation_entity': cls.entity_version.id,
'additional_entity_1': cls.entity_version.id,
            'additional_entity_2': cls.entity_version.id,
# Learning component year data model form
'component-TOTAL_FORMS': '2',
'component-INITIAL_FORMS': '0',
'component-MAX_NUM_FORMS': '2',
'component-0-hourly_volume_total_annual': 20,
'component-0-hourly_volume_partial_q1': 10,
'component-0-hourly_volume_partial_q2': 10,
'component-0-planned_classes': 1,
'component-1-hourly_volume_total_annual': 20,
'component-1-hourly_volume_partial_q1': 10,
'component-1-hourly_volume_partial_q2': 10,
'component-1-planned_classes': 1,
}
cls.academic_year_for_suppression_proposal = AcademicYear.objects.filter(
year=cls.learning_unit_year.academic_year.year - 1)
def setUp(self):
self.client.force_login(self.person.user)
def test_user_not_logged(self):
self.client.logout()
response = self.client.get(self.url)
self.assertRedirects(response, '/login/?next={}'.format(self.url))
def test_user_has_not_permission(self):
person = person_factory.PersonFactory()
self.client.force_login(person.user)
response = self.client.get(self.url)
self.assertEqual(response.status_code, HttpResponseForbidden.status_code)
self.assertTemplateUsed(response, "access_denied.html")
def test_get_request(self):
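        # The GET view should render the creation form pre-filled with the
        # current values of the learning unit year and its container.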
response = self.client.get(self.url)
self.assertEqual(response.status_code, HttpResponse.status_code)
self.assertTemplateUsed(response, 'learning_unit/proposal/create_modification.html')
self.assertEqual(response.context['learning_unit_year'], self.learning_unit_year)
self.assertEqual(response.context['person'], self.person)
self.assertIsInstance(response.context['form_proposal'], ProposalLearningUnitForm)
luy_initial = response.context['learning_unit_year_form'].initial
lcy_initial = response.context['learning_container_year_form'].initial
self.assertEqual(luy_initial['academic_year'], self.learning_unit_year.academic_year.id)
self.assertEqual(luy_initial['acronym'], [
self.learning_unit_year.acronym[0], self.learning_unit_year.acronym[1:]])
self.assertEqual(luy_initial['specific_title'], self.learning_unit_year.specific_title)
        self.assertEqual(lcy_initial['container_type'],
                         self.learning_unit_year.learning_container_year.container_type)
self.assertEqual(luy_initial['credits'], self.learning_unit_year.credits)
self.assertEqual(luy_initial['periodicity'], self.learning_unit_year.periodicity)
self.assertEqual(luy_initial['status'], self.learning_unit_year.status)
self.assertEqual(luy_initial['language'], self.learning_unit_year.language.pk)
self.assertEqual(luy_initial['campus'], self.learning_unit_year.campus.id)
def test_post_request_with_invalid_form(self):
response = self.client.post(self.url, data={})
self.assertEqual(response.status_code, HttpResponse.status_code)
self.assertTemplateUsed(response, 'learning_unit/proposal/create_modification.html')
self.assertEqual(response.context['learning_unit_year'], self.learning_unit_year)
self.assertEqual(response.context['person'], self.person)
self.assertIsInstance(response.context['form_proposal'], ProposalLearningUnitForm)
def test_post_request(self):
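        # A valid POST should create the proposal, record the request author
        # and redirect to the learning unit page with a success message.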
response = self.client.post(self.url, data=self.form_data)
redirected_url = reverse("learning_unit", args=[self.learning_unit_year.id])
self.assertRedirects(response, redirected_url, fetch_redirect_response=False)
a_proposal_learning_unit = proposal_learning_unit.find_by_learning_unit_year(self.learning_unit_year)
self.assertTrue(a_proposal_learning_unit)
self.assertEqual(a_proposal_learning_unit.author, self.person)
messages_list = [str(message) for message in get_messages(response.wsgi_request)]
self.assertIn(
_("You proposed a modification of type %(type)s for the learning unit %(acronym)s.") % {
'type': proposal_type.ProposalType.MODIFICATION.value,
'acronym': self.learning_unit_year.acronym
},
            messages_list)
def test_initial_data_fields(self):
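        # Guard test: the fields copied into a proposal's initial data must
        # stay in sync with this expected mapping.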
expected_initial_data_fields = {
'learning_container_year': [
"id", "acronym", "common_title", "container_type", "in_charge", "common_title_english", "team",
'requirement_entity', 'allocation_entity', 'additional_entity_1', 'additional_entity_2',
],
'learning_unit': [
"id", "end_year",
],
'learning_unit_year': [
"id", "acronym", "specific_title", "internship_subtype", "credits", "campus", "language", "periodicity",
"status", "professional_integration", "specific_title", "specific_title_english", "quadrimester",
"session", "faculty_remark", "other_remark", "other_remark_english"
],
'learning_component_year': [
"id", "acronym", "hourly_volume_total_annual", "hourly_volume_partial_q1", "hourly_volume_partial_q2",
"planned_classes", "type", "repartition_volume_requirement_entity",
"repartition_volume_additional_entity_1", "repartition_volume_additional_entity_2"
],
}
self.assertEqual(expected_initial_data_fields, INITIAL_DATA_FIELDS)
def test_proposal_already_exists(self):
ProposalLearningUnitFactory(learning_unit_year=self.learning_unit_year)
response = self.client.get(self.url)
self.assertEqual(response.status_code, HttpResponseForbidden.status_code)
self.assertTemplateUsed(response, "access_denied.html")
@override_flag('learning_unit_proposal_update', active=True)
class TestLearningUnitSuppressionProposal(TestCase):
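    # Exercises the suppression proposal view: the end date form must only
    # offer years preceding the edited year, and a valid POST must set the
    # learning unit end year accordingly.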
@classmethod
def setUpTestData(cls):
cls.academic_years = AcademicYearFactory.produce(number_past=3, number_future=10)
an_organization = OrganizationFactory(type=organization_type.MAIN)
cls.current_academic_year = cls.academic_years[4]
cls.next_academic_year = cls.academic_years[5]
cls.previous_academic_year = cls.academic_years[3]
generate_proposal_calendars(cls.academic_years)
cls.entity_version = EntityVersionFactory(
entity__organization=an_organization,
entity_type=entity_type.FACULTY,
start_date=cls.academic_years[0].start_date,
end_date=cls.academic_years[-1].end_date
)
learning_container_years = [
LearningContainerYearFactory(
academic_year=year,
container_type=learning_container_year_types.COURSE,
requirement_entity=cls.entity_version.entity,
allocation_entity=cls.entity_version.entity,
) for year in [cls.previous_academic_year, cls.current_academic_year]
]
cls.learning_unit = LearningUnitFactory(
start_year=AcademicYear.objects.first(),
end_year=None
)
cls.learning_unit_year = LearningUnitYearFakerFactory(
acronym="LOSIS1212",
subtype=learning_unit_year_subtypes.FULL,
academic_year=cls.current_academic_year,
learning_container_year=learning_container_years[1],
quadrimester=None,
learning_unit=cls.learning_unit,
campus=CampusFactory(
organization=an_organization,
is_administration=True
),
periodicity=learning_unit_year_periodicity.ANNUAL
)
cls.previous_learning_unit_year = LearningUnitYearFakerFactory(
acronym="LOSIS1212",
subtype=learning_unit_year_subtypes.FULL,
academic_year=cls.previous_academic_year,
learning_container_year=learning_container_years[0],
quadrimester=None,
learning_unit=cls.learning_unit,
campus=cls.learning_unit_year.campus,
periodicity=learning_unit_year_periodicity.ANNUAL
)
cls.person = CentralManagerFactory(entity=cls.entity_version.entity).person
cls.url = reverse(learning_unit_suppression_proposal, args=[cls.learning_unit_year.id])
cls.academic_year_for_suppression_proposal = AcademicYear.objects.filter(
year=cls.learning_unit_year.academic_year.year - 1)
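        # A suppression proposal ends the learning unit the year before the
        # edited academic year, hence that single year is used in the payload
        # and expected in the end date choices.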
cls.form_data = {
"academic_year": cls.academic_year_for_suppression_proposal.first().id,
"entity": cls.entity_version.id,
"folder_id": "1",
"state": ProposalState.FACULTY.name
}
def setUp(self):
self.client.force_login(self.person.user)
def test_get_request(self):
response = self.client.get(self.url)
self.assertEqual(response.status_code, HttpResponse.status_code)
self.assertTemplateUsed(response, 'learning_unit/proposal/create_suppression.html')
self.assertEqual(response.context['learning_unit_year'], self.learning_unit_year)
self.assertEqual(response.context['person'], self.person)
self.assertIsInstance(response.context['form_proposal'], ProposalLearningUnitForm)
self.assertIsInstance(response.context['form_end_date'], LearningUnitProposalEndDateForm)
self.assertCountEqual(
list(response.context['form_end_date'].fields['academic_year'].queryset),
list(self.academic_year_for_suppression_proposal)
)
form_proposal = response.context['form_proposal']
form_end_date = response.context['form_end_date']
self.assertEqual(form_end_date.fields['academic_year'].initial, None)
self.assertTrue(form_end_date.fields['academic_year'].required)
self.assertEqual(form_proposal.fields['folder_id'].initial, None)
self.assertEqual(form_proposal.fields['entity'].initial, None)
def test_get_request_first_year_of_UE(self):
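        # A suppression proposal cannot start on the first year of existence
        # of the learning unit: the view redirects with an error message.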
url = reverse(learning_unit_suppression_proposal, args=[self.previous_learning_unit_year.id])
response = self.client.get(url)
redirected_url = reverse("learning_unit", args=[self.previous_learning_unit_year.id])
self.assertRedirects(response, redirected_url)
msgs = [str(message) for message in get_messages(response.wsgi_request)]
self.assertEqual(
msgs[0],
_("You cannot put in proposal for ending date on the first year of the learning unit.")
)
def test_get_request_on_UE_with_end_date(self):
self.learning_unit.end_year = self.next_academic_year
self.learning_unit.save()
response = self.client.get(self.url)
self.assertEqual(response.status_code, HttpResponse.status_code)
def test_get_request_academic_year_list_in_form_for_central_manager(self):
person_factory.add_person_to_groups(self.person, [groups.CENTRAL_MANAGER_GROUP])
response = self.client.get(self.url)
self.assertCountEqual(
list(response.context['form_end_date'].fields['academic_year'].queryset),
list(self.academic_year_for_suppression_proposal)
)
def test_post_request(self):
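        # A valid POST should create the suppression proposal and set the
        # learning unit end year to the selected academic year.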
response = self.client.post(self.url, data=self.form_data)
redirected_url = reverse("learning_unit", args=[self.learning_unit_year.id])
self.assertRedirects(response, redirected_url, fetch_redirect_response=False)
a_proposal_learning_unit = proposal_learning_unit.find_by_learning_unit_year(self.learning_unit_year)
self.assertTrue(a_proposal_learning_unit)
self.assertEqual(a_proposal_learning_unit.author, self.person)
        messages_list = [str(message) for message in get_messages(response.wsgi_request)]
        self.assertIn(
            _("You proposed a modification of type %(type)s for the learning unit %(acronym)s.") % {
                'type': proposal_type.ProposalType.SUPPRESSION.value,
                'acronym': self.learning_unit_year.acronym
            },
            messages_list
        )
self.learning_unit.refresh_from_db()
self.assertEqual(self.learning_unit.end_year, self.academic_year_for_suppression_proposal.first())
class TestLearningUnitProposalSearch(TestCase):
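    # Exercises the proposal search view: filtering by acronym or tutor, and
    # the "force state" choices offered depending on the manager role.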
@classmethod
def setUpTestData(cls):
AcademicYearFactory.produce(number_past=3, number_future=10)
cls.person = person_factory.PersonWithPermissionsFactory("can_propose_learningunit", "can_access_learningunit")
ac_years = AcademicYearFactory.produce_in_future(quantity=3)
cls.an_entity = EntityFactory()
cls.entity_version = EntityVersionFactory(entity=cls.an_entity, entity_type=entity_type.SCHOOL,
start_date=ac_years[0].start_date,
end_date=ac_years[1].end_date)
cls.proposals = [_create_proposal_learning_unit("LOSIS1211"),
_create_proposal_learning_unit("LOSIS1212"),
_create_proposal_learning_unit("LOSIS1213")]
def setUp(self):
self.client.force_login(self.person.user)
def test_learning_units_proposal_search(self):
url = reverse("learning_units_proposal")
response = self.client.get(url, data={'acronym': self.proposals[0].learning_unit_year.acronym})
self.assertEqual(response.context['learning_units_count'], 1)
def test_learning_units_proposal_search_by_tutor(self):
proposal = _create_proposal_learning_unit("LOSIS1214")
tutor = TutorFactory(person=self.person)
attribution = AttributionNewFactory(tutor=tutor)
learning_unit_component = LearningComponentYearFactory(learning_unit_year=proposal.learning_unit_year)
AttributionChargeNewFactory(attribution=attribution,
learning_component_year=learning_unit_component)
url = reverse("learning_units_proposal")
response = self.client.get(url, data={'tutor': self.person.first_name})
self.assertEqual(response.context['learning_units_count'], 1)
def test_learning_units_proposal_force_state_available_choices_as_faculty_manager(self):
url = reverse("learning_units_proposal")
self.person.user.groups.add(FacultyManagerGroupFactory())
response = self.client.get(url, data={'acronym': self.proposals[0].learning_unit_year.acronym})
state_choices = response.context['form_proposal_state'].fields['state'].choices
self.assertEqual(state_choices, list(LimitedProposalState.choices()))
def test_learning_units_proposal_force_state_available_choices_as_central_manager(self):
url = reverse("learning_units_proposal")
self.person.user.groups.add(CentralManagerGroupFactory())
response = self.client.get(url, data={'acronym': self.proposals[0].learning_unit_year.acronym})
state_choices = response.context['form_proposal_state'].fields['state'].choices
self.assertEqual(state_choices, list(ProposalState.choices()))
class TestGroupActionsOnProposals(TestCase):
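    # Exercises the bulk actions available on selected proposals from the
    # search page: back to initial, consolidate and force state.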
@classmethod
def setUpTestData(cls):
AcademicYearFactory.produce(number_past=3, number_future=10)
cls.person = person_factory.PersonWithPermissionsFactory("can_access_learningunit")
cls.proposals = [_create_proposal_learning_unit("LOSIS1211"),
_create_proposal_learning_unit("LOSIS1212"),
_create_proposal_learning_unit("LOSIS1213")]
cls.url = reverse("learning_units_proposal")
AcademicYearFactory.produce_in_future(quantity=3)
def setUp(self):
self.client.force_login(self.person.user)
def test_when_no_proposals_selected(self):
response = self.client.post(self.url, data={"action": ACTION_BACK_TO_INITIAL}, follow=True)
        messages_list = [str(message) for message in response.context["messages"]]
        self.assertIn(_("No proposals was selected."), messages_list)
@mock.patch("base.business.learning_unit_proposal.cancel_proposals_and_send_report",
side_effect=lambda proposals, author, research_criteria: {})
def test_when_action_is_back_to_initial(self, mock_cancel_proposals):
post_data = {
"action": ACTION_BACK_TO_INITIAL,
"selected_action": [self.proposals[0].learning_unit_year.acronym]
}
self.client.post(self.url, data=post_data, follow=True)
proposals, author, research_criteria = mock_cancel_proposals.call_args[0]
self.assertEqual(list(proposals), [self.proposals[0]])
self.assertEqual(author, self.person)
self.assertFalse(research_criteria)
@mock.patch("base.business.learning_unit_proposal.consolidate_proposals_and_send_report",
side_effect=lambda proposals, author, research_criteria: {})
def test_when_action_is_consolidate(self, mock_consolidate):
post_data = {
"action": ACTION_CONSOLIDATE,
"selected_action": [self.proposals[0].learning_unit_year.acronym]
}
self.client.post(self.url, data=post_data, follow=True)
proposals, author, research_criteria = mock_consolidate.call_args[0]
self.assertEqual(list(proposals), [self.proposals[0]])
self.assertEqual(author, self.person)
self.assertFalse(research_criteria)
@mock.patch("base.business.learning_unit_proposal.force_state_of_proposals",
side_effect=lambda proposals, author, research_criteria: {})
def test_when_action_is_force_state_but_no_new_state(self, mock_force_state):
post_data = {
"action": ACTION_FORCE_STATE,
"selected_action": [self.proposals[0].learning_unit_year.acronym]
}
self.client.post(self.url, data=post_data, follow=True)
self.assertFalse(mock_force_state.called)
@mock.patch("base.business.learning_unit_proposal.force_state_of_proposals",
side_effect=lambda proposals, author, research_criteria: {})
def test_when_action_is_force_state(self, mock_force_state):
post_data = {
"action": ACTION_FORCE_STATE,
"selected_action": [self.proposals[0].learning_unit_year.acronym,
self.proposals[2].learning_unit_year.acronym],
"state": proposal_state.ProposalState.ACCEPTED.name
}
self.client.post(self.url, data=post_data, follow=True)
proposals, author, new_state = mock_force_state.call_args[0]
self.assertCountEqual(list(proposals), [self.proposals[0], self.proposals[2]])
self.assertEqual(author, self.person)
self.assertEqual(new_state, proposal_state.ProposalState.ACCEPTED.name)
@override_flag('learning_unit_proposal_delete', active=True)
class TestLearningUnitProposalCancellation(TestCase):
@classmethod
def setUpTestData(cls):
academic_year = create_current_academic_year()
generate_proposal_calendars_without_start_and_end_date([academic_year])
cls.learning_unit_proposal = _create_proposal_learning_unit("LOSIS1211")
cls.learning_unit_year = cls.learning_unit_proposal.learning_unit_year
cls.person = FacultyManagerFactory(
entity=cls.learning_unit_year.learning_container_year.requirement_entity
).person
def setUp(self):
self.url = reverse('learning_unit_cancel_proposal', args=[self.learning_unit_year.id])
self.client.force_login(self.person.user)
def test_user_not_logged(self):
self.client.logout()
response = self.client.get(self.url)
self.assertRedirects(response, '/login/?next={}'.format(self.url))
def test_user_has_not_permission(self):
person = PersonFactory()
self.client.force_login(person.user)
response = self.client.get(self.url)
self.assertEqual(response.status_code, HttpResponseForbidden.status_code)
self.assertTemplateUsed(response, "access_denied.html")
def test_with_non_existent_learning_unit_year(self):
self.learning_unit_proposal_to_delete = _create_proposal_learning_unit("LOSIS1211D")
self.learning_unit_year_to_delete = self.learning_unit_proposal_to_delete.learning_unit_year
self.person_to_delete = FacultyManagerFactory(
entity=self.learning_unit_year_to_delete.learning_container_year.requirement_entity
).person
self.url = reverse('learning_unit_cancel_proposal', args=[self.learning_unit_year_to_delete.id])
self.client.force_login(self.person_to_delete.user)
self.learning_unit_year_to_delete.delete()
response = self.client.get(self.url)
self.assertEqual(response.status_code, HttpResponseNotFound.status_code)
self.assertTemplateUsed(response, "page_not_found.html")
def test_with_none_person(self):
user = UserFactory()
self.client.force_login(user)
response = self.client.get(self.url)
self.assertEqual(response.status_code, HttpResponseForbidden.status_code)
self.assertTemplateUsed(response, "access_denied.html")
def test_with_no_proposal(self):
self.learning_unit_proposal_to_delete = _create_proposal_learning_unit("LOSIS1211D")
self.learning_unit_year_to_delete = self.learning_unit_proposal_to_delete.learning_unit_year
self.person_to_delete = FacultyManagerFactory(
entity=self.learning_unit_year_to_delete.learning_container_year.requirement_entity
).person
self.url = reverse('learning_unit_cancel_proposal', args=[self.learning_unit_year_to_delete.id])
self.client.force_login(self.person_to_delete.user)
self.learning_unit_proposal_to_delete.delete()
response = self.client.get(self.url)
self.assertEqual(response.status_code, HttpResponseForbidden.status_code)
self.assertTemplateUsed(response, "access_denied.html")
def test_with_proposal_of_state_different_than_faculty(self):
self.learning_unit_proposal.state = proposal_state.ProposalState.CENTRAL.name
self.learning_unit_proposal.save()
response = self.client.get(self.url)
self.assertEqual(response.status_code, HttpResponseForbidden.status_code)
self.assertTemplateUsed(response, "access_denied.html")
def test_user_not_linked_to_current_requirement_entity(self):
person = PersonFactory()
self.client.force_login(person.user)
response = self.client.get(self.url)
self.assertEqual(response.status_code, HttpResponseForbidden.status_code)
self.assertTemplateUsed(response, "access_denied.html")
def test_context_after_valid_get_request(self):
response = self.client.get(self.url)
redirected_url = reverse('learning_unit', args=[self.learning_unit_year.id])
self.assertRedirects(response, redirected_url, fetch_redirect_response=False)
messages = [str(message) for message in get_messages(response.wsgi_request)]
self.assertIn(_("Proposal %(acronym)s (%(academic_year)s) successfully canceled.") % {
"acronym": self.learning_unit_year.acronym,
"academic_year": self.learning_unit_year.academic_year
}, messages)
def test_models_after_cancellation_of_proposal(self):
_modify_learning_unit_year_data(self.learning_unit_year)
_modify_entities_linked_to_learning_container_year(self.learning_unit_year.learning_container_year)
new_entity = self.learning_unit_year.learning_container_year.requirement_entity
FacultyManagerFactory(entity=new_entity, person=self.person)
self.client.get(self.url)
self.learning_unit_year.refresh_from_db()
self.learning_unit_year.learning_container_year.refresh_from_db()
initial_data = self.learning_unit_proposal.initial_data
self.assertTrue(_test_attributes_equal(self.learning_unit_year, initial_data["learning_unit_year"]))
self.assertTrue(_test_attributes_equal(self.learning_unit_year.learning_unit, initial_data["learning_unit"]))
self.assertTrue(_test_attributes_equal(self.learning_unit_year.learning_container_year,
initial_data["learning_container_year"]))
def _test_attributes_equal(obj, attribute_values_dict):
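    """Return True when every attribute in ``attribute_values_dict`` matches ``obj``.
    Credits are compared as floats and known foreign-key fields by primary key.
    """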
for key, value in attribute_values_dict.items():
attr_value = getattr(obj, key)
foreign_key_fields = [
"campus", "language", 'requirement_entity', 'allocation_entity',
'additional_entity_1', 'additional_entity_2'
]
if key == "credits":
if float(attr_value) != float(value):
return False
elif attr_value and key in foreign_key_fields:
if attr_value.pk != value:
return False
elif attr_value != value:
return False
return True
def _create_proposal_learning_unit(acronym):
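    """Build a full learning unit year (with lecturing and practical components)
    and wrap it in a MODIFICATION proposal in FACULTY state, storing a snapshot
    of its initial data.
    """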
a_learning_unit_year = LearningUnitYearFactory(
acronym=acronym,
subtype=learning_unit_year_subtypes.FULL,
learning_container_year__requirement_entity=EntityVersionFactory().entity,
)
learning_component_lecturing = LearningComponentYearFactory(
learning_unit_year=a_learning_unit_year,
type=learning_component_year_type.LECTURING
)
learning_component_practical = LearningComponentYearFactory(
learning_unit_year=a_learning_unit_year,
type=learning_component_year_type.PRACTICAL_EXERCISES)
container_year = a_learning_unit_year.learning_container_year
initial_data = {
"learning_container_year": {
"id": container_year.id,
"acronym": a_learning_unit_year.acronym,
"common_title": a_learning_unit_year.specific_title,
"common_title_english": a_learning_unit_year.specific_title_english,
"container_type": container_year.container_type,
"in_charge": container_year.in_charge,
"requirement_entity": container_year.requirement_entity.id,
"allocation_entity": None,
"additional_entity_1": None,
"additional_entity_2": None,
},
"learning_unit_year": {
"id": a_learning_unit_year.id,
"acronym": a_learning_unit_year.acronym,
"specific_title": a_learning_unit_year.specific_title,
"specific_title_english": a_learning_unit_year.specific_title_english,
"internship_subtype": a_learning_unit_year.internship_subtype,
"credits": float(a_learning_unit_year.credits),
"language": a_learning_unit_year.language.pk,
"campus": a_learning_unit_year.campus.id,
"periodicity": a_learning_unit_year.periodicity
},
"learning_unit": {
"id": a_learning_unit_year.learning_unit.id,
},
"learning_component_years": [
{"id": learning_component_lecturing.id, "planned_classes": learning_component_lecturing.planned_classes,
"hourly_volume_partial_q1": learning_component_lecturing.hourly_volume_partial_q1,
"hourly_volume_partial_q2": learning_component_lecturing.hourly_volume_partial_q2,
"hourly_volume_total_annual": learning_component_lecturing.hourly_volume_total_annual
},
{"id": learning_component_practical.id, "planned_classes": learning_component_practical.planned_classes,
"hourly_volume_partial_q1": learning_component_practical.hourly_volume_partial_q1,
"hourly_volume_partial_q2": learning_component_practical.hourly_volume_partial_q2,
"hourly_volume_total_annual": learning_component_practical.hourly_volume_total_annual
}
]
}
return ProposalLearningUnitFactory(learning_unit_year=a_learning_unit_year,
type=proposal_type.ProposalType.MODIFICATION.name,
state=proposal_state.ProposalState.FACULTY.name,
initial_data=initial_data,
entity=container_year.requirement_entity)
def _modify_learning_unit_year_data(a_learning_unit_year):
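    """Mutate title, acronym, credits, language and the container's campus so
    cancellation tests can verify the initial data is restored.
    """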
a_learning_unit_year.specific_title = "New title"
a_learning_unit_year.specific_title_english = "New english title"
a_learning_unit_year.acronym = "LNEW456"
a_learning_unit_year.credits = 123
a_learning_unit_year.language = LanguageFactory()
a_learning_unit_year.save()
a_learning_container = a_learning_unit_year.learning_container_year
a_learning_container.campus = CampusFactory()
a_learning_container.save()
def _modify_entities_linked_to_learning_container_year(a_learning_container_year):
a_learning_container_year.requirement_entity = EntityFactory()
a_learning_container_year.save()
@override_flag('learning_unit_proposal_update', active=True)
class TestEditProposal(TestCase):
@classmethod
def setUpTestData(cls):
today = datetime.date.today()
cls.academic_years = AcademicYearFactory.produce_in_future(quantity=5)
cls.current_academic_year = cls.academic_years[0]
end_year = AcademicYearFactory(year=cls.current_academic_year.year + 10)
generate_proposal_calendars(cls.academic_years)
cls.language = FrenchLanguageFactory()
cls.organization = organization_factory.OrganizationFactory(type=organization_type.MAIN)
cls.campus = campus_factory.CampusFactory(organization=cls.organization, is_administration=True)
cls.entity = EntityFactory(organization=cls.organization)
cls.entity_version = EntityVersionFactory(entity=cls.entity, entity_type=entity_type.FACULTY,
start_date=today.replace(year=1900),
end_date=None)
cls.generated_container = GenerateContainer(cls.current_academic_year, end_year, parent_entity=cls.entity)
cls.generated_container_first_year = cls.generated_container.generated_container_years[1]
cls.learning_unit_year = cls.generated_container_first_year.learning_unit_year_full
cls.requirement_entity_of_luy = cls.generated_container_first_year.requirement_entity_container_year
cls.person = FacultyManagerFactory(entity=cls.entity, with_child=True).person
cls.url = reverse(update_learning_unit_proposal, args=[cls.learning_unit_year.id])
cls.academic_year_for_suppression_proposal = AcademicYear.objects.filter(
year=cls.learning_unit_year.academic_year.year - 1)
def setUp(self):
self.proposal = ProposalLearningUnitFactory(learning_unit_year=self.learning_unit_year,
state=ProposalState.FACULTY.name,
folder_id=1,
entity=self.entity,
type=proposal_type.ProposalType.MODIFICATION.name)
self.client.force_login(self.person.user)
def test_edit_proposal_get_no_permission(self):
person = person_factory.PersonFactory()
self.client.force_login(person.user)
response = self.client.get(self.url)
self.assertEqual(response.status_code, HttpResponseForbidden.status_code)
self.assertTemplateUsed(response, 'access_denied.html')
def test_edit_proposal_get_regular_user_with_permission(self):
person = FacultyManagerFactory().person
self.client.force_login(person.user)
response = self.client.get(self.url)
self.assertEqual(response.status_code, HttpResponseForbidden.status_code)
self.assertTemplateUsed(response, 'access_denied.html')
def test_edit_proposal_get_as_faculty_manager(self):
response = self.client.get(self.url)
self.assertTemplateUsed(response, 'learning_unit/proposal/update_modification.html')
self.assertIsInstance(response.context['form_proposal'], ProposalLearningUnitForm)
def test_edit_proposal_get_as_central_manager_with_instance(self):
central_manager = person_factory.CentralManagerForUEFactory("can_edit_learning_unit_proposal")
self.client.logout()
self.client.force_login(central_manager.user)
response = self.client.get(self.url)
self.assertTemplateUsed(response, 'learning_unit/proposal/update_modification.html')
self.assertIsInstance(response.context['form_proposal'], ProposalLearningUnitForm)
self.assertEqual(response.context['form_proposal'].initial['state'], str(ProposalState.FACULTY.name))
def get_valid_data(self):
return {
'acronym_0': 'L',
'acronym_1': 'TAU2000',
"subtype": learning_unit_year_subtypes.FULL,
"container_type": learning_container_year_types.COURSE,
"academic_year": self.current_academic_year.id,
"status": True,
"credits": "5",
"campus": self.campus.id,
"common_title": "Common UE title",
"language": self.language.pk,
"periodicity": learning_unit_year_periodicity.ANNUAL,
"entity": self.entity_version.pk,
"folder_id": 1,
'requirement_entity':
self.entity_version.pk,
'allocation_entity':
self.entity_version.pk,
'additional_entity_1': '',
# Learning component year data model form
'component-TOTAL_FORMS': '2',
'component-INITIAL_FORMS': '0',
'component-MAX_NUM_FORMS': '2',
'component-0-hourly_volume_total_annual': 20,
'component-0-hourly_volume_partial_q1': 10,
'component-0-hourly_volume_partial_q2': 10,
'component-0-planned_classes': 1,
'component-1-hourly_volume_total_annual': 20,
'component-1-hourly_volume_partial_q1': 10,
'component-1-hourly_volume_partial_q2': 10,
'component-1-planned_classes': 1,
}
def get_modify_data(self):
modifydict = dict(self.get_valid_data())
modifydict["state"] = ProposalState.CENTRAL.value
return modifydict
def get_faulty_data(self):
faultydict = dict(self.get_valid_data())
faultydict["state"] = "bad_choice"
return faultydict
def test_edit_proposal_post_as_faculty_manager(self):
initial_data = copy_learning_unit_data(self.learning_unit_year)
self.proposal.initial_data = initial_data
request_factory = RequestFactory()
request = request_factory.post(self.url, data=self.get_modify_data())
request.user = self.person.user
request.session = self.client.session
request._messages = FallbackStorage(request)
update_learning_unit_proposal(request, learning_unit_year_id=self.learning_unit_year.id)
msg = [m.message for m in get_messages(request)]
msg_level = [m.level for m in get_messages(request)]
self.assertIn(messages.SUCCESS, msg_level, msg)
self.assertEqual(len(msg), 1)
self.proposal.refresh_from_db()
self.assertEqual(self.proposal.state, 'FACULTY')
def test_edit_proposal_post_wrong_data(self):
self.person.user.groups.add(CentralManagerGroupFactory())
response = self.client.post(self.url, data=self.get_faulty_data())
self.assertTemplateUsed(response, 'learning_unit/proposal/update_modification.html')
self.assertIsInstance(response.context['form_proposal'], ProposalLearningUnitForm)
form = response.context['form_proposal']
self.assertEqual(len(form.errors), 1)
self.proposal.refresh_from_db()
self.assertEqual(self.proposal.state, ProposalState.FACULTY.name)
def test_edit_suppression_proposal_get(self):
self.proposal.type = ProposalType.SUPPRESSION.name
self.proposal.save()
response = self.client.get(self.url)
self.assertTemplateUsed(response, 'learning_unit/proposal/update_suppression.html')
self.assertIsInstance(response.context['form_end_date'], LearningUnitProposalEndDateForm)
self.assertCountEqual(
list(response.context['form_end_date'].fields['academic_year'].queryset),
list(self.academic_year_for_suppression_proposal)
)
self.assertIsInstance(response.context['form_proposal'], ProposalLearningUnitForm)
def test_edit_suppression_proposal_post(self):
self.proposal.type = ProposalType.SUPPRESSION.name
self.proposal.save()
request_factory = RequestFactory()
request = request_factory.post(self.url,
data={"academic_year": self.academic_year_for_suppression_proposal.first().id,
"entity": self.entity_version.id,
"folder_id": 12})
request.user = self.person.user
request.session = 'session'
request._messages = FallbackStorage(request)
update_learning_unit_proposal(request, learning_unit_year_id=self.learning_unit_year.id)
msg = [m.message for m in get_messages(request)]
msg_level = [m.level for m in get_messages(request)]
self.assertEqual(len(msg), 1)
self.assertIn(messages.SUCCESS, msg_level)
self.proposal.refresh_from_db()
self.assertEqual(self.proposal.folder_id, 12)
def test_edit_suppression_proposal_wrong_post(self):
self.proposal.type = ProposalType.SUPPRESSION.name
self.proposal.save()
response = self.client.post(self.url, data={"academic_year": self.academic_years[3].id,
"entity": self.entity_version.id})
self.assertEqual(self.url, response.request['PATH_INFO'])
class TestLearningUnitProposalDisplay(TestCase):
@classmethod
def setUpTestData(cls):
cls.language_pt = LanguageFactory(code='PT', name="Portugais")
cls.language_it = LanguageFactory(code='IT', name="Italien")
cls.campus = CampusFactory()
cls.academic_year = create_current_academic_year()
cls.l_container_year = LearningContainerYearFactory(
acronym="LBIR1212",
academic_year=cls.academic_year,
)
cls.learning_unit = LearningUnitFactory(learning_container=cls.l_container_year.learning_container)
cls.learning_unit_yr = LearningUnitYearFactory(
acronym="LBIR1212",
learning_unit=cls.learning_unit,
learning_container_year=cls.l_container_year,
academic_year=cls.academic_year,
subtype=learning_unit_year_subtypes.FULL,
status=True,
quadrimester="Q3",
credits=4,
campus=cls.campus,
language=cls.language_pt,
periodicity=learning_unit_year_periodicity.BIENNIAL_EVEN
)
cls.proposal_learning_unit = ProposalLearningUnitFactory(learning_unit_year=cls.learning_unit_yr)
cls.initial_credits = 3.0
cls.initial_quadrimester = 'Q1'
cls.initial_language = cls.language_it.pk
cls.initial_periodicity = learning_unit_year_periodicity.ANNUAL
cls.initial_data_learning_unit_year = {'credits': cls.initial_credits, 'periodicity': cls.initial_periodicity}
organization_main = OrganizationFactory(type=organization_type.MAIN)
cls.entity_from_main_organization = EntityFactory(organization=organization_main)
cls.entity_version = EntityVersionFactory(entity=cls.entity_from_main_organization)
organization_not_main = OrganizationFactory(type=organization_type.ACADEMIC_PARTNER)
cls.entity_from_not_main_organization = EntityFactory(organization=organization_not_main)
cls.entity_version_not_main = EntityVersionFactory(entity=cls.entity_from_not_main_organization)
cls.initial_language_en = cls.language_it
end_year = AcademicYearFactory(year=cls.academic_year.year + 1)
cls.generator_learning_container = GenerateContainer(start_year=cls.academic_year, end_year=end_year)
cls.l_container_year_with_entities = cls.generator_learning_container.generated_container_years[0]
def test_is_foreign_key(self):
current_data = {"language{}".format(proposal_business.END_FOREIGN_KEY_NAME): self.language_it.pk}
self.assertTrue(proposal_business._is_foreign_key("language", current_data))
def test_is_not_foreign_key(self):
current_data = {"credits": self.language_it.pk}
self.assertFalse(proposal_business._is_foreign_key("credits", current_data))
def test_check_differences(self):
proposal = ProposalLearningUnitFactory()
proposal.initial_data = {'learning_unit_year': {
'credits': self.initial_credits
}}
proposal.learning_unit_year.credits = self.learning_unit_yr.credits
differences = proposal_business.get_difference_of_proposal(proposal, proposal.learning_unit_year)
self.assertEqual(float(differences.get('credits')), self.initial_credits)
def test_get_the_old_value(self):
differences = proposal_business._get_the_old_value('credits',
{"credits": self.initial_credits + 1},
{'credits': self.initial_credits})
self.assertEqual(differences, "{}".format(self.initial_credits))
def test_get_the_old_value_no_initial_value(self):
differences = proposal_business._get_the_old_value('credits',
{"credits": self.initial_credits + 1},
{})
self.assertEqual(differences, proposal_business.NO_PREVIOUS_VALUE)
def test_get_the_old_value_for_foreign_key(self):
initial_data_learning_unit_year = {'language': self.language_pt.pk}
current_data = {"language_id": self.language_it.pk}
differences = proposal_business._get_the_old_value('language',
current_data,
initial_data_learning_unit_year)
self.assertEqual(differences, str(self.language_pt))
def test_get_the_old_value_for_foreign_key_no_previous_value(self):
initial_data = {"language": None}
current_data = {"language_id": self.language_it.pk}
differences = proposal_business._get_the_old_value('language', current_data, initial_data)
self.assertEqual(differences, proposal_business.NO_PREVIOUS_VALUE)
initial_data = {}
differences = proposal_business._get_the_old_value('language', current_data, initial_data)
self.assertEqual(differences, proposal_business.NO_PREVIOUS_VALUE)
def test_get_the_old_value_with_translation(self):
key = proposal_business.VALUES_WHICH_NEED_TRANSLATION[0]
initial_data = {key: learning_unit_year_periodicity.ANNUAL}
current_data = {key: learning_unit_year_periodicity.BIENNIAL_EVEN}
differences = proposal_business._get_the_old_value(key, current_data, initial_data)
self.assertEqual(differences, _(learning_unit_year_periodicity.ANNUAL))
def test_get_str_representing_old_data_from_foreign_key(self):
differences = proposal_business._get_str_representing_old_data_from_foreign_key('campus', self.campus.id)
self.assertEqual(differences, str(self.campus.name))
def test_get_str_representing_old_data_from_foreign_key_equals_no_value(self):
differences = proposal_business._get_str_representing_old_data_from_foreign_key(
'campus',
proposal_business.NO_PREVIOUS_VALUE)
self.assertEqual(differences, proposal_business.NO_PREVIOUS_VALUE)
def test_get_old_value_of_foreign_key_for_campus(self):
differences = proposal_business._get_old_value_of_foreign_key('campus', self.campus.id)
self.assertEqual(differences, str(self.campus.name))
def test_get_old_value_of_foreign_key_for_language(self):
differences = proposal_business._get_old_value_of_foreign_key('language', self.language_it.pk)
self.assertEqual(differences, str(self.language_it))
def test_get_old_value_of_foreign_key_for_additional_requirement_entity_main_organization(self):
differences = proposal_business._get_old_value_of_foreign_key('ADDITIONAL_REQUIREMENT_ENTITY_1',
self.entity_from_main_organization.pk)
self.assertEqual(differences, str(self.entity_from_main_organization.most_recent_entity_version.acronym))
def test_get_old_value_of_foreign_key_for_additional_requirement_entity_not_main_organization(self):
differences = proposal_business._get_old_value_of_foreign_key('ADDITIONAL_REQUIREMENT_ENTITY_1',
self.entity_from_not_main_organization.pk)
self.assertEqual(differences, str(self.entity_from_not_main_organization.most_recent_entity_version.title))
def test_get_status_initial_value(self):
self.assertEqual(proposal_business._get_status_initial_value(True),
proposal_business.LABEL_ACTIVE)
self.assertEqual(proposal_business._get_status_initial_value(False),
proposal_business.LABEL_INACTIVE)
def test_get_old_value_for_periodicity(self):
differences = proposal_business._get_the_old_value('periodicity',
{"periodicity": self.learning_unit_yr.periodicity},
{'periodicity': self.initial_periodicity})
self.assertEqual(differences,
dict(learning_unit_year_periodicity.PERIODICITY_TYPES)[self.initial_periodicity])
@override_flag('learning_unit_proposal_delete', active=True)
class TestCreationProposalCancel(TestCase):
@mock.patch('base.utils.send_mail.send_mail_cancellation_learning_unit_proposals')
def test_cancel_proposal_of_learning_unit(self, mock_send_mail):
a_proposal = _create_proposal_learning_unit("LOSIS1211")
luy = a_proposal.learning_unit_year
url = reverse('learning_unit_cancel_proposal', args=[luy.id])
generate_proposal_calendars_without_start_and_end_date([luy.academic_year])
self.central_manager = CentralManagerFactory(entity=luy.learning_container_year.requirement_entity)
self.client.force_login(self.central_manager.person.user)
response = self.client.post(url, data={})
redirected_url = reverse('learning_unit', args=[luy.id])
msgs = [str(message) for message in get_messages(response.wsgi_request)]
self.assertRedirects(response, redirected_url, fetch_redirect_response=False)
self.assertEqual(len(msgs), 2)
self.assertTrue(mock_send_mail.called)
| agpl-3.0 | -6,050,080,561,337,178,000 | 49.473206 | 120 | 0.670134 | false |
christianwengert/mailclient | src/bin/__init__.py | 1 | 2230 | # coding=utf-8
import subprocess
from imapclient import IMAPClient
HOST = 'mail.netzone.ch'
USERNAME = '[email protected]'
PASSWORD = subprocess.check_output(["/usr/local/bin/pass", "mail/[email protected]"])
PASSWORD = PASSWORD.split()[0].decode('utf8')
KEYMAPPING = {}
ssl = True
class Signature():
pass
class Account():
    # username, password, first name, name, host, port, ssl
pass
class Mailbox():
    # name, account
pass
class Search():
flags = ''
    searchterms = ''
date = ''
class Message():
id = ''
flags = '' # is replied and forwarded here?
attachments = ''
subject = ''
content = ''
date = ''
mailbox = ''
label = ''
def save_search():
pass
def new_mailbox():
pass
def delete_mailbox():
pass
def rename_mailbox():
pass
def reply():
pass
def forward():
pass
def mark_unread():
pass
def label():
pass
def move():
pass
def search():
pass
def flag():
pass
def delete():
pass
def compose():
pass
def clean_database():
pass
def sync_database():
#fetch
pass
def main():
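    """Connect to the IMAP server, select INBOX and print size/flags of all
    messages that are not deleted."""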
server = IMAPClient(HOST, use_uid=True, ssl=ssl)
server.login(USERNAME, PASSWORD)
select_info = server.select_folder('INBOX')
print('%d messages in INBOX' % select_info[b'EXISTS'])
messages = server.search(['NOT', 'DELETED'])
print("%d messages that aren't deleted" % len(messages))
print()
print("Messages:")
response = server.fetch(messages, ['FLAGS', 'RFC822', 'RFC822.SIZE', 'INTERNALDATE'])
for msgid, data in response.items():
print(' ID %d: %d bytes, flags=%s' % (msgid,
data[b'RFC822.SIZE'],
data[b'FLAGS']))
if __name__ == "__main__":
# parser = argparse.ArgumentParser(description='Command line mail client.')
#
# parser.add_argument('--host', dest='accumulate', action='store_const',
# const=sum, default=max,
# help='sum the integers (default: find the max)')
#
# args = parser.parse_args()
# print(args.accumulate(args.integers))
main()
| bsd-3-clause | -5,255,035,904,629,647,000 | 14.594406 | 89 | 0.565471 | false |
ningirsu/stepmania-server | smserver/controllers/routes.py | 1 | 1366 | """ Routes files """
from smserver.smutils.smpacket import smcommand
from smserver.controllers import legacy
ROUTES = {
# Legacy controller for compatibility with Stepmania 5.X
smcommand.SMClientCommand.NSCPingR: legacy.ping_response.PINGRController,
smcommand.SMClientCommand.NSCHello: legacy.hello.HelloController,
smcommand.SMClientCommand.NSCCM: legacy.chat.ChatController,
smcommand.SMClientCommand.NSCFormatted: legacy.discovery.DiscoveryController,
smcommand.SMClientCommand.NSCGON: legacy.game_over.GameOverController,
smcommand.SMClientCommand.NSCGSR: legacy.game_start_request.StartGameRequestController,
smcommand.SMClientCommand.NSCGSU: legacy.game_status_update.GameStatusUpdateController,
smcommand.SMClientCommand.NSCRSG: legacy.request_start_game.RequestStartGameController,
    smcommand.SMClientCommand.NSSMONL: legacy.smo.SMOController,
smcommand.SMClientCommand.NSCSU: legacy.user_profil.UserProfilController,
smcommand.SMClientCommand.NSSCSMS: legacy.user_screen.UserStatusController,
smcommand.SMOClientCommand.LOGIN: legacy.login.LoginController,
smcommand.SMOClientCommand.ENTERROOM: legacy.enter_room.EnterRoomController,
smcommand.SMOClientCommand.CREATEROOM: legacy.create_room.CreateRoomController,
smcommand.SMOClientCommand.ROOMINFO: legacy.room_info.RoomInfoController,
}
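# Minimal dispatch sketch (illustrative only; the real wiring lives elsewhere
# in smserver, and the constructor/handle signatures below are assumptions):
#
#     controller_cls = ROUTES.get(packet.command)
#     if controller_cls:
#         controller_cls(server, connection, packet).handle()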
| mit | 7,506,692,969,652,477,000 | 53.64 | 91 | 0.825769 | false |
ThomasMcVay/MediaApp | MediaAppKnobs/KnobElements/RectButton.py | 1 | 1663 | #===============================================================================
# @Author: Madison Aster
# @ModuleDescription:
# @License:
# MediaApp Library - Python Package framework for developing robust Media
# Applications with Qt Library
# Copyright (C) 2013 Madison Aster
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License version 2.1 as published by the Free Software Foundation;
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# See LICENSE in the root directory of this library for copy of
# GNU Lesser General Public License and other license details.
#===============================================================================
from Qt import QtGui, QtCore, QtWidgets
class RectButton(QtWidgets.QPushButton):
    def __init__(self, *args):
        # Accept an optional leading string as the button label; avoids an
        # IndexError when no arguments are passed.
        text = args[0] if args and isinstance(args[0], str) else ''
        super(RectButton, self).__init__(text)
        # QSizePolicy is exposed via QtWidgets in Qt5-style bindings.
        self.setSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
    def sizeHint(self):
        # Fixed preferred size, capped by the Maximum/Fixed policy set above.
        return QtCore.QSize(600, 16)
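# Usage sketch (assuming an existing QWidget layout; the slot name is hypothetical):
#
#     button = RectButton('Render')
#     button.clicked.connect(on_render)
#     layout.addWidget(button)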
| lgpl-2.1 | 5,703,280,998,694,598,000 | 42.945946 | 83 | 0.607336 | false |
lvapeab/nmt-keras | tests/NMT_architectures/unidir_deep_GRU_ConditionalLSTM.py | 1 | 3021 | import argparse
import os
import pytest
from tests.test_config import load_tests_params, clean_dirs
from data_engine.prepare_data import build_dataset
from nmt_keras.training import train_model
from nmt_keras.apply_model import sample_ensemble, score_corpus
def test_NMT_Unidir_deep_GRU_ConditionalLSTM():
params = load_tests_params()
    # Current test params: deep (2-layer) unidirectional GRU encoder - ConditionalLSTM decoder
params['BIDIRECTIONAL_ENCODER'] = False
params['N_LAYERS_ENCODER'] = 2
params['BIDIRECTIONAL_DEEP_ENCODER'] = False
params['ENCODER_RNN_TYPE'] = 'GRU'
params['DECODER_RNN_TYPE'] = 'ConditionalLSTM'
params['N_LAYERS_DECODER'] = 2
params['REBUILD_DATASET'] = True
dataset = build_dataset(params)
params['INPUT_VOCABULARY_SIZE'] = dataset.vocabulary_len[params['INPUTS_IDS_DATASET'][0]]
params['OUTPUT_VOCABULARY_SIZE'] = dataset.vocabulary_len[params['OUTPUTS_IDS_DATASET'][0]]
params['MODEL_NAME'] = \
params['TASK_NAME'] + '_' + params['SRC_LAN'] + params['TRG_LAN'] + '_' + params['MODEL_TYPE'] + \
'_src_emb_' + str(params['SOURCE_TEXT_EMBEDDING_SIZE']) + \
'_bidir_' + str(params['BIDIRECTIONAL_ENCODER']) + \
'_enc_' + params['ENCODER_RNN_TYPE'] + '_*' + str(params['N_LAYERS_ENCODER']) + '_' + str(
params['ENCODER_HIDDEN_SIZE']) + \
'_dec_' + params['DECODER_RNN_TYPE'] + '_*' + str(params['N_LAYERS_DECODER']) + '_' + str(
params['DECODER_HIDDEN_SIZE']) + \
'_deepout_' + '_'.join([layer[0] for layer in params['DEEP_OUTPUT_LAYERS']]) + \
'_trg_emb_' + str(params['TARGET_TEXT_EMBEDDING_SIZE']) + \
'_' + params['OPTIMIZER'] + '_' + str(params['LR'])
# Test several NMT-Keras utilities: train, sample, sample_ensemble, score_corpus...
print("Training model")
train_model(params)
params['RELOAD'] = 1
print("Done")
parser = argparse.ArgumentParser('Parser for unit testing')
parser.dataset = os.path.join(
params['DATASET_STORE_PATH'],
'Dataset_' + params['DATASET_NAME'] + '_' + params['SRC_LAN'] + params['TRG_LAN'] + '.pkl')
parser.text = os.path.join(params['DATA_ROOT_PATH'], params['TEXT_FILES']['val'] + params['SRC_LAN'])
parser.splits = ['val']
parser.config = params['STORE_PATH'] + '/config.pkl'
parser.models = [params['STORE_PATH'] + '/epoch_' + str(1)]
parser.verbose = 0
parser.dest = None
parser.source = os.path.join(params['DATA_ROOT_PATH'], params['TEXT_FILES']['val'] + params['SRC_LAN'])
parser.target = os.path.join(params['DATA_ROOT_PATH'], params['TEXT_FILES']['val'] + params['TRG_LAN'])
parser.weights = []
parser.glossary = None
for n_best in [True, False]:
parser.n_best = n_best
print("Sampling with n_best = %s " % str(n_best))
sample_ensemble(parser, params)
print("Done")
print("Scoring corpus")
score_corpus(parser, params)
print("Done")
clean_dirs(params)
if __name__ == '__main__':
pytest.main([__file__])
| mit | 8,802,438,062,096,458,000 | 40.958333 | 107 | 0.618338 | false |
kontron/python-ipmi | pyipmi/sensor.py | 1 | 7640 | # cOPYRIGht (c) 2014 Kontron Europe GmbH
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from __future__ import absolute_import
from .utils import check_completion_code
from .msgs import create_request_by_name
from .helper import get_sdr_data_helper, get_sdr_chunk_helper
from . import sdr
# THRESHOLD BASED STATES
EVENT_READING_TYPE_CODE_THRESHOLD = 0x01
# DMI-based "Usage States" STATES
EVENT_READING_TYPE_CODE_DISCRETE = 0x02
# DIGITAL/DISCRETE EVENT STATES
EVENT_READING_TYPE_CODE_STATE = 0x03
EVENT_READING_TYPE_CODE_PREDICTIVE_FAILURE = 0x04
EVENT_READING_TYPE_CODE_LIMIT = 0x05
EVENT_READING_TYPE_CODE_PERFORMANCE = 0x06
# Sensor Types
SENSOR_TYPE_TEMPERATURE = 0x01
SENSOR_TYPE_VOLTAGE = 0x02
SENSOR_TYPE_CURRENT = 0x03
SENSOR_TYPE_FAN = 0x04
SENSOR_TYPE_CHASSIS_INTRUSION = 0x05
SENSOR_TYPE_PLATFORM_SECURITY = 0x06
SENSOR_TYPE_PROCESSOR = 0x07
SENSOR_TYPE_POWER_SUPPLY = 0x08
SENSOR_TYPE_POWER_UNIT = 0x09
SENSOR_TYPE_COOLING_DEVICE = 0x0a
SENSOR_TYPE_OTHER_UNITS_BASED_SENSOR = 0x0b
SENSOR_TYPE_MEMORY = 0x0c
SENSOR_TYPE_DRIVE_SLOT = 0x0d
SENSOR_TYPE_POST_MEMORY_RESIZE = 0x0e
SENSOR_TYPE_SYSTEM_FIRMWARE_PROGRESS = 0x0f
SENSOR_TYPE_EVENT_LOGGING_DISABLED = 0x10
SENSOR_TYPE_WATCHDOG_1 = 0x11
SENSOR_TYPE_SYSTEM_EVENT = 0x12
SENSOR_TYPE_CRITICAL_INTERRUPT = 0x13
SENSOR_TYPE_BUTTON = 0x14
SENSOR_TYPE_MODULE_BOARD = 0x15
SENSOR_TYPE_MICROCONTROLLER_COPROCESSOR = 0x16
SENSOR_TYPE_ADD_IN_CARD = 0x17
SENSOR_TYPE_CHASSIS = 0x18
SENSOR_TYPE_CHIP_SET = 0x19
SENSOR_TYPE_OTHER_FRU = 0x1a
SENSOR_TYPE_CABLE_INTERCONNECT = 0x1b
SENSOR_TYPE_TERMINATOR = 0x1c
SENSOR_TYPE_SYSTEM_BOOT_INITIATED = 0x1d
SENSOR_TYPE_BOOT_ERROR = 0x1e
SENSOR_TYPE_OS_BOOT = 0x1f
SENSOR_TYPE_OS_CRITICAL_STOP = 0x20
SENSOR_TYPE_SLOT_CONNECTOR = 0x21
SENSOR_TYPE_SYSTEM_ACPI_POWER_STATE = 0x22
SENSOR_TYPE_WATCHDOG_2 = 0x23
SENSOR_TYPE_PLATFORM_ALERT = 0x24
SENSOR_TYPE_ENTITY_PRESENT = 0x25
SENSOR_TYPE_MONITOR_ASIC_IC = 0x26
SENSOR_TYPE_LAN = 0x27
SENSOR_TYPE_MANGEMENT_SUBSYSTEM_HEALTH = 0x28
SENSOR_TYPE_BATTERY = 0x29
SENSOR_TYPE_SESSION_AUDIT = 0x2a
SENSOR_TYPE_VERSION_CHANGE = 0x2b
SENSOR_TYPE_FRU_STATE = 0x2c
SENSOR_TYPE_FRU_HOT_SWAP = 0xf0
SENSOR_TYPE_IPMB_PHYSICAL_LINK = 0xf1
SENSOR_TYPE_MODULE_HOT_SWAP = 0xf2
SENSOR_TYPE_POWER_CHANNEL_NOTIFICATION = 0xf3
SENSOR_TYPE_TELCO_ALARM_INPUT = 0xf4
SENSOR_TYPE_OEM_KONTRON_FRU_INFORMATION_AGENT = 0xc5
SENSOR_TYPE_OEM_KONTRON_POST_VALUE = 0xc6
SENSOR_TYPE_OEM_KONTRON_FW_UPGRADE = 0xc7
SENSOR_TYPE_OEM_KONTRON_DIAGNOSTIC = 0xc9
SENSOR_TYPE_OEM_KONTRON_SYSTEM_FIRMWARE_UPGRADE = 0xca
SENSOR_TYPE_OEM_KONTRON_POWER_DENIED = 0xcd
SENSOR_TYPE_OEM_KONTRON_RESET = 0xcf
class Sensor(object):
def reserve_device_sdr_repository(self):
rsp = self.send_message_with_name('ReserveDeviceSdrRepository')
return rsp.reservation_id
def _get_device_sdr_chunk(self, reservation_id, record_id, offset, length):
req = create_request_by_name('GetDeviceSdr')
req.reservation_id = reservation_id
req.record_id = record_id
req.offset = offset
req.bytes_to_read = length
rsp = get_sdr_chunk_helper(self.send_message, req,
self.reserve_device_sdr_repository)
return (rsp.next_record_id, rsp.record_data)
def get_device_sdr(self, record_id, reservation_id=None):
"""Collect all data from the sensor device to get the SDR.
`record_id` the Record ID.
        `reservation_id=None` can be set. If None, the reservation ID will
        be determined.
"""
(next_id, record_data) = \
get_sdr_data_helper(self.reserve_device_sdr_repository,
self._get_device_sdr_chunk,
record_id, reservation_id)
return sdr.SdrCommon.from_data(record_data, next_id)
def device_sdr_entries(self):
"""A generator that returns the SDR list.
Starting with ID=0x0000 and
end when ID=0xffff is returned.
"""
reservation_id = self.reserve_device_sdr_repository()
record_id = 0
while True:
record = self.get_device_sdr(record_id, reservation_id)
yield record
if record.next_id == 0xffff:
break
record_id = record.next_id
def get_device_sdr_list(self, reservation_id=None):
"""Return the complete SDR list."""
return list(self.device_sdr_entries())
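    # Iteration sketch (assuming ``ipmi`` is a connected pyipmi session object
    # that mixes in this class):
    #
    #     for record in ipmi.device_sdr_entries():
    #         print(record)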
def rearm_sensor_events(self, sensor_number):
"""Rearm sensor events for the given sensor number."""
self.send_message_with_name('RearmSensorEvents',
sensor_number=sensor_number)
def get_sensor_reading(self, sensor_number, lun=0):
"""Return the sensor reading at the assertion states.
`sensor_number`
Returns a tuple with `raw reading`and `assertion states`.
"""
rsp = self.send_message_with_name('GetSensorReading',
sensor_number=sensor_number,
lun=lun)
reading = rsp.sensor_reading
if rsp.config.initial_update_in_progress:
reading = None
states = None
if rsp.states1 is not None:
states = rsp.states1
if rsp.states2 is not None:
states |= (rsp.states2 << 8)
return (reading, states)
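    # Note: ``reading`` is the raw sensor byte (None while an initial update is
    # in progress) and still needs the SDR conversion formula applied; ``states``
    # packs the two assertion-state bytes, low byte first.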
def set_sensor_thresholds(self, sensor_number, lun=0,
unr=None, ucr=None, unc=None,
lnc=None, lcr=None, lnr=None):
"""Set the sensor thresholds that are not 'None'.
`sensor_number`
`unr` for upper non-recoverable
`ucr` for upper critical
`unc` for upper non-critical
`lnc` for lower non-critical
`lcr` for lower critical
`lnr` for lower non-recoverable
"""
req = create_request_by_name('SetSensorThresholds')
req.sensor_number = sensor_number
req.lun = lun
thresholds = dict(unr=unr, ucr=ucr, unc=unc, lnc=lnc, lcr=lcr, lnr=lnr)
for key, value in thresholds.items():
if value is not None:
setattr(req.set_mask, key, 1)
setattr(req.threshold, key, value)
rsp = self.send_message(req)
check_completion_code(rsp.completion_code)
def get_sensor_thresholds(self, sensor_number, lun=0):
rsp = self.send_message_with_name('GetSensorThresholds',
sensor_number=sensor_number,
lun=lun)
thresholds = {}
threshold_list = ('unr', 'ucr', 'unc', 'lnc', 'lcr', 'lnr')
for threshold in threshold_list:
if hasattr(rsp.readable_mask, threshold):
if getattr(rsp.readable_mask, threshold):
thresholds[threshold] = getattr(rsp.threshold, threshold)
return thresholds
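# Threshold round-trip sketch (``ipmi`` is an assumed connected session object):
#
#     ipmi.set_sensor_thresholds(sensor_number=5, ucr=80)
#     ipmi.get_sensor_thresholds(sensor_number=5)  # -> e.g. {'ucr': 80, ...}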
| lgpl-2.1 | -3,930,262,043,866,632,700 | 34.868545 | 79 | 0.657984 | false |
henriquegemignani/randovania | randovania/gui/tracker_window.py | 1 | 32424 | import collections
import functools
import json
import typing
from pathlib import Path
from random import Random
from typing import Optional, Dict, Set, List, Tuple, Iterator, Union
import matplotlib.pyplot as plt
import networkx
from PySide2 import QtWidgets
from PySide2.QtCore import Qt
from PySide2.QtWidgets import QMainWindow, QTreeWidgetItem, QCheckBox, QLabel, QGridLayout, QWidget, QMessageBox
from matplotlib.axes import Axes
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar
from matplotlib.figure import Figure
from randovania.game_description.area_location import AreaLocation
from randovania.game_description.game_description import GameDescription
from randovania.game_description.item.item_category import ItemCategory
from randovania.game_description.node import Node, ResourceNode, TranslatorGateNode, TeleporterNode, DockNode
from randovania.game_description.resources.item_resource_info import ItemResourceInfo
from randovania.game_description.resources.pickup_entry import PickupEntry
from randovania.game_description.resources.resource_info import add_resource_gain_to_current_resources
from randovania.game_description.resources.translator_gate import TranslatorGate
from randovania.game_description.world import World
from randovania.games.game import RandovaniaGame
from randovania.games.prime import patcher_file
from randovania.generator import generator
from randovania.gui.generated.tracker_window_ui import Ui_TrackerWindow
from randovania.gui.lib.common_qt_lib import set_default_window_icon
from randovania.gui.lib.custom_spin_box import CustomSpinBox
from randovania.layout import translator_configuration
from randovania.layout.echoes_configuration import EchoesConfiguration
from randovania.layout.teleporters import TeleporterShuffleMode
from randovania.layout.translator_configuration import LayoutTranslatorRequirement
from randovania.resolver.bootstrap import logic_bootstrap
from randovania.resolver.logic import Logic
from randovania.resolver.resolver_reach import ResolverReach
from randovania.resolver.state import State, add_pickup_to_state
class InvalidLayoutForTracker(Exception):
pass
def _load_previous_state(persistence_path: Path,
layout_configuration: EchoesConfiguration,
) -> Optional[dict]:
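    """Load the serialized tracker state from ``persistence_path``, but only if
    the saved layout configuration matches the current one; otherwise None.
    """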
previous_layout_path = persistence_path.joinpath("layout_configuration.json")
try:
with previous_layout_path.open() as previous_layout_file:
previous_layout = EchoesConfiguration.from_json(json.load(previous_layout_file))
except (FileNotFoundError, TypeError, KeyError, ValueError, json.JSONDecodeError):
return None
if previous_layout != layout_configuration:
return None
previous_state_path = persistence_path.joinpath("state.json")
try:
with previous_state_path.open() as previous_state_file:
return json.load(previous_state_file)
except (FileNotFoundError, json.JSONDecodeError):
return None
class MatplotlibWidget(QtWidgets.QWidget):
ax: Axes
def __init__(self, parent=None):
super().__init__(parent)
fig = Figure(figsize=(7, 5), dpi=65, facecolor=(1, 1, 1), edgecolor=(0, 0, 0))
self.canvas = FigureCanvas(fig)
self.toolbar = NavigationToolbar(self.canvas, self)
lay = QtWidgets.QVBoxLayout(self)
lay.addWidget(self.toolbar)
lay.addWidget(self.canvas)
self.ax = fig.add_subplot(111)
self.line, *_ = self.ax.plot([])
class TrackerWindow(QMainWindow, Ui_TrackerWindow):
# Tracker state
_collected_pickups: Dict[PickupEntry, int]
_actions: List[Node]
# Tracker configuration
logic: Logic
game_description: GameDescription
layout_configuration: EchoesConfiguration
persistence_path: Path
_initial_state: State
_elevator_id_to_combo: Dict[int, QtWidgets.QComboBox]
_translator_gate_to_combo: Dict[TranslatorGate, QtWidgets.QComboBox]
_starting_nodes: Set[ResourceNode]
_undefined_item = ItemResourceInfo(-1, "Undefined", "Undefined", 0, None)
# UI tools
_asset_id_to_item: Dict[int, QTreeWidgetItem]
_node_to_item: Dict[Node, QTreeWidgetItem]
_widget_for_pickup: Dict[PickupEntry, Union[QCheckBox, CustomSpinBox]]
_during_setup = False
def __init__(self, persistence_path: Path, layout_configuration: EchoesConfiguration):
super().__init__()
self.setupUi(self)
set_default_window_icon(self)
self._collected_pickups = {}
self._widget_for_pickup = {}
self._actions = []
self._asset_id_to_item = {}
self._node_to_item = {}
self.layout_configuration = layout_configuration
self.persistence_path = persistence_path
player_pool = generator.create_player_pool(Random(0), self.layout_configuration, 0, 1)
pool_patches = player_pool.patches
self.game_description, self._initial_state = logic_bootstrap(layout_configuration,
player_pool.game,
pool_patches)
self.logic = Logic(self.game_description, layout_configuration)
self._initial_state.resources["add_self_as_requirement_to_resources"] = 1
self.menu_reset_action.triggered.connect(self._confirm_reset)
self.resource_filter_check.stateChanged.connect(self.update_locations_tree_for_reachable_nodes)
self.hide_collected_resources_check.stateChanged.connect(self.update_locations_tree_for_reachable_nodes)
self.undo_last_action_button.clicked.connect(self._undo_last_action)
self.configuration_label.setText("Trick Level: {}; Starts with:\n{}".format(
layout_configuration.trick_level.pretty_description,
", ".join(
resource.short_name
for resource in pool_patches.starting_items.keys()
)
))
self.setup_pickups_box(player_pool.pickups)
self.setup_possible_locations_tree()
self.setup_elevators()
self.setup_translator_gates()
self.matplot_widget = MatplotlibWidget(self.tab_graph_map)
self.tab_graph_map_layout.addWidget(self.matplot_widget)
self._world_to_node_positions = {}
self.map_tab_widget.currentChanged.connect(self._on_tab_changed)
for world in self.game_description.world_list.worlds:
self.graph_map_world_combo.addItem(world.name, world)
self.graph_map_world_combo.currentIndexChanged.connect(self.on_graph_map_world_combo)
persistence_path.mkdir(parents=True, exist_ok=True)
previous_state = _load_previous_state(persistence_path, layout_configuration)
if not self.apply_previous_state(previous_state):
self.setup_starting_location(None)
with persistence_path.joinpath("layout_configuration.json").open("w") as layout_file:
json.dump(layout_configuration.as_json, layout_file)
self._add_new_action(self._initial_state.node)
def apply_previous_state(self, previous_state: Optional[dict]) -> bool:
if previous_state is None:
return False
starting_location = None
needs_starting_location = len(self.layout_configuration.starting_location.locations) > 1
resource_db = self.game_description.resource_database
translator_gates = {}
try:
pickup_name_to_pickup = {pickup.name: pickup for pickup in self._collected_pickups.keys()}
quantity_to_change = {
pickup_name_to_pickup[pickup_name]: quantity
for pickup_name, quantity in previous_state["collected_pickups"].items()
}
previous_actions = [
self.game_description.world_list.all_nodes[index]
for index in previous_state["actions"]
]
if needs_starting_location:
starting_location = AreaLocation.from_json(previous_state["starting_location"])
elevators = {
int(elevator_id): AreaLocation.from_json(location) if location is not None else None
for elevator_id, location in previous_state["elevators"].items()
}
if self.layout_configuration.game == RandovaniaGame.PRIME2:
translator_gates = {
TranslatorGate(int(gate)): (resource_db.get_item(item)
if item is not None
else self._undefined_item)
for gate, item in previous_state["translator_gates"].items()
}
except KeyError:
return False
self.setup_starting_location(starting_location)
for elevator_id, area_location in elevators.items():
combo = self._elevator_id_to_combo[elevator_id]
if area_location is None:
combo.setCurrentIndex(0)
continue
for i in range(combo.count()):
if area_location == combo.itemData(i):
combo.setCurrentIndex(i)
break
for gate, item in translator_gates.items():
combo = self._translator_gate_to_combo[gate]
for i in range(combo.count()):
if item == combo.itemData(i):
combo.setCurrentIndex(i)
break
self.bulk_change_quantity(quantity_to_change)
self._add_new_actions(previous_actions)
return True
def reset(self):
self.bulk_change_quantity({
pickup: 0
for pickup in self._collected_pickups.keys()
})
while len(self._actions) > 1:
self._actions.pop()
self.actions_list.takeItem(len(self._actions))
for elevator in self._elevator_id_to_combo.values():
elevator.setCurrentIndex(0)
for elevator in self._translator_gate_to_combo.values():
elevator.setCurrentIndex(0)
self._refresh_for_new_action()
def _confirm_reset(self):
reply = QMessageBox.question(self, "Reset Tracker?", "Do you want to reset the tracker progression?",
QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
if reply == QMessageBox.Yes:
self.reset()
@property
def _show_only_resource_nodes(self) -> bool:
return self.resource_filter_check.isChecked()
@property
def _hide_collected_resources(self) -> bool:
return self.hide_collected_resources_check.isChecked()
@property
def _collected_nodes(self) -> Set[ResourceNode]:
return self._starting_nodes | set(action for action in self._actions if action.is_resource_node)
def _pretty_node_name(self, node: Node) -> str:
world_list = self.game_description.world_list
return "{} / {}".format(world_list.area_name(world_list.nodes_to_area(node)), node.name)
def _refresh_for_new_action(self):
self.undo_last_action_button.setEnabled(len(self._actions) > 1)
self.current_location_label.setText("Current location: {}".format(self._pretty_node_name(self._actions[-1])))
self.update_locations_tree_for_reachable_nodes()
def _add_new_action(self, node: Node):
self._add_new_actions([node])
def _add_new_actions(self, nodes: Iterator[Node]):
for node in nodes:
self.actions_list.addItem(self._pretty_node_name(node))
self._actions.append(node)
self._refresh_for_new_action()
def _undo_last_action(self):
self._actions.pop()
self.actions_list.takeItem(len(self._actions))
self._refresh_for_new_action()
def _on_tree_node_double_clicked(self, item: QTreeWidgetItem, _):
node: Optional[Node] = getattr(item, "node", None)
if not item.isDisabled() and node is not None and node != self._actions[-1]:
self._add_new_action(node)
def _positions_for_world(self, world: World):
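        """Build a directed graph of the world's areas, linked through their
        dock connections, and return a spring layout with one position per area."""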
g = networkx.DiGraph()
world_list = self.game_description.world_list
state = self.state_for_current_configuration()
for area in world.areas:
g.add_node(area)
for area in world.areas:
nearby_areas = set()
for node in area.nodes:
if isinstance(node, DockNode):
try:
target_node = world_list.resolve_dock_node(node, state.patches)
nearby_areas.add(world_list.nodes_to_area(target_node))
except IndexError as e:
print(f"For {node.name} in {area.name}, received {e}")
continue
for other_area in nearby_areas:
g.add_edge(area, other_area)
return networkx.drawing.spring_layout(g)
def update_matplot_widget(self, nodes_in_reach: Set[Node]):
g = networkx.DiGraph()
world_list = self.game_description.world_list
state = self.state_for_current_configuration()
world = self.graph_map_world_combo.currentData()
for area in world.areas:
g.add_node(area)
for area in world.areas:
nearby_areas = set()
for node in area.nodes:
if node not in nodes_in_reach:
continue
if isinstance(node, DockNode):
                        # TODO: respect is_blast_shield: if already opened once, no requirement needed.
                        # Includes opening from behind with different criteria
try:
target_node = world_list.resolve_dock_node(node, state.patches)
dock_weakness = state.patches.dock_weakness.get((area.area_asset_id, node.dock_index),
node.default_dock_weakness)
if dock_weakness.requirement.satisfied(state.resources, state.energy):
nearby_areas.add(world_list.nodes_to_area(target_node))
except IndexError as e:
print(f"For {node.name} in {area.name}, received {e}")
continue
for other_area in nearby_areas:
g.add_edge(area, other_area)
self.matplot_widget.ax.clear()
cf = self.matplot_widget.ax.get_figure()
cf.set_facecolor("w")
if world.world_asset_id not in self._world_to_node_positions:
self._world_to_node_positions[world.world_asset_id] = self._positions_for_world(world)
pos = self._world_to_node_positions[world.world_asset_id]
networkx.draw_networkx_nodes(g, pos, ax=self.matplot_widget.ax)
networkx.draw_networkx_edges(g, pos, arrows=True, ax=self.matplot_widget.ax)
networkx.draw_networkx_labels(g, pos, ax=self.matplot_widget.ax,
labels={area: area.name for area in world.areas},
verticalalignment='top')
self.matplot_widget.ax.set_axis_off()
plt.draw_if_interactive()
self.matplot_widget.canvas.draw()
def on_graph_map_world_combo(self):
nodes_in_reach = self.current_nodes_in_reach(self.state_for_current_configuration())
self.update_matplot_widget(nodes_in_reach)
def current_nodes_in_reach(self, state):
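        """Return every node reachable from ``state`` via the resolver, plus the
        current node itself; empty when no state is available."""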
if state is None:
nodes_in_reach = set()
else:
reach = ResolverReach.calculate_reach(self.logic, state)
nodes_in_reach = set(reach.nodes)
nodes_in_reach.add(state.node)
return nodes_in_reach
def _on_tab_changed(self):
if self.map_tab_widget.currentWidget() == self.tab_graph_map:
self.on_graph_map_world_combo()
def update_locations_tree_for_reachable_nodes(self):
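        """Recompute reachability for the current state, refresh node visibility
        and check marks in the tree (and the graph map), then persist the state."""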
state = self.state_for_current_configuration()
nodes_in_reach = self.current_nodes_in_reach(state)
if self.map_tab_widget.currentWidget() == self.tab_graph_map:
self.update_matplot_widget(nodes_in_reach)
all_nodes = self.game_description.world_list.all_nodes
for world in self.game_description.world_list.worlds:
for area in world.areas:
area_is_visible = False
for node in area.nodes:
is_collected = node in self._collected_nodes
is_visible = node in nodes_in_reach and not (self._hide_collected_resources
and is_collected)
if self._show_only_resource_nodes:
is_visible = is_visible and node.is_resource_node
node_item = self._node_to_item[node]
node_item.setHidden(not is_visible)
if node.is_resource_node:
resource_node = typing.cast(ResourceNode, node)
node_item.setDisabled(not resource_node.can_collect(state.patches, state.resources, all_nodes))
node_item.setCheckState(0, Qt.Checked if is_collected else Qt.Unchecked)
area_is_visible = area_is_visible or is_visible
self._asset_id_to_item[area.area_asset_id].setHidden(not area_is_visible)
# Persist the current state
self.persist_current_state()
def persist_current_state(self):
world_list = self.game_description.world_list
with self.persistence_path.joinpath("state.json").open("w") as state_file:
json.dump(
{
"actions": [
node.index
for node in self._actions
],
"collected_pickups": {
pickup.name: quantity
for pickup, quantity in self._collected_pickups.items()
},
"elevators": {
str(elevator_id): combo.currentData().as_json if combo.currentIndex() > 0 else None
for elevator_id, combo in self._elevator_id_to_combo.items()
},
"translator_gates": {
str(gate.index): combo.currentData().index if combo.currentIndex() > 0 else None
for gate, combo in self._translator_gate_to_combo.items()
},
"starting_location": world_list.node_to_area_location(self._initial_state.node).as_json,
},
state_file
)
def setup_possible_locations_tree(self):
"""
Creates the possible_locations_tree with all worlds, areas and nodes.
"""
self.possible_locations_tree.itemDoubleClicked.connect(self._on_tree_node_double_clicked)
# TODO: Dark World names
for world in self.game_description.world_list.worlds:
world_item = QTreeWidgetItem(self.possible_locations_tree)
world_item.setText(0, world.name)
world_item.setExpanded(True)
self._asset_id_to_item[world.world_asset_id] = world_item
for area in world.areas:
area_item = QTreeWidgetItem(world_item)
area_item.area = area
area_item.setText(0, area.name)
area_item.setHidden(True)
self._asset_id_to_item[area.area_asset_id] = area_item
for node in area.nodes:
node_item = QTreeWidgetItem(area_item)
if isinstance(node, TranslatorGateNode):
node_item.setText(0, "{} ({})".format(node.name, node.gate))
else:
node_item.setText(0, node.name)
node_item.node = node
if node.is_resource_node:
node_item.setFlags(node_item.flags() & ~Qt.ItemIsUserCheckable)
self._node_to_item[node] = node_item
def setup_elevators(self):
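        """Create one target combo box per shuffleable elevator, grouped per
        world, honoring the configured teleporter shuffle mode."""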
world_list = self.game_description.world_list
nodes_by_world: Dict[str, List[TeleporterNode]] = collections.defaultdict(list)
self._elevator_id_to_combo = {}
areas_to_not_change = {
2278776548, # Sky Temple Gateway
2068511343, # Sky Temple Energy Controller
3136899603, # Aerie Transport Station
1564082177, # Aerie
}
targets = {}
for world, area, node in world_list.all_worlds_areas_nodes:
if isinstance(node, TeleporterNode) and node.editable and area.area_asset_id not in areas_to_not_change:
name = world.correct_name(area.in_dark_aether)
nodes_by_world[name].append(node)
location = AreaLocation(world.world_asset_id, area.area_asset_id)
targets[patcher_file.elevator_area_name(world_list, location, True)] = location
if self.layout_configuration.elevators.mode == TeleporterShuffleMode.ONE_WAY_ANYTHING:
targets = {}
for world in world_list.worlds:
for area in world.areas:
name = world.correct_name(area.in_dark_aether)
targets[f"{name} - {area.name}"] = AreaLocation(world.world_asset_id, area.area_asset_id)
combo_targets = sorted(targets.items(), key=lambda it: it[0])
for world_name in sorted(nodes_by_world.keys()):
nodes = nodes_by_world[world_name]
nodes_locations = [AreaLocation(world_list.nodes_to_world(node).world_asset_id,
world_list.nodes_to_area(node).area_asset_id)
for node in nodes]
nodes_names = [patcher_file.elevator_area_name(world_list, location, True)
for location in nodes_locations]
nodes = sorted(nodes_by_world[world_name], key=lambda it: world_list.nodes_to_area(it).name)
group = QtWidgets.QGroupBox(self.elevators_scroll_contents)
group.setTitle(world_name)
self.elevators_scroll_layout.addWidget(group)
layout = QtWidgets.QGridLayout(group)
for i, (node, location, name) in enumerate(sorted(zip(nodes, nodes_locations, nodes_names),
key=lambda it: it[2])):
node_name = QtWidgets.QLabel(group)
node_name.setText(name)
layout.addWidget(node_name, i, 0)
combo = QtWidgets.QComboBox(group)
if self.layout_configuration.elevators.is_vanilla:
combo.addItem("Vanilla", node.default_connection)
combo.setEnabled(False)
else:
combo.addItem("Undefined", location)
for target_name, connection in combo_targets:
combo.addItem(target_name, connection)
combo.currentIndexChanged.connect(self.update_locations_tree_for_reachable_nodes)
self._elevator_id_to_combo[node.teleporter_instance_id] = combo
layout.addWidget(combo, i, 1)
def setup_translator_gates(self):
world_list = self.game_description.world_list
resource_db = self.game_description.resource_database
self._translator_gate_to_combo = {}
if self.layout_configuration.game != RandovaniaGame.PRIME2:
return
gates = {
f"{area.name} ({node.gate.index})": node.gate
for world, area, node in world_list.all_worlds_areas_nodes
if isinstance(node, TranslatorGateNode)
}
translator_requirement = self.layout_configuration.translator_configuration.translator_requirement
for i, (gate_name, gate) in enumerate(sorted(gates.items(), key=lambda it: it[0])):
node_name = QtWidgets.QLabel(self.translator_gate_scroll_contents)
node_name.setText(gate_name)
self.translator_gate_scroll_layout.addWidget(node_name, i, 0)
combo = QtWidgets.QComboBox(self.translator_gate_scroll_contents)
gate_requirement = translator_requirement[gate]
if gate_requirement in (LayoutTranslatorRequirement.RANDOM,
LayoutTranslatorRequirement.RANDOM_WITH_REMOVED):
combo.addItem("Undefined", self._undefined_item)
for translator, index in translator_configuration.ITEM_INDICES.items():
combo.addItem(translator.long_name, resource_db.get_item(index))
else:
combo.addItem(gate_requirement.long_name, resource_db.get_item(gate_requirement.item_index))
combo.setEnabled(False)
combo.currentIndexChanged.connect(self.update_locations_tree_for_reachable_nodes)
self._translator_gate_to_combo[gate] = combo
self.translator_gate_scroll_layout.addWidget(combo, i, 1)
def setup_starting_location(self, area_location: Optional[AreaLocation]):
world_list = self.game_description.world_list
if len(self.layout_configuration.starting_location.locations) > 1:
if area_location is None:
area_locations = sorted(self.layout_configuration.starting_location.locations,
key=lambda it: world_list.area_name(world_list.area_by_area_location(it)))
location_names = [world_list.area_name(world_list.area_by_area_location(it))
for it in area_locations]
selected_name = QtWidgets.QInputDialog.getItem(self, "Starting Location", "Select starting location",
location_names, 0, False)
area_location = area_locations[location_names.index(selected_name[0])]
self._initial_state.node = world_list.resolve_teleporter_connection(area_location)
self._starting_nodes = {
node
for node in world_list.all_nodes
if node.is_resource_node and node.resource() in self._initial_state.resources
}
def _change_item_quantity(self, pickup: PickupEntry, use_quantity_as_bool: bool, quantity: int):
if use_quantity_as_bool:
if bool(quantity):
quantity = 1
else:
quantity = 0
self._collected_pickups[pickup] = quantity
if not self._during_setup:
self.update_locations_tree_for_reachable_nodes()
def bulk_change_quantity(self, new_quantity: Dict[PickupEntry, int]):
self._during_setup = True
for pickup, quantity in new_quantity.items():
widget = self._widget_for_pickup[pickup]
if isinstance(widget, QCheckBox):
widget.setChecked(quantity > 0)
else:
widget.setValue(quantity)
self._during_setup = False
def _create_widgets_with_quantity(self,
pickup: PickupEntry,
parent_widget: QWidget,
parent_layout: QGridLayout,
row: int,
quantity: int,
):
label = QLabel(parent_widget)
label.setText(pickup.name)
parent_layout.addWidget(label, row, 0)
        spin_box = CustomSpinBox(parent_widget)
        spin_box.setMaximumWidth(50)
        spin_box.setMaximum(quantity)
        spin_box.valueChanged.connect(functools.partial(self._change_item_quantity, pickup, False))
        self._widget_for_pickup[pickup] = spin_box
        parent_layout.addWidget(spin_box, row, 1)
def setup_pickups_box(self, item_pool: List[PickupEntry]):
parent_widgets: Dict[ItemCategory, Tuple[QWidget, QGridLayout]] = {
ItemCategory.EXPANSION: (self.expansions_box, self.expansions_layout),
ItemCategory.ENERGY_TANK: (self.expansions_box, self.expansions_layout),
ItemCategory.TRANSLATOR: (self.translators_box, self.translators_layout),
ItemCategory.TEMPLE_KEY: (self.keys_box, self.keys_layout),
ItemCategory.SKY_TEMPLE_KEY: (self.keys_box, self.keys_layout),
}
major_pickup_parent_widgets = (self.upgrades_box, self.upgrades_layout)
row_for_parent = {
self.expansions_box: 0,
self.translators_box: 0,
self.upgrades_box: 0,
self.keys_box: 0,
}
column_for_parent = {
self.translators_box: 0,
self.upgrades_box: 0,
self.keys_box: 0,
}
k_column_count = 2
pickup_by_name = {}
pickup_with_quantity = {}
for pickup in item_pool:
if pickup.name in pickup_by_name:
pickup_with_quantity[pickup_by_name[pickup.name]] += 1
else:
pickup_by_name[pickup.name] = pickup
pickup_with_quantity[pickup] = 1
non_expansions_with_quantity = []
for pickup, quantity in pickup_with_quantity.items():
self._collected_pickups[pickup] = 0
parent_widget, parent_layout = parent_widgets.get(pickup.item_category, major_pickup_parent_widgets)
row = row_for_parent[parent_widget]
if parent_widget is self.expansions_box:
self._create_widgets_with_quantity(pickup, parent_widget, parent_layout, row, quantity)
row_for_parent[parent_widget] += 1
else:
if quantity > 1:
non_expansions_with_quantity.append((parent_widget, parent_layout, pickup, quantity))
else:
check_box = QCheckBox(parent_widget)
check_box.setText(pickup.name)
check_box.stateChanged.connect(functools.partial(self._change_item_quantity, pickup, True))
self._widget_for_pickup[pickup] = check_box
column = column_for_parent[parent_widget]
parent_layout.addWidget(check_box, row, column)
column += 1
if column >= k_column_count:
column = 0
row += 1
row_for_parent[parent_widget] = row
column_for_parent[parent_widget] = column
for parent_widget, parent_layout, pickup, quantity in non_expansions_with_quantity:
if column_for_parent[parent_widget] != 0:
column_for_parent[parent_widget] = 0
row_for_parent[parent_widget] += 1
self._create_widgets_with_quantity(pickup, parent_widget, parent_layout,
row_for_parent[parent_widget],
quantity)
row_for_parent[parent_widget] += 1
def state_for_current_configuration(self) -> Optional[State]:
all_nodes = self.game_description.world_list.all_nodes
state = self._initial_state.copy()
if self._actions:
state.node = self._actions[-1]
for teleporter, combo in self._elevator_id_to_combo.items():
assert combo.currentData() is not None
state.patches.elevator_connection[teleporter] = combo.currentData()
for gate, item in self._translator_gate_to_combo.items():
state.patches.translator_gates[gate] = item.currentData()
for pickup, quantity in self._collected_pickups.items():
for _ in range(quantity):
add_pickup_to_state(state, pickup)
for node in self._collected_nodes:
add_resource_gain_to_current_resources(node.resource_gain_on_collect(state.patches, state.resources,
all_nodes),
state.resources)
return state
| gpl-3.0 | 5,743,560,068,089,182,000 | 43.599725 | 119 | 0.599926 | false |
tkw1536/GitManager | tests/commands/test_state.py | 1 | 4328 | import unittest
import unittest.mock
from GitManager.commands import state
from GitManager.repo import description
from GitManager.utils import format
from GitManager.repo import implementation
class TestState(unittest.TestCase):
""" Tests that the state command works properly """
@unittest.mock.patch(
'GitManager.repo.implementation.LocalRepository')
@unittest.mock.patch(
'builtins.print')
def test_run(self,
builtins_print: unittest.mock.Mock,
implementation_LocalRepository: unittest.mock.Mock):
# create a repository
repo = description.RepositoryDescription('/path/to/source',
'/path/to/clone')
# create a line
line = format.TerminalLine()
# and a command instance
cmd = state.State(line, [repo], "--no-update")
# if we are up-to-date, nothing should have been printed
implementation_LocalRepository.return_value.exists.return_value = True
implementation_LocalRepository.return_value.remote_status \
.return_value = implementation.RemoteStatus.UP_TO_DATE
self.assertTrue(cmd.run(repo))
implementation_LocalRepository.return_value.remote_status \
.assert_called_with(False)
builtins_print.assert_not_called()
# reset the mock
implementation_LocalRepository.reset_mock()
builtins_print.reset_mock()
# create another command instance
cmd = state.State(line, [repo], "--update")
# if the local repository does not exist, we
implementation_LocalRepository.return_value.exists.return_value = False
self.assertFalse(cmd.run(repo))
# reset the mock
implementation_LocalRepository.reset_mock()
builtins_print.reset_mock()
# if we are up-to-date, nothing should have been printed
implementation_LocalRepository.return_value.exists.return_value = True
implementation_LocalRepository.return_value.remote_status \
.return_value = implementation.RemoteStatus.UP_TO_DATE
self.assertTrue(cmd.run(repo))
implementation_LocalRepository.return_value.remote_status\
.assert_called_with(True)
builtins_print.assert_not_called()
# reset the mock
implementation_LocalRepository.reset_mock()
builtins_print.reset_mock()
# we need to pull
implementation_LocalRepository.return_value.exists.return_value = True
implementation_LocalRepository.return_value.remote_status \
.return_value = implementation.RemoteStatus.REMOTE_NEWER
self.assertFalse(cmd.run(repo))
implementation_LocalRepository.return_value.remote_status \
.assert_called_with(True)
builtins_print.assert_called_with(
format.Format.yellow('Upstream is ahead of your branch, '
'pull required. '))
# reset the mock
implementation_LocalRepository.reset_mock()
builtins_print.reset_mock()
# we need to push
implementation_LocalRepository.return_value.exists.return_value = True
implementation_LocalRepository.return_value.remote_status \
.return_value = implementation.RemoteStatus.LOCAL_NEWER
self.assertFalse(cmd.run(repo))
implementation_LocalRepository.return_value.remote_status \
.assert_called_with(True)
builtins_print.assert_called_with(
format.Format.green('Your branch is ahead of upstream, '
'push required.'))
# reset the mock
implementation_LocalRepository.reset_mock()
builtins_print.reset_mock()
# divergence
implementation_LocalRepository.return_value.exists.return_value = True
implementation_LocalRepository.return_value.remote_status \
.return_value = implementation.RemoteStatus.DIVERGENCE
self.assertFalse(cmd.run(repo))
implementation_LocalRepository.return_value.remote_status \
.assert_called_with(True)
builtins_print.assert_called_with(
format.Format.red('Your branch and upstream have diverged, '
'merge or rebase required. '))
| mit | -7,933,464,358,307,545,000 | 39.830189 | 79 | 0.654575 | false |
muffinresearch/amo-validator | validator/testcases/javascript/actions.py | 1 | 39965 | from copy import deepcopy
from functools import partial
import sys
import types
# Global import of predefinedentities will cause an import loop
import instanceactions
from validator.constants import (BUGZILLA_BUG, DESCRIPTION_TYPES, FENNEC_GUID,
FIREFOX_GUID, MAX_STR_SIZE)
from validator.decorator import version_range
from jstypes import JSArray, JSContext, JSLiteral, JSObject, JSWrapper
NUMERIC_TYPES = (int, long, float, complex)
# None of these operations (or their augmented assignment counterparts) should
# be performed on non-numeric data. Any time we get non-numeric data for these
# guys, we just return window.NaN.
NUMERIC_OPERATORS = ('-', '*', '/', '%', '<<', '>>', '>>>', '|', '^', '&')
NUMERIC_OPERATORS += tuple('%s=' % op for op in NUMERIC_OPERATORS)
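# Illustrative: an assignment like `"abc" *= 2` is evaluated to window.NaN by
# _expr_assignment below instead of raising, mirroring JavaScript's coercion
# rules for numeric operators applied to non-numeric data.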
def get_NaN(traverser):
# If we've cached the traverser's NaN instance, just use that.
ncache = getattr(traverser, 'NAN_CACHE', None)
if ncache is not None:
return ncache
# Otherwise, we need to import GLOBAL_ENTITIES and build a raw copy.
from predefinedentities import GLOBAL_ENTITIES
ncache = traverser._build_global('NaN', GLOBAL_ENTITIES[u'NaN'])
# Cache it so we don't need to do this again.
traverser.NAN_CACHE = ncache
return ncache
def _get_member_exp_property(traverser, node):
"""Return the string value of a member expression's property."""
if node['property']['type'] == 'Identifier' and not node.get('computed'):
return unicode(node['property']['name'])
else:
eval_exp = traverser._traverse_node(node['property'])
return _get_as_str(eval_exp.get_literal_value())
def _expand_globals(traverser, node):
"""Expands a global object that has a lambda value."""
if node.is_global and callable(node.value.get('value')):
result = node.value['value'](traverser)
if isinstance(result, dict):
output = traverser._build_global('--', result)
elif isinstance(result, JSWrapper):
output = result
else:
output = JSWrapper(result, traverser)
# Set the node context.
if 'context' in node.value:
traverser._debug('CONTEXT>>%s' % node.value['context'])
output.context = node.value['context']
else:
traverser._debug('CONTEXT>>INHERITED')
output.context = node.context
return output
return node
def trace_member(traverser, node, instantiate=False):
'Traces a MemberExpression and returns the appropriate object'
traverser._debug('TESTING>>%s' % node['type'])
if node['type'] == 'MemberExpression':
# x.y or x[y]
# x = base
base = trace_member(traverser, node['object'], instantiate)
base = _expand_globals(traverser, base)
identifier = _get_member_exp_property(traverser, node)
# Handle the various global entity properties.
if base.is_global:
# If we've got an XPCOM wildcard, return a copy of the entity.
if 'xpcom_wildcard' in base.value:
traverser._debug('MEMBER_EXP>>XPCOM_WILDCARD')
from predefinedentities import CONTRACT_ENTITIES
if identifier in CONTRACT_ENTITIES:
kw = dict(err_id=('js', 'actions', 'dangerous_contract'),
warning='Dangerous XPCOM contract ID')
kw.update(CONTRACT_ENTITIES[identifier])
traverser.warning(**kw)
base.value = base.value.copy()
del base.value['xpcom_wildcard']
return base
test_identifier(traverser, identifier)
traverser._debug('MEMBER_EXP>>PROPERTY: %s' % identifier)
output = base.get(
traverser=traverser, instantiate=instantiate, name=identifier)
output.context = base.context
if base.is_global:
# In the cases of XPCOM objects, methods generally
# remain bound to their parent objects, even when called
# indirectly.
output.parent = base
return output
elif node['type'] == 'Identifier':
traverser._debug('MEMBER_EXP>>ROOT:IDENTIFIER')
test_identifier(traverser, node['name'])
# If we're supposed to instantiate the object and it doesn't already
    # exist, instantiate the object.
if instantiate and not traverser._is_defined(node['name']):
output = JSWrapper(JSObject(), traverser=traverser)
traverser.contexts[0].set(node['name'], output)
else:
output = traverser._seek_variable(node['name'])
return _expand_globals(traverser, output)
else:
traverser._debug('MEMBER_EXP>>ROOT:EXPRESSION')
# It's an expression, so just try your damndest.
return traverser._traverse_node(node)
def test_identifier(traverser, name):
'Tests whether an identifier is banned'
import predefinedentities
if name in predefinedentities.BANNED_IDENTIFIERS:
traverser.err.warning(
err_id=('js', 'actions', 'banned_identifier'),
warning='Banned or deprecated JavaScript Identifier',
description=predefinedentities.BANNED_IDENTIFIERS[name],
filename=traverser.filename,
line=traverser.line,
column=traverser.position,
context=traverser.context)
def _function(traverser, node):
'Prevents code duplication'
def wrap(traverser, node):
me = JSObject()
traverser.function_collection.append([])
# Replace the current context with a prototypeable JS object.
traverser._pop_context()
me.type_ = 'default' # Treat the function as a normal object.
traverser._push_context(me)
traverser._debug('THIS_PUSH')
traverser.this_stack.append(me) # Allow references to "this"
# Declare parameters in the local scope
params = []
for param in node['params']:
if param['type'] == 'Identifier':
params.append(param['name'])
elif param['type'] == 'ArrayPattern':
for element in param['elements']:
# Array destructuring in function prototypes? LOL!
if element is None or element['type'] != 'Identifier':
continue
params.append(element['name'])
local_context = traverser._peek_context(1)
for param in params:
var = JSWrapper(lazy=True, traverser=traverser)
# We can assume that the params are static because we don't care
# about what calls the function. We want to know whether the
# function solely returns static values. If so, it is a static
# function.
local_context.set(param, var)
traverser._traverse_node(node['body'])
# Since we need to manually manage the "this" stack, pop off that
# context.
traverser._debug('THIS_POP')
traverser.this_stack.pop()
# Call all of the function collection's members to traverse all of the
# child functions.
func_coll = traverser.function_collection.pop()
for func in func_coll:
func()
# Put the function off for traversal at the end of the current block scope.
traverser.function_collection[-1].append(partial(wrap, traverser, node))
return JSWrapper(traverser=traverser, callable=True, dirty=True)
def _define_function(traverser, node):
me = _function(traverser, node)
traverser._peek_context(2).set(node['id']['name'], me)
return me
def _func_expr(traverser, node):
'Represents a lambda function'
return _function(traverser, node)
def _define_with(traverser, node):
'Handles `with` statements'
object_ = traverser._traverse_node(node['object'])
if isinstance(object_, JSWrapper) and isinstance(object_.value, JSObject):
traverser.contexts[-1] = object_.value
traverser.contexts.append(JSContext('block'))
return
def _define_var(traverser, node):
'Creates a local context variable'
traverser._debug('VARIABLE_DECLARATION')
traverser.debug_level += 1
declarations = (node['declarations'] if 'declarations' in node
else node['head'])
kind = node.get('kind', 'let')
for declaration in declarations:
        # It could be destructuring of variables :(
if declaration['id']['type'] == 'ArrayPattern':
vars = []
for element in declaration['id']['elements']:
# NOTE : Multi-level array destructuring sucks. Maybe implement
# it someday if you're bored, but it's so rarely used and it's
# so utterly complex, there's probably no need to ever code it
# up.
if element is None or element['type'] != 'Identifier':
vars.append(None)
continue
vars.append(element['name'])
# The variables are not initialized
if declaration['init'] is None:
# Simple instantiation; no initialization
for var in vars:
if not var:
continue
traverser._declare_variable(var, None)
# The variables are declared inline
elif declaration['init']['type'] == 'ArrayPattern':
# TODO : Test to make sure len(values) == len(vars)
for value in declaration['init']['elements']:
if vars[0]:
traverser._declare_variable(
vars[0], JSWrapper(traverser._traverse_node(value),
traverser=traverser))
vars = vars[1:] # Pop off the first value
# It's being assigned by a JSArray (presumably)
elif declaration['init']['type'] == 'ArrayExpression':
assigner = traverser._traverse_node(declaration['init'])
for value in assigner.value.elements:
if vars[0]:
traverser._declare_variable(vars[0], value)
vars = vars[1:]
elif declaration['id']['type'] == 'ObjectPattern':
init = traverser._traverse_node(declaration['init'])
def _proc_objpattern(init_obj, properties):
for prop in properties:
# Get the name of the init obj's member
if prop['key']['type'] == 'Literal':
prop_name = prop['key']['value']
elif prop['key']['type'] == 'Identifier':
prop_name = prop['key']['name']
else:
continue
if prop['value']['type'] == 'Identifier':
traverser._declare_variable(
prop['value']['name'],
init_obj.get(traverser, prop_name))
elif prop['value']['type'] == 'ObjectPattern':
_proc_objpattern(init_obj.get(traverser, prop_name),
prop['value']['properties'])
if init is not None:
_proc_objpattern(init_obj=init,
properties=declaration['id']['properties'])
else:
var_name = declaration['id']['name']
traverser._debug('NAME>>%s' % var_name)
var_value = traverser._traverse_node(declaration['init'])
traverser._debug('VALUE>>%s' % (var_value.output()
if var_value is not None
else 'None'))
if not isinstance(var_value, JSWrapper):
var = JSWrapper(value=var_value,
const=kind == 'const',
traverser=traverser)
else:
var = var_value
var.const = kind == 'const'
traverser._declare_variable(var_name, var, type_=kind)
if 'body' in node:
traverser._traverse_node(node['body'])
traverser.debug_level -= 1
# The "Declarations" branch contains custom elements.
return True
def _define_obj(traverser, node):
'Creates a local context object'
var = JSObject()
for prop in node['properties']:
if prop['type'] == 'PrototypeMutation':
var_name = 'prototype'
else:
key = prop['key']
if key['type'] == 'Literal':
var_name = key['value']
elif isinstance(key['name'], basestring):
var_name = key['name']
else:
if 'property' in key['name']:
name = key['name']
else:
name = {'property': key['name']}
var_name = _get_member_exp_property(traverser, name)
var_value = traverser._traverse_node(prop['value'])
var.set(var_name, var_value, traverser)
# TODO: Observe "kind"
if not isinstance(var, JSWrapper):
return JSWrapper(var, lazy=True, traverser=traverser)
var.lazy = True
return var
def _define_array(traverser, node):
"""Instantiate an array object from the parse tree."""
arr = JSArray()
arr.elements = map(traverser._traverse_node, node['elements'])
return arr
def _define_template_strings(traverser, node):
"""Instantiate an array of raw and cooked template strings."""
cooked = JSArray()
cooked.elements = map(traverser._traverse_node, node['cooked'])
raw = JSArray()
raw.elements = map(traverser._traverse_node, node['raw'])
cooked.set('raw', raw, traverser)
return cooked
def _define_template(traverser, node):
"""Instantiate a template literal."""
elements = map(traverser._traverse_node, node['elements'])
return reduce(partial(_binary_op, '+', traverser=traverser), elements)
def _define_literal(traverser, node):
"""
Convert a literal node in the parse tree to its corresponding
interpreted value.
"""
value = node['value']
if isinstance(value, dict):
return JSWrapper(JSObject(), traverser=traverser, dirty=True)
wrapper = JSWrapper(value if value is not None else JSLiteral(None),
traverser=traverser)
test_literal(traverser, wrapper)
return wrapper
def test_literal(traverser, wrapper):
"""
Test the value of a literal, in particular only a string literal at the
moment, against possibly dangerous patterns.
"""
value = wrapper.get_literal_value()
if isinstance(value, basestring):
# Local import to prevent import loop.
from validator.testcases.regex import (validate_compat_pref,
validate_string)
validate_string(value, traverser, wrapper=wrapper)
validate_compat_pref(value, traverser, wrapper=wrapper)
def _call_expression(traverser, node):
args = node['arguments']
for arg in args:
traverser._traverse_node(arg, source='arguments')
member = traverser._traverse_node(node['callee'])
if (traverser.filename.startswith('defaults/preferences/') and
('name' not in node['callee'] or
node['callee']['name'] not in (u'pref', u'user_pref'))):
traverser.err.warning(
err_id=('testcases_javascript_actions',
'_call_expression',
'complex_prefs_defaults_code'),
warning='Complex code should not appear in preference defaults '
'files',
description="Calls to functions other than 'pref' and 'user_pref' "
'should not appear in defaults/preferences/ files.',
filename=traverser.filename,
line=traverser.line,
column=traverser.position,
context=traverser.context)
if member.is_global and callable(member.value.get('dangerous', None)):
result = member.value['dangerous'](a=args, t=traverser._traverse_node,
e=traverser.err)
name = member.value.get('name', '')
if result and name:
kwargs = {
'err_id': ('testcases_javascript_actions', '_call_expression',
'called_dangerous_global'),
'warning': '`%s` called in potentially dangerous manner' %
member.value['name'],
'description':
'The global `%s` function was called using a set '
'of dangerous parameters. Calls of this nature '
'are deprecated.' % member.value['name']}
if isinstance(result, DESCRIPTION_TYPES):
kwargs['description'] = result
elif isinstance(result, dict):
kwargs.update(result)
traverser.warning(**kwargs)
elif (node['callee']['type'] == 'MemberExpression' and
node['callee']['property']['type'] == 'Identifier'):
# If we can identify the function being called on any member of any
# instance, we can use that to either generate an output value or test
# for additional conditions.
identifier_name = node['callee']['property']['name']
if identifier_name in instanceactions.INSTANCE_DEFINITIONS:
result = instanceactions.INSTANCE_DEFINITIONS[identifier_name](
args, traverser, node, wrapper=member)
return result
if member.is_global and 'return' in member.value:
if 'object' in node['callee']:
member.parent = trace_member(traverser, node['callee']['object'])
return member.value['return'](wrapper=member, arguments=args,
traverser=traverser)
return JSWrapper(JSObject(), dirty=True, traverser=traverser)
def _call_settimeout(a, t, e):
"""
Handler for setTimeout and setInterval. Should determine whether a[0]
is a lambda function or a string. Strings are banned, lambda functions are
ok. Since we can't do reliable type testing on other variables, we flag
those, too.
"""
if not a:
return
if a[0]['type'] in ('FunctionExpression', 'ArrowFunctionExpression'):
return
if t(a[0]).callable:
return
return {'err_id': ('javascript', 'dangerous_global', 'eval'),
'description':
'In order to prevent vulnerabilities, the `setTimeout` '
'and `setInterval` functions should be called only with '
'function expressions as their first argument.',
'signing_help': (
'Please do not ever call `setTimeout` or `setInterval` with '
'string arguments. If you are passing a function which is '
'not being correctly detected as such, please consider '
'passing a closure or arrow function, which in turn calls '
'the original function.'),
'signing_severity': 'high'}
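# Illustrative JS snippets (hypothetical add-on code) and how the handler
# above treats them:
#   setTimeout("doStuff()", 100);                 // flagged: string argument
#   setTimeout(function () { doStuff(); }, 100);  // ok: function expression
#   setTimeout(() => doStuff(), 100);             // ok: arrow function
#   setTimeout(someWrapper, 100);                 // ok only if the wrapper is
#                                                 // detected as callable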
def _call_require(a, t, e):
"""
Tests for unsafe uses of `require()` in SDK add-ons.
"""
args, traverse, err = a, t, e
    # Only analyze `require()` calls in Jetpack/SDK add-ons that actually
    # received arguments; bail out otherwise.
    if not err.metadata.get('is_jetpack') or not args:
        return
module = traverse(args[0]).get_literal_value()
if not isinstance(module, basestring):
return
if module.startswith('sdk/'):
module = module[len('sdk/'):]
LOW_LEVEL = {
# Added from bugs 689340, 731109
'chrome', 'window-utils', 'observer-service',
# Added from bug 845492
'window/utils', 'sdk/window/utils', 'sdk/deprecated/window-utils',
'tab/utils', 'sdk/tab/utils',
'system/events', 'sdk/system/events',
}
if module in LOW_LEVEL:
err.metadata['requires_chrome'] = True
return {'warning': 'Usage of low-level or non-SDK interface',
'description': 'Your add-on uses an interface which bypasses '
'the high-level protections of the add-on SDK. '
'This interface should be avoided, and its use '
'may significantly complicate your review '
'process.'}
if module == 'widget':
return {'warning': 'Use of deprecated SDK module',
'description':
"The 'widget' module has been deprecated due to a number "
'of performance and usability issues, and has been '
'removed from the SDK as of Firefox 40. Please use the '
"'sdk/ui/button/action' or 'sdk/ui/button/toggle' module "
'instead. See '
'https://developer.mozilla.org/Add-ons/SDK/High-Level_APIs'
'/ui for more information.'}
def _call_create_pref(a, t, e):
"""
Handler for pref() and user_pref() calls in defaults/preferences/*.js files
to ensure that they don't touch preferences outside of the "extensions."
branch.
"""
# We really need to clean up the arguments passed to these functions.
traverser = t.im_self
if not traverser.filename.startswith('defaults/preferences/') or not a:
return
instanceactions.set_preference(JSWrapper(JSLiteral(None),
traverser=traverser),
a, traverser)
value = _get_as_str(t(a[0]))
return test_preference(value)
def test_preference(value):
for branch in 'extensions.', 'services.sync.prefs.sync.extensions.':
if value.startswith(branch) and value.rindex('.') > len(branch):
return
return ('Extensions should not alter preferences outside of the '
"'extensions.' preference branch. Please make sure that "
"all of your extension's preferences are prefixed with "
"'extensions.add-on-name.', where 'add-on-name' is a "
'distinct string unique to and indicative of your add-on.')
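# Illustrative calls (hypothetical preference names):
#   test_preference('extensions.my-addon.enabled')  # None: allowed branch
#   test_preference('browser.startup.homepage')     # returns warning text
#   test_preference('extensions.')                  # returns warning text:
#                                                   # no add-on-specific suffix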
def _readonly_top(traverser, right, node_right):
"""Handle the readonly callback for window.top."""
traverser.notice(
err_id=('testcases_javascript_actions',
'_readonly_top'),
notice='window.top is a reserved variable',
description='The `top` global variable is reserved and cannot be '
'assigned any values starting with Gecko 6. Review your '
'code for any uses of the `top` global, and refer to '
'%s for more information.' % BUGZILLA_BUG % 654137,
for_appversions={FIREFOX_GUID: version_range('firefox',
'6.0a1', '7.0a1'),
FENNEC_GUID: version_range('fennec',
'6.0a1', '7.0a1')},
compatibility_type='warning',
tier=5)
def _expression(traverser, node):
"""
This is a helper method that allows node definitions to point at
`_traverse_node` without needing a reference to a traverser.
"""
return traverser._traverse_node(node['expression'])
def _get_this(traverser, node):
'Returns the `this` object'
if not traverser.this_stack:
from predefinedentities import GLOBAL_ENTITIES
return traverser._build_global('window', GLOBAL_ENTITIES[u'window'])
return traverser.this_stack[-1]
def _new(traverser, node):
'Returns a new copy of a node.'
# We don't actually process the arguments as part of the flow because of
# the Angry T-Rex effect. For now, we just traverse them to ensure they
# don't contain anything dangerous.
args = node['arguments']
if isinstance(args, list):
for arg in args:
traverser._traverse_node(arg, source='arguments')
else:
traverser._traverse_node(args)
elem = traverser._traverse_node(node['callee'])
if not isinstance(elem, JSWrapper):
elem = JSWrapper(elem, traverser=traverser)
if elem.is_global:
traverser._debug('Making overwritable')
elem.value = deepcopy(elem.value)
elem.value['overwritable'] = True
return elem
def _ident(traverser, node):
'Initiates an object lookup on the traverser based on an identifier token'
name = node['name']
# Ban bits like "newThread"
test_identifier(traverser, name)
if traverser._is_defined(name):
return traverser._seek_variable(name)
return JSWrapper(JSObject(), traverser=traverser, dirty=True)
def _expr_assignment(traverser, node):
"""Evaluate an AssignmentExpression node."""
traverser._debug('ASSIGNMENT_EXPRESSION')
traverser.debug_level += 1
traverser._debug('ASSIGNMENT>>PARSING RIGHT')
right = traverser._traverse_node(node['right'])
right = JSWrapper(right, traverser=traverser)
# Treat direct assignment different than augmented assignment.
if node['operator'] == '=':
from predefinedentities import GLOBAL_ENTITIES, is_shared_scope
global_overwrite = False
readonly_value = is_shared_scope(traverser)
node_left = node['left']
traverser._debug('ASSIGNMENT:DIRECT(%s)' % node_left['type'])
if node_left['type'] == 'Identifier':
# Identifiers just need the ID name and a value to push.
# Raise a global overwrite issue if the identifier is global.
global_overwrite = traverser._is_global(node_left['name'])
# Get the readonly attribute and store its value if is_global
if global_overwrite:
global_dict = GLOBAL_ENTITIES[node_left['name']]
if 'readonly' in global_dict:
readonly_value = global_dict['readonly']
traverser._declare_variable(node_left['name'], right, type_='glob')
elif node_left['type'] == 'MemberExpression':
member_object = trace_member(traverser, node_left['object'],
instantiate=True)
global_overwrite = (member_object.is_global and
not ('overwritable' in member_object.value and
member_object.value['overwritable']))
member_property = _get_member_exp_property(traverser, node_left)
traverser._debug('ASSIGNMENT:MEMBER_PROPERTY(%s)'
% member_property)
traverser._debug('ASSIGNMENT:GLOB_OV::%s' % global_overwrite)
# Don't do the assignment if we're facing a global.
if not member_object.is_global:
if member_object.value is None:
member_object.value = JSObject()
if not member_object.is_global:
member_object.value.set(member_property, right, traverser)
else:
# It's probably better to do nothing.
pass
elif 'value' in member_object.value:
member_object_value = _expand_globals(traverser,
member_object).value
if member_property in member_object_value['value']:
# If it's a global and the actual member exists, test
# whether it can be safely overwritten.
member = member_object_value['value'][member_property]
if 'readonly' in member:
global_overwrite = True
readonly_value = member['readonly']
traverser._debug('ASSIGNMENT:DIRECT:GLOB_OVERWRITE %s' %
global_overwrite)
traverser._debug('ASSIGNMENT:DIRECT:READONLY %r' %
readonly_value)
if callable(readonly_value):
readonly_value = readonly_value(traverser, right, node['right'])
if readonly_value and global_overwrite:
kwargs = dict(
err_id=('testcases_javascript_actions',
'_expr_assignment',
'global_overwrite'),
warning='Global variable overwrite',
description='An attempt was made to overwrite a global '
'variable in some JavaScript code.')
if isinstance(readonly_value, DESCRIPTION_TYPES):
kwargs['description'] = readonly_value
elif isinstance(readonly_value, dict):
kwargs.update(readonly_value)
traverser.warning(**kwargs)
return right
lit_right = right.get_literal_value()
traverser._debug('ASSIGNMENT>>PARSING LEFT')
left = traverser._traverse_node(node['left'])
traverser._debug('ASSIGNMENT>>DONE PARSING LEFT')
traverser.debug_level -= 1
if isinstance(left, JSWrapper):
if left.dirty:
return left
lit_left = left.get_literal_value()
token = node['operator']
# Don't perform an operation on None. Python freaks out
if lit_left is None:
lit_left = 0
if lit_right is None:
lit_right = 0
# Give them default values so we have them in scope.
gleft, gright = 0, 0
# All of the assignment operators
operators = {'=': lambda: right,
'+=': lambda: lit_left + lit_right,
'-=': lambda: gleft - gright,
'*=': lambda: gleft * gright,
'/=': lambda: 0 if gright == 0 else (gleft / gright),
'%=': lambda: 0 if gright == 0 else (gleft % gright),
'<<=': lambda: int(gleft) << int(gright),
'>>=': lambda: int(gleft) >> int(gright),
'>>>=': lambda: float(abs(int(gleft)) >> gright),
'|=': lambda: int(gleft) | int(gright),
'^=': lambda: int(gleft) ^ int(gright),
'&=': lambda: int(gleft) & int(gright)}
# If we're modifying a non-numeric type with a numeric operator, return
# NaN.
if (not isinstance(lit_left, NUMERIC_TYPES) and
token in NUMERIC_OPERATORS):
left.set_value(get_NaN(traverser), traverser=traverser)
return left
# If either side of the assignment operator is a string, both sides
# need to be casted to strings first.
if (isinstance(lit_left, types.StringTypes) or
isinstance(lit_right, types.StringTypes)):
lit_left = _get_as_str(lit_left)
lit_right = _get_as_str(lit_right)
gleft, gright = _get_as_num(left), _get_as_num(right)
traverser._debug('ASSIGNMENT>>OPERATION:%s' % token)
if token not in operators:
# We don't support that operator. (yet?)
traverser._debug('ASSIGNMENT>>OPERATOR NOT FOUND', 1)
return left
elif token in ('<<=', '>>=', '>>>=') and gright < 0:
# The user is doing weird bitshifting that will return 0 in JS but
# not in Python.
left.set_value(0, traverser=traverser)
return left
elif (token in ('<<=', '>>=', '>>>=', '|=', '^=', '&=') and
(abs(gleft) == float('inf') or abs(gright) == float('inf'))):
# Don't bother handling infinity for integer-converted operations.
left.set_value(get_NaN(traverser), traverser=traverser)
return left
traverser._debug('ASSIGNMENT::L-value global? (%s)' %
('Y' if left.is_global else 'N'), 1)
try:
new_value = operators[token]()
except Exception:
traverser.system_error(exc_info=sys.exc_info())
new_value = None
# Cap the length of analyzed strings.
if (isinstance(new_value, types.StringTypes) and
len(new_value) > MAX_STR_SIZE):
new_value = new_value[:MAX_STR_SIZE]
traverser._debug('ASSIGNMENT::New value >> %s' % new_value, 1)
left.set_value(new_value, traverser=traverser)
return left
# Though it would otherwise be a syntax error, we say that 4=5 should
# evaluate out to 5.
return right
def _expr_binary(traverser, node):
'Evaluates a BinaryExpression node.'
traverser.debug_level += 1
# Select the proper operator.
operator = node['operator']
traverser._debug('BIN_OPERATOR>>%s' % operator)
# Traverse the left half of the binary expression.
with traverser._debug('BIN_EXP>>l-value'):
if (node['left']['type'] == 'BinaryExpression' and
'__traversal' not in node['left']):
# Process the left branch of the binary expression directly. This
# keeps the recursion cap in line and speeds up processing of
# large chains of binary expressions.
left = _expr_binary(traverser, node['left'])
node['left']['__traversal'] = left
else:
left = traverser._traverse_node(node['left'])
# Traverse the right half of the binary expression.
with traverser._debug('BIN_EXP>>r-value'):
if (operator == 'instanceof' and
node['right']['type'] == 'Identifier' and
node['right']['name'] == 'Function'):
# We make an exception for instanceof's r-value if it's a
# dangerous global, specifically Function.
return JSWrapper(True, traverser=traverser)
else:
right = traverser._traverse_node(node['right'])
traverser._debug('Is dirty? %r' % right.dirty, 1)
return _binary_op(operator, left, right, traverser)
def _binary_op(operator, left, right, traverser):
"""Perform a binary operation on two pre-traversed nodes."""
# Dirty l or r values mean we can skip the expression. A dirty value
# indicates that a lazy operation took place that introduced some
# nondeterminacy.
# FIXME(Kris): We should process these as if they're strings anyway.
if left.dirty:
return left
elif right.dirty:
return right
# Binary expressions are only executed on literals.
left = left.get_literal_value()
right_wrap = right
right = right.get_literal_value()
# Coerce the literals to numbers for numeric operations.
gleft = _get_as_num(left)
gright = _get_as_num(right)
operators = {
'==': lambda: left == right or gleft == gright,
'!=': lambda: left != right,
'===': lambda: left == right, # Be flexible.
'!==': lambda: type(left) != type(right) or left != right,
'>': lambda: left > right,
'<': lambda: left < right,
'<=': lambda: left <= right,
'>=': lambda: left >= right,
'<<': lambda: int(gleft) << int(gright),
'>>': lambda: int(gleft) >> int(gright),
'>>>': lambda: float(abs(int(gleft)) >> int(gright)),
'+': lambda: left + right,
'-': lambda: gleft - gright,
'*': lambda: gleft * gright,
'/': lambda: 0 if gright == 0 else (gleft / gright),
'%': lambda: 0 if gright == 0 else (gleft % gright),
'in': lambda: right_wrap.contains(left),
# TODO : implement instanceof
# FIXME(Kris): Treat instanceof the same as `QueryInterface`
}
output = None
if (operator in ('>>', '<<', '>>>') and
(left is None or right is None or gright < 0)):
output = False
elif operator in operators:
# Concatenation can be silly, so always turn undefineds into empty
# strings and if there are strings, make everything strings.
if operator == '+':
if left is None:
left = ''
if right is None:
right = ''
if isinstance(left, basestring) or isinstance(right, basestring):
left = _get_as_str(left)
right = _get_as_str(right)
# Don't even bother handling infinity if it's a numeric computation.
if (operator in ('<<', '>>', '>>>') and
(abs(gleft) == float('inf') or abs(gright) == float('inf'))):
return get_NaN(traverser)
try:
output = operators[operator]()
except Exception:
traverser.system_error(exc_info=sys.exc_info())
output = None
# Cap the length of analyzed strings.
if (isinstance(output, types.StringTypes) and
len(output) > MAX_STR_SIZE):
output = output[:MAX_STR_SIZE]
wrapper = JSWrapper(output, traverser=traverser)
# Test the newly-created literal for dangerous values.
# This may cause duplicate warnings for strings which
# already match a dangerous value prior to concatenation.
test_literal(traverser, wrapper)
return wrapper
return JSWrapper(output, traverser=traverser)
def _expr_unary(traverser, node):
"""Evaluate a UnaryExpression node."""
expr = traverser._traverse_node(node['argument'])
expr_lit = expr.get_literal_value()
expr_num = _get_as_num(expr_lit)
operators = {'-': lambda: -1 * expr_num,
'+': lambda: expr_num,
'!': lambda: not expr_lit,
'~': lambda: -1 * (expr_num + 1),
'void': lambda: None,
'typeof': lambda: _expr_unary_typeof(expr),
'delete': lambda: None} # We never want to empty the context
if node['operator'] in operators:
output = operators[node['operator']]()
else:
output = None
if not isinstance(output, JSWrapper):
output = JSWrapper(output, traverser=traverser)
return output
def _expr_unary_typeof(wrapper):
"""Evaluate the "typeof" value for a JSWrapper object."""
if (wrapper.callable or
(wrapper.is_global and 'return' in wrapper.value and
'value' not in wrapper.value)):
return 'function'
value = wrapper.value
if value is None:
return 'undefined'
elif isinstance(value, JSLiteral):
value = value.value
if isinstance(value, bool):
return 'boolean'
elif isinstance(value, (int, long, float)):
return 'number'
elif isinstance(value, types.StringTypes):
return 'string'
return 'object'
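# Illustrative mappings, derived from the branches above:
#   callable wrapper                 -> 'function'
#   wrapper around None              -> 'undefined'
#   JSLiteral(True)                  -> 'boolean'
#   JSLiteral(3) or JSLiteral(4.2)   -> 'number'
#   JSLiteral(u'spam')               -> 'string'
#   anything else (e.g. a JSObject)  -> 'object'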
def _get_as_num(value):
"""Return the JS numeric equivalent for a value."""
if isinstance(value, JSWrapper):
value = value.get_literal_value()
if value is None:
return 0
try:
if isinstance(value, types.StringTypes):
if value.startswith('0x'):
return int(value, 16)
else:
return float(value)
elif isinstance(value, (int, float, long)):
return value
else:
return int(value)
except (ValueError, TypeError):
return 0
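# Illustrative coercions:
#   _get_as_num('0x10')  -> 16     (hex string)
#   _get_as_num('3.5')   -> 3.5
#   _get_as_num('spam')  -> 0      (ValueError is swallowed)
#   _get_as_num(None)    -> 0
#   _get_as_num(True)    -> True   (an int subclass, numerically 1)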
def _get_as_str(value):
"""Return the JS string equivalent for a literal value."""
if isinstance(value, JSWrapper):
value = value.get_literal_value()
if value is None:
return ''
if isinstance(value, bool):
return u'true' if value else u'false'
elif isinstance(value, (int, float, long)):
if value == float('inf'):
return u'Infinity'
elif value == float('-inf'):
return u'-Infinity'
# Try to see if we can shave off some trailing significant figures.
try:
if int(value) == value:
return unicode(int(value))
except ValueError:
pass
return unicode(value)
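# Illustrative coercions:
#   _get_as_str(True)          -> u'true'
#   _get_as_str(4.0)           -> u'4'         (trailing figures shaved)
#   _get_as_str(float('inf'))  -> u'Infinity'
#   _get_as_str(None)          -> u''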
| bsd-3-clause | -7,636,647,010,772,091,000 | 36.490619 | 79 | 0.573702 | false |
Cosiroc/bleau-database | Triangulation/MathFunctions.py | 2 | 1561 | ####################################################################################################
#
# Bleau Database - A database of the bouldering area of Fontainebleau
# Copyright (C) Salvaire Fabrice 2016
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
####################################################################################################
####################################################################################################
# Fixme: sign_of ?
def sign(x):
return cmp(x, 0)
####################################################################################################
def trignometric_clamp(x):
""" Clamp *x* in the range [-1.,1]. """
if x > 1.:
return 1.
elif x < -1.:
return -1.
else:
return x
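# Illustrative: trignometric_clamp(1.5) -> 1., trignometric_clamp(-2.) -> -1.,
# and trignometric_clamp(.3) -> .3 (values already in range pass through).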
####################################################################################################
def is_in_trignometric_range(x):
return -1. <= x <= 1
| agpl-3.0 | -8,246,428,365,087,209,000 | 35.302326 | 100 | 0.465727 | false |
gplssm/europepstrans | europepstrans/results/__init__.py | 1 | 13654 | """
TimeFrameResults borrows methods from oemof.outputlib, adapted to the structure
applied here. The most relevant difference is that the results data are stored
in self.data.
"""
from oemof.outputlib import DataFramePlot, ResultsDataFrame
import pickle
from matplotlib import pyplot as plt
import logging
import pandas as pd
class TimeFrameResults:
"""
Container for results of one time frame (i.e. one year)
Attributes
----------
data : DataFrame
        Structured, multi-indexed result data
"""
def __init__(self, **kwargs):
"""
Initializes data object based on oemof results class
"""
results_file = kwargs.get('results_file', None)
self.subset = kwargs.get('subset', None)
self.ax = kwargs.get('ax')
if results_file is None:
# self.data = DataFramePlot(energy_system=kwargs.get('energy_system'))
self.data = ResultsDataFrame(energy_system=kwargs.get('energy_system'))
else:
self.data = pickle.load(open(results_file, 'rb'))
self.reformat_data()
def preview(self):
"""
        Return a short preview (head) of the data
"""
return self.data.head()
def reformat_data(self):
"""
        Extract region information from the bus label and put it into a
        separate index level
"""
# TODO: get regions list from elsewhere
regions = ['deu', 'xfra', 'xbnl']
regions_leading_underscore = ['_' + x for x in regions]
# put bus_label to column (required to work on)
self.data.reset_index(level='bus_label', inplace=True)
self.data.reset_index(level='obj_label', inplace=True)
# extra region from bus label and write to new column
self.data['region'] = self.data['bus_label'].str.extract(
r"(?=(" + '|'.join(regions) + r"))", expand=True)
self.data['region'].fillna('global', inplace=True)
# remove region from bus_label and obj_label
self.data['bus_label'] = self.data['bus_label'].str.replace(
r"(" + '|'.join(regions_leading_underscore) + r")", '')
self.data['obj_label'] = self.data['obj_label'].str.replace(
r"(" + '|'.join(regions_leading_underscore) + r")", '')
# put bus_label back to index
self.data = self.data.set_index(['bus_label', 'region', 'obj_label'],
append=True)
# reorder and resort levels
level_order = ['bus_label', 'type', 'obj_label', 'region', 'datetime']
self.data = self.data.reorder_levels(level_order)
def slice_by(self, **kwargs):
r""" Method for slicing the ResultsDataFrame. A subset is returned.
Parameters
----------
bus_label : string
type : string (to_bus/from_bus/other)
obj_label: string
date_from : string
Start date selection e.g. "2016-01-01 00:00:00". If not set, the
whole time range will be plotted.
date_to : string
End date selection e.g. "2016-03-01 00:00:00". If not set, the
whole time range will be plotted.
"""
kwargs.setdefault('bus_label', slice(None))
kwargs.setdefault('type', slice(None))
kwargs.setdefault('obj_label', slice(None))
kwargs.setdefault(
'date_from', self.data.index.get_level_values('datetime')[0])
kwargs.setdefault(
'date_to', self.data.index.get_level_values('datetime')[-1])
# slicing
idx = pd.IndexSlice
subset = self.data.loc[idx[
kwargs['bus_label'],
kwargs['type'],
kwargs['obj_label'],
slice(pd.Timestamp(kwargs['date_from']),
pd.Timestamp(kwargs['date_to']))], :]
return subset
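    # Illustrative call (bus label and dates are assumed):
    #   subset = results.slice_by(bus_label='electricity', type='from_bus',
    #                             date_from='2016-01-01 00:00:00',
    #                             date_to='2016-01-31 23:00:00')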
def slice_unstacked(self, unstacklevel='obj_label',
formatted=False, **kwargs):
r"""Method for slicing the ResultsDataFrame. An unstacked
subset is returned.
Parameters
----------
unstacklevel : string (default: 'obj_label')
Level to unstack the subset of the DataFrame.
        formatted : boolean
            If True, drop the 'bus_label' and 'type' index levels and use a
            flat column index.
"""
subset = self.slice_by(**kwargs)
subset = subset.unstack(level=unstacklevel)
if formatted is True:
subset.reset_index(level=['bus_label', 'type'], drop=True,
inplace=True)
            # use a standard index instead of multi-indexed columns
subset.columns = subset.columns.get_level_values(1).unique()
# return subset
self.subset = subset
def plot(self, **kwargs):
r""" Passing the data attribute to the pandas plotting method. All
parameters will be directly passed to pandas.DataFrame.plot(). See
http://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.plot.html
for more information.
Returns
-------
self
"""
self.ax = self.subset.plot(**kwargs)
return self
def io_plot(self, bus_label, cdict, line_kwa=None, lineorder=None,
bar_kwa=None, barorder=None, **kwargs):
r""" Plotting a combined bar and line plot to see the fitting of in-
and outcomming flows of a bus balance.
Parameters
----------
bus_label : string
Uid of the bus to plot the balance.
cdict : dictionary
A dictionary that has all possible components as keys and its
colors as items.
line_kwa : dictionary
Keyword arguments to be passed to the pandas line plot.
bar_kwa : dictionary
Keyword arguments to be passed to the pandas bar plot.
lineorder : list
Order of columns to plot the line plot
barorder : list
Order of columns to plot the bar plot
Note
----
Further keyword arguments will be passed to the
:class:`slice_unstacked method <DataFramePlot.slice_unstacked>`.
Returns
-------
handles, labels
            Manipulated labels to correct the unusual construction of the
            stack line plot. You can use them for further manipulations.
"""
self.ax = kwargs.get('ax', self.ax)
if bar_kwa is None:
bar_kwa = dict()
if line_kwa is None:
line_kwa = dict()
if self.ax is None:
fig = plt.figure()
self.ax = fig.add_subplot(1, 1, 1)
# Create a bar plot for all input flows
self.slice_unstacked(bus_label=bus_label, type='to_bus', **kwargs)
if barorder is not None:
self.rearrange_subset(barorder)
self.subset.plot(kind='bar', linewidth=0, stacked=True, width=1,
ax=self.ax, color=self.color_from_dict(cdict),
**bar_kwa)
# Create a line plot for all output flows
self.slice_unstacked(bus_label=bus_label, type='from_bus', **kwargs)
if lineorder is not None:
self.rearrange_subset(lineorder)
        # The following changes are made to put the bottom line on the top
        # layer of all lines. Normally the bottom line is the first line that
        # is plotted and would end up on the lowest layer, which is difficult
        # to read.
new_df = pd.DataFrame(index=self.subset.index)
n = 0
tmp = 0
for col in self.subset.columns:
if n < 1:
new_df[col] = self.subset[col]
else:
new_df[col] = self.subset[col] + tmp
tmp = new_df[col]
n += 1
if lineorder is None:
new_df.sort_index(axis=1, ascending=False, inplace=True)
else:
lineorder.reverse()
new_df = new_df[lineorder]
colorlist = self.color_from_dict(cdict)
if isinstance(colorlist, list):
colorlist.reverse()
separator = len(colorlist)
new_df.plot(kind='line', ax=self.ax, color=colorlist,
drawstyle='steps-mid', **line_kwa)
        # Adapt the legend to the new order
handles, labels = self.ax.get_legend_handles_labels()
tmp_lab = [x for x in reversed(labels[0:separator])]
tmp_hand = [x for x in reversed(handles[0:separator])]
handles = tmp_hand + handles[separator:]
labels = tmp_lab + labels[separator:]
labels.reverse()
handles.reverse()
self.ax.legend(handles, labels)
return handles, labels
def rearrange_subset(self, order):
r"""
Change the order of the subset DataFrame
Parameters
----------
order : list
New order of columns
Returns
-------
self
"""
cols = list(self.subset.columns.values)
neworder = [x for x in list(order) if x in set(cols)]
missing = [x for x in list(cols) if x not in set(order)]
if len(missing) > 0:
logging.warning(
"Columns that are not part of the order list are removed: " +
str(missing))
self.subset = self.subset[neworder]
def color_from_dict(self, colordict):
r""" Method to convert a dictionary containing the components and its
colors to a color list that can be directly useed with the color
parameter of the pandas plotting method.
Parameters
----------
colordict : dictionary
A dictionary that has all possible components as keys and its
colors as items.
Returns
-------
list
Containing the colors of all components of the subset attribute
"""
tmplist = list(
map(colordict.get, list(self.subset.columns)))
tmplist = ['#00FFFF' if v is None else v for v in tmplist]
if len(tmplist) == 1:
colorlist = tmplist[0]
else:
colorlist = tmplist
return colorlist
def set_datetime_ticks(self, tick_distance=None, number_autoticks=3,
date_format='%d-%m-%Y %H:%M'):
r""" Set configurable ticks for the time axis. One can choose the
number of ticks or the distance between ticks and the format.
Parameters
----------
tick_distance : real
            The distance between two ticks in hours. If not set, autoticks are
            set (see number_autoticks).
        number_autoticks : int (default: 3)
            The number of ticks on the time axis, independent of the time
            range. The higher the number of ticks, the shorter the
            date_format string should be.
date_format : string (default: '%d-%m-%Y %H:%M')
The string to define the format of the date and time. See
https://docs.python.org/3/library/datetime.html#strftime-and-strptime-behavior
for more information.
"""
dates = self.subset.index.get_level_values('datetime').unique()
if tick_distance is None:
tick_distance = int(len(dates) / number_autoticks) - 1
self.ax.set_xticks(range(0, len(dates), tick_distance),
minor=False)
self.ax.set_xticklabels(
[item.strftime(date_format)
for item in dates.tolist()[0::tick_distance]],
rotation=0, minor=False)
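    # Illustrative usage (assumed): either three auto-ticks with a compact
    # format, or a fixed tick every 24 steps of an hourly index:
    #   plot.set_datetime_ticks(number_autoticks=3, date_format='%b %Y')
    #   plot.set_datetime_ticks(tick_distance=24, date_format='%d-%m')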
def outside_legend(self, reverse=False, plotshare=0.9, **kwargs):
r""" Move the legend outside the plot. Bases on the ideas of Joe
Kington. See
http://stackoverflow.com/questions/4700614/how-to-put-the-legend-out-of-the-plot
for more information.
Parameters
----------
reverse : boolean (default: False)
Print out the legend in reverse order. This is interesting for
stack-plots to have the legend in the same order as the stacks.
plotshare : real (default: 0.9)
Share of the plot area to create space for the legend (0 to 1).
loc : string (default: 'center left')
            Location of the legend.
bbox_to_anchor : tuple (default: (1, 0.5))
Set the anchor for the legend.
ncol : integer (default: 1)
Number of columns of the legend.
        handles : list of handles
            A list of handles if they are already modified by another function
            or method. Normally these handles will be automatically taken from
            the artist object.
        labels : list of labels
            A list of labels if they are already modified by another function
            or method. Normally these labels will be automatically taken from
            the artist object.
Note
----
All keyword arguments (kwargs) will be directly passed to the
matplotlib legend class. See
http://matplotlib.org/api/legend_api.html#matplotlib.legend.Legend
for more parameters.
"""
kwargs.setdefault('loc', 'center left')
kwargs.setdefault('bbox_to_anchor', (1, 0.5))
kwargs.setdefault('ncol', 1)
handles = kwargs.pop('handles', self.ax.get_legend_handles_labels()[0])
labels = kwargs.pop('labels', self.ax.get_legend_handles_labels()[1])
if reverse:
handles.reverse()
labels.reverse()
box = self.ax.get_position()
self.ax.set_position([box.x0, box.y0, box.width * plotshare,
box.height])
self.ax.legend(handles, labels, **kwargs)
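    # Illustrative usage (assumed): reverse the legend of a stack plot and
    # keep 80% of the figure width for the axes:
    #   plot.outside_legend(reverse=True, plotshare=0.8)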
if __name__ == '__main__':
pass | gpl-3.0 | -7,392,969,019,323,245,000 | 35.413333 | 90 | 0.576388 | false |
bollu/polymage | sandbox/apps/python/img_proc/interpolate/init.py | 2 | 2104 | import sys
import os.path
from PIL import Image
import numpy as np
from arg_parser import parse_args
from printer import print_header, print_usage, print_line
def init_images(app_data):
print("[init.py] : initializing images...")
app_args = app_data['app_args']
# input image:
img_path = app_args.img_file
image = np.array(Image.open(img_path))
img_path2 = app_args.alpha_file
alpha = np.array(Image.open(img_path2))
if image.shape[0] != alpha.shape[0] or image.shape[1] != alpha.shape[1]:
print("Please use alpha image with the same shape as the image")
sys.exit(0)
R = image.shape[0]
C = image.shape[1]
image_flip = np.rollaxis(image, 2)
# add alpha channel to image along with other colour channels
imgalpha = np.append(image_flip, alpha)
imgalpha = imgalpha.reshape(4, R, C)
imgalpha_region = imgalpha[0:4, 0:R, 0:C]
# add ghost region
imgalpha_ghost = np.empty((4, R+2, C+2), np.float32)
imgalpha_ghost[0:4, 1:R+1, 1:C+1] = imgalpha_region
# convert input image to floating point
imgalpha_f = np.float32(imgalpha_ghost) / 255.0
# result array
res = np.empty((3, R, C), np.float32)
img_data = {}
img_data['IN'] = imgalpha_f
img_data['OUT'] = res
app_data['img_data'] = img_data
app_data['R'] = R
app_data['C'] = C
return
def get_input(app_data):
# parse the command-line arguments
app_args = parse_args()
app_data['app_args'] = app_args
app_data['mode'] = app_args.mode
app_data['runs'] = int(app_args.runs)
app_data['graph_gen'] = bool(app_args.graph_gen)
app_data['timer'] = app_args.timer
# storage optimization
app_data['optimize_storage'] = bool(app_args.optimize_storage)
# early freeing of allocated arrays
app_data['early_free'] = bool(app_args.early_free)
# pool allocate option
app_data['pool_alloc'] = bool(app_args.pool_alloc)
return
def init_all(app_data):
pipe_data = {}
app_data['pipe_data'] = pipe_data
get_input(app_data)
init_images(app_data)
return
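# Illustrative usage sketch (added, not part of the original app): the driver
# script is expected to build the shared app_data dict and hand it to
# init_all, e.g.:
#   app_data = {}
#   init_all(app_data)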
| apache-2.0 | -8,058,180,493,064,463,000 | 24.658537 | 76 | 0.63308 | false |
MadeiraCloud/salt | sources/salt/states/locale.py | 1 | 1224 | # -*- coding: utf-8 -*-
'''
Management of languages/locales
===============================
The locale can be managed for the system:
.. code-block:: yaml
en_US.UTF-8:
locale.system
'''
def __virtual__():
'''
Only load if the locale module is available in __salt__
'''
return 'locale' if 'locale.get_locale' in __salt__ else False
def system(name):
'''
Set the locale for the system
name
The name of the locale to use
'''
ret = {'name': name,
'changes': {},
'result': None,
'comment': '',
'state_stdout': ''}
if __salt__['locale.get_locale']() == name:
ret['result'] = True
ret['comment'] = 'System locale {0} already set'.format(name)
return ret
if __opts__['test']:
ret['comment'] = 'System locale {0} needs to be set'.format(name)
return ret
if __salt__['locale.set_locale'](name, state_ret=ret):
ret['changes'] = {'locale': name}
ret['result'] = True
ret['comment'] = 'Set system locale {0}'.format(name)
return ret
else:
ret['result'] = False
ret['comment'] = 'Failed to set system locale'
return ret
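# Illustrative note (added): with the SLS example from the module docstring,
# Salt invokes this function as system('en_US.UTF-8'); when __opts__['test']
# is True the function returns with result=None to signal a dry run.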
| apache-2.0 | 5,171,256,469,383,106,000 | 23.979592 | 73 | 0.521242 | false |
balajiiyer-ufl-projects/OpenStackVaccine | test/registrar_test.py | 1 | 1074 | from controller.registrar import Registrar
import json
class TestRegistrar:
def __init__(self, conf, auth_info):
self.registrar = Registrar(conf, auth_info)
def test_client(self):
        assert self.registrar.client is not None
print self.registrar.client.handle.flavors.list()
def test_owner(self, conf):
owner = self.registrar.owner
assert owner["name"] == conf["owner"]
print "name:\t%s" % owner["name"]
if "email" in conf:
assert owner["email"] == conf["email"]
print "email:\t%s" % owner["email"]
if "mobile" in conf:
assert owner["mobile"] == conf["mobile"]
print "mobile:\t%s" % owner["mobile"]
if __name__ == "__main__":
conf_file = open("../config/clients.json")
conf_json = json.load(conf_file)
client = conf_json[0]
auth_info = json.load(open("../config/auth.json"))[client["name"]]
test = TestRegistrar(client, auth_info)
test.test_client()
test.test_owner(client)
| gpl-2.0 | 142,240,600,238,883,500 | 27.263158 | 70 | 0.59311 | false |
Alberto-Beralix/Beralix | i386-squashfs-root/usr/lib/python2.7/trace.py | 1 | 30152 | #! /usr/bin/python2.7
# portions copyright 2001, Autonomous Zones Industries, Inc., all rights...
# err... reserved and offered to the public under the terms of the
# Python 2.2 license.
# Author: Zooko O'Whielacronx
# http://zooko.com/
# mailto:[email protected]
#
# Copyright 2000, Mojam Media, Inc., all rights reserved.
# Author: Skip Montanaro
#
# Copyright 1999, Bioreason, Inc., all rights reserved.
# Author: Andrew Dalke
#
# Copyright 1995-1997, Automatrix, Inc., all rights reserved.
# Author: Skip Montanaro
#
# Copyright 1991-1995, Stichting Mathematisch Centrum, all rights reserved.
#
#
# Permission to use, copy, modify, and distribute this Python software and
# its associated documentation for any purpose without fee is hereby
# granted, provided that the above copyright notice appears in all copies,
# and that both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of neither Automatrix,
# Bioreason or Mojam Media be used in advertising or publicity pertaining to
# distribution of the software without specific, written prior permission.
#
"""program/module to trace Python program or function execution
Sample use, command line:
trace.py -c -f counts --ignore-dir '$prefix' spam.py eggs
trace.py -t --ignore-dir '$prefix' spam.py eggs
trace.py --trackcalls spam.py eggs
Sample use, programmatically
import sys
# create a Trace object, telling it what to ignore, and whether to
# do tracing or line-counting or both.
tracer = trace.Trace(ignoredirs=[sys.prefix, sys.exec_prefix,], trace=0,
count=1)
# run the new command using the given tracer
tracer.run('main()')
# make a report, placing output in /tmp
r = tracer.results()
r.write_results(show_missing=True, coverdir="/tmp")
"""
import linecache
import os
import re
import sys
import time
import token
import tokenize
import inspect
import gc
import dis
try:
import cPickle
pickle = cPickle
except ImportError:
import pickle
try:
import threading
except ImportError:
_settrace = sys.settrace
def _unsettrace():
sys.settrace(None)
else:
def _settrace(func):
threading.settrace(func)
sys.settrace(func)
def _unsettrace():
sys.settrace(None)
threading.settrace(None)
def usage(outfile):
outfile.write("""Usage: %s [OPTIONS] <file> [ARGS]
Meta-options:
--help Display this help then exit.
--version Output version information then exit.
Otherwise, exactly one of the following three options must be given:
-t, --trace Print each line to sys.stdout before it is executed.
-c, --count Count the number of times each line is executed
and write the counts to <module>.cover for each
module executed, in the module's directory.
See also `--coverdir', `--file', `--no-report' below.
-l, --listfuncs Keep track of which functions are executed at least
once and write the results to sys.stdout after the
program exits.
-T, --trackcalls Keep track of caller/called pairs and write the
results to sys.stdout after the program exits.
-r, --report Generate a report from a counts file; do not execute
any code. `--file' must specify the results file to
read, which must have been created in a previous run
with `--count --file=FILE'.
Modifiers:
-f, --file=<file> File to accumulate counts over several runs.
-R, --no-report Do not generate the coverage report files.
Useful if you want to accumulate over several runs.
-C, --coverdir=<dir> Directory where the report files go. The coverage
report for <package>.<module> is written to file
<dir>/<package>/<module>.cover.
-m, --missing Annotate executable lines that were not executed
with '>>>>>> '.
-s, --summary Write a brief summary on stdout for each file.
(Can only be used with --count or --report.)
-g, --timing Prefix each line with the time since the program started.
Only used while tracing.
Filters, may be repeated multiple times:
--ignore-module=<mod> Ignore the given module(s) and its submodules
(if it is a package). Accepts comma separated
list of module names
--ignore-dir=<dir> Ignore files in the given directory (multiple
directories can be joined by os.pathsep).
""" % sys.argv[0])
PRAGMA_NOCOVER = "#pragma NO COVER"
# Simple rx to find lines with no code.
rx_blank = re.compile(r'^\s*(#.*)?$')
class Ignore:
def __init__(self, modules = None, dirs = None):
self._mods = modules or []
self._dirs = dirs or []
self._dirs = map(os.path.normpath, self._dirs)
self._ignore = { '<string>': 1 }
def names(self, filename, modulename):
if modulename in self._ignore:
return self._ignore[modulename]
# haven't seen this one before, so see if the module name is
# on the ignore list. Need to take some care since ignoring
# "cmp" musn't mean ignoring "cmpcache" but ignoring
# "Spam" must also mean ignoring "Spam.Eggs".
for mod in self._mods:
if mod == modulename: # Identical names, so ignore
self._ignore[modulename] = 1
return 1
# check if the module is a proper submodule of something on
# the ignore list
n = len(mod)
# (will not overflow since if the first n characters are the
# same and the name has not already occurred, then the size
# of "name" is greater than that of "mod")
if mod == modulename[:n] and modulename[n] == '.':
self._ignore[modulename] = 1
return 1
# Now check that __file__ isn't in one of the directories
if filename is None:
# must be a built-in, so we must ignore
self._ignore[modulename] = 1
return 1
# Ignore a file when it contains one of the ignorable paths
for d in self._dirs:
# The '+ os.sep' is to ensure that d is a parent directory,
# as compared to cases like:
# d = "/usr/local"
# filename = "/usr/local.py"
# or
# d = "/usr/local.py"
# filename = "/usr/local.py"
if filename.startswith(d + os.sep):
self._ignore[modulename] = 1
return 1
# Tried the different ways, so we don't ignore this module
self._ignore[modulename] = 0
return 0
def modname(path):
"""Return a plausible module name for the patch."""
base = os.path.basename(path)
filename, ext = os.path.splitext(base)
return filename
def fullmodname(path):
"""Return a plausible module name for the path."""
# If the file 'path' is part of a package, then the filename isn't
# enough to uniquely identify it. Try to do the right thing by
# looking in sys.path for the longest matching prefix. We'll
# assume that the rest is the package name.
comparepath = os.path.normcase(path)
longest = ""
for dir in sys.path:
dir = os.path.normcase(dir)
if comparepath.startswith(dir) and comparepath[len(dir)] == os.sep:
if len(dir) > len(longest):
longest = dir
if longest:
base = path[len(longest) + 1:]
else:
base = path
# the drive letter is never part of the module name
drive, base = os.path.splitdrive(base)
base = base.replace(os.sep, ".")
if os.altsep:
base = base.replace(os.altsep, ".")
filename, ext = os.path.splitext(base)
return filename.lstrip(".")
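# Illustrative example (added): assuming /usr/lib/python2.7 is on sys.path,
#   fullmodname('/usr/lib/python2.7/email/mime/text.py')
# returns 'email.mime.text', while modname() would return just 'text'.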
class CoverageResults:
def __init__(self, counts=None, calledfuncs=None, infile=None,
callers=None, outfile=None):
self.counts = counts
if self.counts is None:
self.counts = {}
self.counter = self.counts.copy() # map (filename, lineno) to count
self.calledfuncs = calledfuncs
if self.calledfuncs is None:
self.calledfuncs = {}
self.calledfuncs = self.calledfuncs.copy()
self.callers = callers
if self.callers is None:
self.callers = {}
self.callers = self.callers.copy()
self.infile = infile
self.outfile = outfile
if self.infile:
# Try to merge existing counts file.
try:
counts, calledfuncs, callers = \
pickle.load(open(self.infile, 'rb'))
self.update(self.__class__(counts, calledfuncs, callers))
except (IOError, EOFError, ValueError), err:
print >> sys.stderr, ("Skipping counts file %r: %s"
% (self.infile, err))
def update(self, other):
"""Merge in the data from another CoverageResults"""
counts = self.counts
calledfuncs = self.calledfuncs
callers = self.callers
other_counts = other.counts
other_calledfuncs = other.calledfuncs
other_callers = other.callers
for key in other_counts.keys():
counts[key] = counts.get(key, 0) + other_counts[key]
for key in other_calledfuncs.keys():
calledfuncs[key] = 1
for key in other_callers.keys():
callers[key] = 1
def write_results(self, show_missing=True, summary=False, coverdir=None):
"""
@param coverdir
"""
if self.calledfuncs:
print
print "functions called:"
calls = self.calledfuncs.keys()
calls.sort()
for filename, modulename, funcname in calls:
print ("filename: %s, modulename: %s, funcname: %s"
% (filename, modulename, funcname))
if self.callers:
print
print "calling relationships:"
calls = self.callers.keys()
calls.sort()
lastfile = lastcfile = ""
for ((pfile, pmod, pfunc), (cfile, cmod, cfunc)) in calls:
if pfile != lastfile:
print
print "***", pfile, "***"
lastfile = pfile
lastcfile = ""
if cfile != pfile and lastcfile != cfile:
print " -->", cfile
lastcfile = cfile
print " %s.%s -> %s.%s" % (pmod, pfunc, cmod, cfunc)
# turn the counts data ("(filename, lineno) = count") into something
# accessible on a per-file basis
per_file = {}
for filename, lineno in self.counts.keys():
lines_hit = per_file[filename] = per_file.get(filename, {})
lines_hit[lineno] = self.counts[(filename, lineno)]
# accumulate summary info, if needed
sums = {}
for filename, count in per_file.iteritems():
# skip some "files" we don't care about...
if filename == "<string>":
continue
if filename.startswith("<doctest "):
continue
if filename.endswith((".pyc", ".pyo")):
filename = filename[:-1]
if coverdir is None:
dir = os.path.dirname(os.path.abspath(filename))
modulename = modname(filename)
else:
dir = coverdir
if not os.path.exists(dir):
os.makedirs(dir)
modulename = fullmodname(filename)
# If desired, get a list of the line numbers which represent
# executable content (returned as a dict for better lookup speed)
if show_missing:
lnotab = find_executable_linenos(filename)
else:
lnotab = {}
source = linecache.getlines(filename)
coverpath = os.path.join(dir, modulename + ".cover")
n_hits, n_lines = self.write_results_file(coverpath, source,
lnotab, count)
if summary and n_lines:
percent = 100 * n_hits // n_lines
sums[modulename] = n_lines, percent, modulename, filename
if summary and sums:
mods = sums.keys()
mods.sort()
print "lines cov% module (path)"
for m in mods:
n_lines, percent, modulename, filename = sums[m]
print "%5d %3d%% %s (%s)" % sums[m]
if self.outfile:
# try and store counts and module info into self.outfile
try:
pickle.dump((self.counts, self.calledfuncs, self.callers),
open(self.outfile, 'wb'), 1)
except IOError, err:
print >> sys.stderr, "Can't save counts files because %s" % err
def write_results_file(self, path, lines, lnotab, lines_hit):
"""Return a coverage results file in path."""
try:
outfile = open(path, "w")
except IOError, err:
print >> sys.stderr, ("trace: Could not open %r for writing: %s"
"- skipping" % (path, err))
return 0, 0
n_lines = 0
n_hits = 0
for i, line in enumerate(lines):
lineno = i + 1
# do the blank/comment match to try to mark more lines
# (help the reader find stuff that hasn't been covered)
if lineno in lines_hit:
outfile.write("%5d: " % lines_hit[lineno])
n_hits += 1
n_lines += 1
elif rx_blank.match(line):
outfile.write(" ")
else:
# lines preceded by no marks weren't hit
# Highlight them if so indicated, unless the line contains
# #pragma: NO COVER
if lineno in lnotab and not PRAGMA_NOCOVER in lines[i]:
outfile.write(">>>>>> ")
n_lines += 1
else:
outfile.write(" ")
outfile.write(lines[i].expandtabs(8))
outfile.close()
return n_hits, n_lines
def find_lines_from_code(code, strs):
"""Return dict where keys are lines in the line number table."""
linenos = {}
for _, lineno in dis.findlinestarts(code):
if lineno not in strs:
linenos[lineno] = 1
return linenos
def find_lines(code, strs):
"""Return lineno dict for all code objects reachable from code."""
# get all of the lineno information from the code of this scope level
linenos = find_lines_from_code(code, strs)
# and check the constants for references to other code objects
for c in code.co_consts:
if inspect.iscode(c):
# find another code object, so recurse into it
linenos.update(find_lines(c, strs))
return linenos
def find_strings(filename):
"""Return a dict of possible docstring positions.
    The dict maps line numbers to strings. There is an entry for each
    line that contains only a string or a part of a triple-quoted
string.
"""
d = {}
# If the first token is a string, then it's the module docstring.
# Add this special case so that the test in the loop passes.
prev_ttype = token.INDENT
f = open(filename)
for ttype, tstr, start, end, line in tokenize.generate_tokens(f.readline):
if ttype == token.STRING:
if prev_ttype == token.INDENT:
sline, scol = start
eline, ecol = end
for i in range(sline, eline + 1):
d[i] = 1
prev_ttype = ttype
f.close()
return d
def find_executable_linenos(filename):
"""Return dict where keys are line numbers in the line number table."""
try:
prog = open(filename, "rU").read()
except IOError, err:
print >> sys.stderr, ("Not printing coverage data for %r: %s"
% (filename, err))
return {}
code = compile(prog, filename, "exec")
strs = find_strings(filename)
return find_lines(code, strs)
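# Illustrative usage sketch (added):
#   executable = find_executable_linenos('spam.py')
# returns a dict whose keys are the line numbers of spam.py that hold
# executable code (docstring-only lines are excluded via find_strings).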
class Trace:
def __init__(self, count=1, trace=1, countfuncs=0, countcallers=0,
ignoremods=(), ignoredirs=(), infile=None, outfile=None,
timing=False):
"""
@param count true iff it should count number of times each
line is executed
@param trace true iff it should print out each line that is
being counted
@param countfuncs true iff it should just output a list of
(filename, modulename, funcname,) for functions
that were called at least once; This overrides
`count' and `trace'
@param ignoremods a list of the names of modules to ignore
@param ignoredirs a list of the names of directories to ignore
all of the (recursive) contents of
@param infile file from which to read stored counts to be
added into the results
@param outfile file in which to write the results
        @param timing true iff timing information should be displayed
"""
self.infile = infile
self.outfile = outfile
self.ignore = Ignore(ignoremods, ignoredirs)
self.counts = {} # keys are (filename, linenumber)
self.blabbed = {} # for debugging
self.pathtobasename = {} # for memoizing os.path.basename
self.donothing = 0
self.trace = trace
self._calledfuncs = {}
self._callers = {}
self._caller_cache = {}
self.start_time = None
if timing:
self.start_time = time.time()
if countcallers:
self.globaltrace = self.globaltrace_trackcallers
elif countfuncs:
self.globaltrace = self.globaltrace_countfuncs
elif trace and count:
self.globaltrace = self.globaltrace_lt
self.localtrace = self.localtrace_trace_and_count
elif trace:
self.globaltrace = self.globaltrace_lt
self.localtrace = self.localtrace_trace
elif count:
self.globaltrace = self.globaltrace_lt
self.localtrace = self.localtrace_count
else:
# Ahem -- do nothing? Okay.
self.donothing = 1
def run(self, cmd):
import __main__
dict = __main__.__dict__
if not self.donothing:
threading.settrace(self.globaltrace)
sys.settrace(self.globaltrace)
try:
exec cmd in dict, dict
finally:
if not self.donothing:
sys.settrace(None)
threading.settrace(None)
def runctx(self, cmd, globals=None, locals=None):
if globals is None: globals = {}
if locals is None: locals = {}
if not self.donothing:
_settrace(self.globaltrace)
try:
exec cmd in globals, locals
finally:
if not self.donothing:
_unsettrace()
def runfunc(self, func, *args, **kw):
result = None
if not self.donothing:
sys.settrace(self.globaltrace)
try:
result = func(*args, **kw)
finally:
if not self.donothing:
sys.settrace(None)
return result
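    # Illustrative usage sketch (added; `some_function`, `arg1` and `kwarg`
    # are placeholders, not names from this module):
    #   tracer = Trace(count=1, trace=0)
    #   tracer.runfunc(some_function, arg1, kwarg=value)
    #   tracer.results().write_results(coverdir='/tmp')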
def file_module_function_of(self, frame):
code = frame.f_code
filename = code.co_filename
if filename:
modulename = modname(filename)
else:
modulename = None
funcname = code.co_name
clsname = None
if code in self._caller_cache:
if self._caller_cache[code] is not None:
clsname = self._caller_cache[code]
else:
self._caller_cache[code] = None
## use of gc.get_referrers() was suggested by Michael Hudson
# all functions which refer to this code object
funcs = [f for f in gc.get_referrers(code)
if inspect.isfunction(f)]
# require len(func) == 1 to avoid ambiguity caused by calls to
# new.function(): "In the face of ambiguity, refuse the
# temptation to guess."
if len(funcs) == 1:
dicts = [d for d in gc.get_referrers(funcs[0])
if isinstance(d, dict)]
if len(dicts) == 1:
classes = [c for c in gc.get_referrers(dicts[0])
if hasattr(c, "__bases__")]
if len(classes) == 1:
# ditto for new.classobj()
clsname = classes[0].__name__
# cache the result - assumption is that new.* is
# not called later to disturb this relationship
# _caller_cache could be flushed if functions in
# the new module get called.
self._caller_cache[code] = clsname
if clsname is not None:
funcname = "%s.%s" % (clsname, funcname)
return filename, modulename, funcname
def globaltrace_trackcallers(self, frame, why, arg):
"""Handler for call events.
Adds information about who called who to the self._callers dict.
"""
if why == 'call':
# XXX Should do a better job of identifying methods
this_func = self.file_module_function_of(frame)
parent_func = self.file_module_function_of(frame.f_back)
self._callers[(parent_func, this_func)] = 1
def globaltrace_countfuncs(self, frame, why, arg):
"""Handler for call events.
Adds (filename, modulename, funcname) to the self._calledfuncs dict.
"""
if why == 'call':
this_func = self.file_module_function_of(frame)
self._calledfuncs[this_func] = 1
def globaltrace_lt(self, frame, why, arg):
"""Handler for call events.
If the code block being entered is to be ignored, returns `None',
else returns self.localtrace.
"""
if why == 'call':
code = frame.f_code
filename = frame.f_globals.get('__file__', None)
if filename:
# XXX modname() doesn't work right for packages, so
# the ignore support won't work right for packages
modulename = modname(filename)
if modulename is not None:
ignore_it = self.ignore.names(filename, modulename)
if not ignore_it:
if self.trace:
print (" --- modulename: %s, funcname: %s"
% (modulename, code.co_name))
return self.localtrace
else:
return None
def localtrace_trace_and_count(self, frame, why, arg):
if why == "line":
# record the file name and line number of every trace
filename = frame.f_code.co_filename
lineno = frame.f_lineno
key = filename, lineno
self.counts[key] = self.counts.get(key, 0) + 1
if self.start_time:
print '%.2f' % (time.time() - self.start_time),
bname = os.path.basename(filename)
print "%s(%d): %s" % (bname, lineno,
linecache.getline(filename, lineno)),
return self.localtrace
def localtrace_trace(self, frame, why, arg):
if why == "line":
# record the file name and line number of every trace
filename = frame.f_code.co_filename
lineno = frame.f_lineno
if self.start_time:
print '%.2f' % (time.time() - self.start_time),
bname = os.path.basename(filename)
print "%s(%d): %s" % (bname, lineno,
linecache.getline(filename, lineno)),
return self.localtrace
def localtrace_count(self, frame, why, arg):
if why == "line":
filename = frame.f_code.co_filename
lineno = frame.f_lineno
key = filename, lineno
self.counts[key] = self.counts.get(key, 0) + 1
return self.localtrace
def results(self):
return CoverageResults(self.counts, infile=self.infile,
outfile=self.outfile,
calledfuncs=self._calledfuncs,
callers=self._callers)
def _err_exit(msg):
sys.stderr.write("%s: %s\n" % (sys.argv[0], msg))
sys.exit(1)
def main(argv=None):
import getopt
if argv is None:
argv = sys.argv
try:
opts, prog_argv = getopt.getopt(argv[1:], "tcrRf:d:msC:lTg",
["help", "version", "trace", "count",
"report", "no-report", "summary",
"file=", "missing",
"ignore-module=", "ignore-dir=",
"coverdir=", "listfuncs",
"trackcalls", "timing"])
except getopt.error, msg:
sys.stderr.write("%s: %s\n" % (sys.argv[0], msg))
sys.stderr.write("Try `%s --help' for more information\n"
% sys.argv[0])
sys.exit(1)
trace = 0
count = 0
report = 0
no_report = 0
counts_file = None
missing = 0
ignore_modules = []
ignore_dirs = []
coverdir = None
summary = 0
listfuncs = False
countcallers = False
timing = False
for opt, val in opts:
if opt == "--help":
usage(sys.stdout)
sys.exit(0)
if opt == "--version":
sys.stdout.write("trace 2.0\n")
sys.exit(0)
if opt == "-T" or opt == "--trackcalls":
countcallers = True
continue
if opt == "-l" or opt == "--listfuncs":
listfuncs = True
continue
if opt == "-g" or opt == "--timing":
timing = True
continue
if opt == "-t" or opt == "--trace":
trace = 1
continue
if opt == "-c" or opt == "--count":
count = 1
continue
if opt == "-r" or opt == "--report":
report = 1
continue
if opt == "-R" or opt == "--no-report":
no_report = 1
continue
if opt == "-f" or opt == "--file":
counts_file = val
continue
if opt == "-m" or opt == "--missing":
missing = 1
continue
if opt == "-C" or opt == "--coverdir":
coverdir = val
continue
if opt == "-s" or opt == "--summary":
summary = 1
continue
if opt == "--ignore-module":
for mod in val.split(","):
ignore_modules.append(mod.strip())
continue
if opt == "--ignore-dir":
for s in val.split(os.pathsep):
s = os.path.expandvars(s)
# should I also call expanduser? (after all, could use $HOME)
s = s.replace("$prefix",
os.path.join(sys.prefix, "lib",
"python" + sys.version[:3]))
s = s.replace("$exec_prefix",
os.path.join(sys.exec_prefix, "lib",
"python" + sys.version[:3]))
s = os.path.normpath(s)
ignore_dirs.append(s)
continue
assert 0, "Should never get here"
if listfuncs and (count or trace):
_err_exit("cannot specify both --listfuncs and (--trace or --count)")
if not (count or trace or report or listfuncs or countcallers):
_err_exit("must specify one of --trace, --count, --report, "
"--listfuncs, or --trackcalls")
if report and no_report:
_err_exit("cannot specify both --report and --no-report")
if report and not counts_file:
_err_exit("--report requires a --file")
if no_report and len(prog_argv) == 0:
_err_exit("missing name of file to run")
# everything is ready
if report:
results = CoverageResults(infile=counts_file, outfile=counts_file)
results.write_results(missing, summary=summary, coverdir=coverdir)
else:
sys.argv = prog_argv
progname = prog_argv[0]
sys.path[0] = os.path.split(progname)[0]
t = Trace(count, trace, countfuncs=listfuncs,
countcallers=countcallers, ignoremods=ignore_modules,
ignoredirs=ignore_dirs, infile=counts_file,
outfile=counts_file, timing=timing)
try:
with open(progname) as fp:
code = compile(fp.read(), progname, 'exec')
# try to emulate __main__ namespace as much as possible
globs = {
'__file__': progname,
'__name__': '__main__',
'__package__': None,
'__cached__': None,
}
t.runctx(code, globs, globs)
except IOError, err:
_err_exit("Cannot run file %r because: %s" % (sys.argv[0], err))
except SystemExit:
pass
results = t.results()
if not no_report:
results.write_results(missing, summary=summary, coverdir=coverdir)
if __name__=='__main__':
main()
| gpl-3.0 | -8,048,764,754,056,504,000 | 35.459492 | 79 | 0.541457 | false |
twbarber/pygooglevoice | googlevoice/voice.py | 1 | 11310 | from conf import config
from util import *
import settings
import base64
qpat = re.compile(r'\?')
if settings.DEBUG:
import logging
logging.basicConfig()
log = logging.getLogger('PyGoogleVoice')
log.setLevel(logging.DEBUG)
else:
log = None
class Voice(object):
"""
Main voice instance for interacting with the Google Voice service
    Handles login/logout and most of the basic HTTP methods
"""
def __init__(self):
install_opener(build_opener(HTTPCookieProcessor(CookieJar())))
for name in settings.FEEDS:
setattr(self, name, self.__get_xml_page(name))
setattr(self, 'message', self.__get_xml_page('message'))
######################
# Some handy methods
######################
def special(self):
"""
Returns special identifier for your session (if logged in)
"""
if hasattr(self, '_special') and getattr(self, '_special'):
return self._special
try:
try:
regex = bytes("('_rnr_se':) '(.+)'", 'utf8')
except TypeError:
regex = bytes("('_rnr_se':) '(.+)'")
except NameError:
regex = r"('_rnr_se':) '(.+)'"
try:
sp = re.search(regex, urlopen(settings.INBOX).read()).group(2)
except AttributeError:
sp = None
self._special = sp
return sp
special = property(special)
def login(self, email=None, passwd=None, smsKey=None):
"""
Login to the service using your Google Voice account
        Credentials will be prompted for if not given as args or in the ``~/.gvoice`` config file
"""
if hasattr(self, '_special') and getattr(self, '_special'):
return self
if email is None:
email = config.email
if email is None:
email = input('Email address: ')
if passwd is None:
passwd = config.password
if passwd is None:
from getpass import getpass
passwd = getpass()
content = self.__do_page('login').read()
# holy hackjob
galx = re.search(r"type=\"hidden\"\s+name=\"GALX\"\s+value=\"(.+)\"", content).group(1)
result = self.__do_page('login', {'Email': email, 'Passwd': passwd, 'GALX': galx})
if result.geturl().startswith(getattr(settings, "SMSAUTH")):
content = self.__smsAuth(smsKey)
try:
smsToken = re.search(r"name=\"smsToken\"\s+value=\"([^\"]+)\"", content).group(1)
galx = re.search(r"name=\"GALX\"\s+value=\"([^\"]+)\"", content).group(1)
content = self.__do_page('login', {'smsToken': smsToken, 'service': "grandcentral", 'GALX': galx})
except AttributeError:
raise LoginError
del smsKey, smsToken, galx
del email, passwd
try:
assert self.special
except (AssertionError, AttributeError):
raise LoginError
return self
def __smsAuth(self, smsKey=None):
if smsKey is None:
smsKey = config.smsKey
if smsKey is None:
from getpass import getpass
smsPin = getpass("SMS PIN: ")
content = self.__do_page('smsauth', {'smsUserPin': smsPin}).read()
else:
smsKey = base64.b32decode(re.sub(r' ', '', smsKey), casefold=True).encode("hex")
content = self.__oathtoolAuth(smsKey)
try_count = 1
while "The code you entered didn't verify." in content and try_count < 5:
sleep_seconds = 10
try_count += 1
print('invalid code, retrying after %s seconds (attempt %s)' % (sleep_seconds, try_count))
import time
time.sleep(sleep_seconds)
content = self.__oathtoolAuth(smsKey)
del smsKey
return content
def __oathtoolAuth(self, smsKey):
import commands
smsPin = commands.getstatusoutput('oathtool --totp ' + smsKey)[1]
content = self.__do_page('smsauth', {'smsUserPin': smsPin}).read()
del smsPin
return content
def logout(self):
"""
Logs out an instance and makes sure it does not still have a session
"""
self.__do_page('logout')
del self._special
assert self.special == None
return self
def call(self, outgoingNumber, forwardingNumber=None, phoneType=None, subscriberNumber=None):
"""
Make a call to an ``outgoingNumber`` from your ``forwardingNumber`` (optional).
If you pass in your ``forwardingNumber``, please also pass in the correct ``phoneType``
"""
if forwardingNumber is None:
forwardingNumber = config.forwardingNumber
if phoneType is None:
phoneType = config.phoneType
self.__validate_special_page('call', {
'outgoingNumber': outgoingNumber,
'forwardingNumber': forwardingNumber,
'subscriberNumber': subscriberNumber or 'undefined',
'phoneType': phoneType,
'remember': '1'
})
__call__ = call
def cancel(self, outgoingNumber=None, forwardingNumber=None):
"""
Cancels a call matching outgoing and forwarding numbers (if given).
Will raise an error if no matching call is being placed
"""
self.__validate_special_page('cancel', {
'outgoingNumber': outgoingNumber or 'undefined',
'forwardingNumber': forwardingNumber or 'undefined',
'cancelType': 'C2C',
})
def phones(self):
"""
Returns a list of ``Phone`` instances attached to your account.
"""
return [Phone(self, data) for data in self.contacts['phones'].values()]
phones = property(phones)
def settings(self):
"""
Dict of current Google Voice settings
"""
return AttrDict(self.contacts['settings'])
settings = property(settings)
def send_sms(self, phoneNumber, text):
"""
Send an SMS message to a given ``phoneNumber`` with the given ``text`` message
"""
self.__validate_special_page('sms', {'phoneNumber': phoneNumber, 'text': text})
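    # Illustrative usage sketch (added; the phone number and message text are
    # placeholders):
    #   voice = Voice()
    #   voice.login()            # prompts, or reads the ~/.gvoice config
    #   voice.send_sms('+18005551234', 'hello from python')
    #   voice.logout()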
def search(self, query):
"""
Search your Google Voice Account history for calls, voicemails, and sms
        Returns ``Folder`` instance containing matching messages
"""
return self.__get_xml_page('search', data='?q=%s' % quote(query))()
def archive(self, msg, archive=1):
"""
Archive the specified message by removing it from the Inbox.
"""
if isinstance(msg, Message):
msg = msg.id
assert is_sha1(msg), 'Message id not a SHA1 hash'
self.__messages_post('archive', msg, archive=archive)
def delete(self, msg, trash=1):
"""
Moves this message to the Trash. Use ``message.delete(0)`` to move it out of the Trash.
"""
if isinstance(msg, Message):
msg = msg.id
assert is_sha1(msg), 'Message id not a SHA1 hash'
self.__messages_post('delete', msg, trash=trash)
def download(self, msg, adir=None):
"""
Download a voicemail or recorded call MP3 matching the given ``msg``
which can either be a ``Message`` instance, or a SHA1 identifier.
Saves files to ``adir`` (defaults to current directory).
Message hashes can be found in ``self.voicemail().messages`` for example.
Returns location of saved file.
"""
from os import path, getcwd
if isinstance(msg, Message):
msg = msg.id
assert is_sha1(msg), 'Message id not a SHA1 hash'
if adir is None:
adir = getcwd()
try:
response = self.__do_page('download', msg)
except:
raise DownloadError
fn = path.join(adir, '%s.mp3' % msg)
with open(fn, 'wb') as fo:
fo.write(response.read())
return fn
def contacts(self):
"""
Partial data of your Google Account Contacts related to your Voice account.
For a more comprehensive suite of APIs, check out http://code.google.com/apis/contacts/docs/1.0/developers_guide_python.html
"""
if hasattr(self, '_contacts'):
return self._contacts
self._contacts = self.__get_xml_page('contacts')()
return self._contacts
contacts = property(contacts)
######################
# Helper methods
######################
def __do_page(self, page, data=None, headers={}, terms={}):
"""
        Loads a page out of the settings and passes it on to urllib Request
"""
page = page.upper()
if isinstance(data, dict) or isinstance(data, tuple):
data = urlencode(data)
headers.update({'User-Agent': 'PyGoogleVoice/0.5'})
if log:
log.debug('%s?%s - %s' % (getattr(settings, page)[22:], data or '', headers))
if page in ('DOWNLOAD', 'XML_SEARCH'):
return urlopen(Request(getattr(settings, page) + data, None, headers))
if data:
headers.update({'Content-type': 'application/x-www-form-urlencoded;charset=utf-8'})
pageuri = getattr(settings, page)
if len(terms) > 0:
m = qpat.match(page)
if m:
pageuri += '&'
else:
pageuri += '?'
for i, k in enumerate(terms.keys()):
pageuri += k + '=' + terms[k]
if i < len(terms) - 1:
pageuri += '&'
return urlopen(Request(pageuri, data, headers))
def __validate_special_page(self, page, data={}, **kwargs):
"""
Validates a given special page for an 'ok' response
"""
data.update(kwargs)
load_and_validate(self.__do_special_page(page, data))
_Phone__validate_special_page = __validate_special_page
def __do_special_page(self, page, data=None, headers={}, terms={}):
"""
Add self.special to request data
"""
assert self.special, 'You must login before using this page'
if isinstance(data, tuple):
data += ('_rnr_se', self.special)
elif isinstance(data, dict):
data.update({'_rnr_se': self.special})
return self.__do_page(page, data, headers, terms)
_Phone__do_special_page = __do_special_page
def __get_xml_page(self, page, data=None, headers={}):
"""
Return XMLParser instance generated from given page
"""
return XMLParser(self, page, lambda terms={}: self.__do_special_page('XML_%s' % page.upper(), data, headers, terms).read())
def __messages_post(self, page, *msgs, **kwargs):
"""
Performs message operations, eg deleting,staring,moving
"""
data = kwargs.items()
for msg in msgs:
if isinstance(msg, Message):
msg = msg.id
assert is_sha1(msg), 'Message id not a SHA1 hash'
data += (('messages', msg),)
return self.__do_special_page(page, dict(data))
_Message__messages_post = __messages_post
| bsd-3-clause | 1,199,714,198,842,147,000 | 34.01548 | 132 | 0.558267 | false |
jtakayama/makahiki-draft | install/run_initialize_instance.py | 1 | 7542 | import os
import sys
import subprocess
import shlex
import StringIO
import datetime
sys.path.append(os.path.dirname(os.path.realpath(__file__)) + os.sep + os.pardir + os.sep + "makahiki" + os.sep)
from apps.utils import script_utils
def termination_string():
"""
Gets the current system time and appends it to a termination notice.
"""
now = datetime.datetime.now()
time = now.strftime("%Y-%m-%d %H:%M:%S")
end_time = "Script exiting at %s\n" % time
return end_time
# Modified from manage_py_dir() in script_utils.py
def local_manage_py_dir():
"""Returns the directory holding the manage.py file as a string."""
return os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + os.sep + os.pardir + os.sep + "makahiki")
# Modified from local_reset_db(heroku_app) in script_utils.py
def local_reset_db(logfile):
"""reset db.
Returns a tuple result_tuple. result_tuple[0] has the logfile.
result_tuple[1] is True if the reset was aborted, and False if was not.
"""
local_reset_db_cancel = False
logfile.write("WARNING: This command will reset the database. " \
"All existing data will be deleted. This process is irreversible.\n")
print "WARNING: This command will reset the database. " \
"All existing data will be deleted. This process is irreversible.\n"
value = raw_input("Do you wish to continue (Y/n)? ")
while value != "Y" and value != "n":
logfile.write("Invalid option %s\n" % value)
print "Invalid option %s\n" % value
value = raw_input("Do you wish to continue (Y/n)? ")
if value == "n":
logfile.write("Do you wish to continue (Y/n)? %s\n" % value)
logfile.write("Operation cancelled.")
print "Operation cancelled.\n"
local_reset_db_cancel = True
result_tuple = [logfile, local_reset_db_cancel]
return result_tuple
elif value =="Y":
logfile.write("Do you wish to continue (Y/n)? %s\n" % value)
print "resetting the db..."
os.system("cd " + local_manage_py_dir() + "; python scripts/initialize_postgres.py")
result_tuple = [logfile, local_reset_db_cancel]
return result_tuple
def run(logfile):
"""
Initializes the Makahiki database with default options and logs the
output to a file. This should only be used to initialize local
installations.
"""
now = datetime.datetime.now()
time = now.strftime("%Y-%m-%d %H:%M:%S")
start_time = "Makahiki instance initialization script started at %s\n" % time
logfile.write(start_time)
print start_time
try:
# Retrieve the user's home directory
USER_HOME = subprocess.check_output(["echo $HOME"], stderr=subprocess.STDOUT, shell=True)
# Remove newline from expected "/home/<username>\n"
USER_HOME = USER_HOME[:-1]
USER_PROJECT_HOME = USER_HOME + os.sep + "makahiki"
# cd to makahiki directory
os.chdir(USER_PROJECT_HOME)
# Capture console output from script_utils functions:
normal_stdout = sys.stdout
output_capturer = StringIO.StringIO()
sys.stdout = output_capturer
# Runs the initialization scripts in same order as
# makahiki/makahiki/scripts/initialize_instance.py
instance_type = None
heroku_app = None
manage_py = script_utils.manage_py_command()
manage_command = "python " + manage_py
fixture_path = "makahiki" + os.sep + "fixtures"
# Install requirements
script_utils.install_requirements()
# Switch back to standard I/O
sys.stdout = normal_stdout
output = output_capturer.getvalue()
logfile.write(output)
print(output)
# Clear the logfile buffer.
logfile.flush()
os.fsync(logfile)
# Reset the database
reset_db_result = local_reset_db(logfile)
# If successful, write the output of local_reset_db to a logfile
logfile = reset_db_result[0]
local_reset_db_cancel = reset_db_result[1]
if local_reset_db_cancel:
logfile.write("Makahiki instance initialization was cancelled by the user.")
print "Makahiki instance initialization was cancelled by the user."
end_time = termination_string()
logfile.write(end_time)
print end_time
return logfile
else:
# Resume capturing I/O
normal_stdout = sys.stdout
output_capturer = StringIO.StringIO()
sys.stdout = output_capturer
# Sync the database
script_utils.syncdb(manage_command)
# Switch I/O back, write output to logfile
sys.stdout = normal_stdout
output = output_capturer.getvalue()
logfile.write(output)
print(output)
# Clear the logfile buffer.
logfile.flush()
os.fsync(logfile)
# Resume capturing I/O
normal_stdout = sys.stdout
output_capturer = StringIO.StringIO()
sys.stdout = output_capturer
# Copy static files
script_utils.copy_static_media(heroku_app)
# Switch I/O back, write output to logfile
sys.stdout = normal_stdout
output = output_capturer.getvalue()
logfile.write(output)
print(output)
# Clear the logfile buffer.
logfile.flush()
os.fsync(logfile)
# Resume capturing I/O
normal_stdout = sys.stdout
output_capturer = StringIO.StringIO()
sys.stdout = output_capturer
# Load data
script_utils.load_data(manage_command, instance_type, fixture_path)
# Switch I/O back, write output to logfile
sys.stdout = normal_stdout
output = output_capturer.getvalue()
logfile.write(output)
print(output)
# Clear the logfile buffer.
logfile.flush()
os.fsync(logfile)
# Print a closing message
closing = "\nMakahiki initialization script has completed.\n"
logfile.write(closing)
print closing
end_time = termination_string()
logfile.write(end_time)
print end_time
return logfile
except subprocess.CalledProcessError as cpe:
logfile.write("CalledProcessError: ")
print "CalledProcessError: "
logfile.write(cpe.output)
print cpe.output
logfile.write("Warning: Makahiki initialization did not complete successfully.")
print "Warning: Makahiki initialization did not complete successfully."
end_time = termination_string()
logfile.write(end_time)
print end_time
return logfile
except OSError as ose:
logfile.write("OSError: ")
print "OSError: "
oserror_output = " errno: %s\n filename: %s\n strerror: %s\n" % (ose.errno, ose.filename, ose.strerror)
logfile.write(oserror_output)
print oserror_output
logfile.write("Warning: Makahiki initialization did not complete successfully.")
print "Warning: Makahiki initialization did not complete successfully."
end_time = termination_string()
logfile.write(end_time)
print end_time
return logfile
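# Illustrative usage sketch (added, not part of the original script): callers
# are expected to open a log file and pass it in, e.g.:
#   with open('initialize_instance.log', 'w') as logfile:
#       run(logfile)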
| mit | -7,530,258,053,230,034,000 | 37.284264 | 115 | 0.60647 | false |
xuru/pyvisdk | pyvisdk/do/host_das_ok_event.py | 1 | 1156 |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def HostDasOkEvent(vim, *args, **kwargs):
'''This event records when HA on a host returns to normal after an error.'''
obj = vim.client.factory.create('ns0:HostDasOkEvent')
# do some validation checking...
if (len(args) + len(kwargs)) < 4:
        raise IndexError('Expected at least 4 arguments, got: %d' % (len(args) + len(kwargs)))
required = [ 'chainId', 'createdTime', 'key', 'userName' ]
optional = [ 'changeTag', 'computeResource', 'datacenter', 'ds', 'dvs',
'fullFormattedMessage', 'host', 'net', 'vm', 'dynamicProperty', 'dynamicType' ]
for name, arg in zip(required+optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj
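# Illustrative usage sketch (added): the four required attributes must be
# supplied positionally or as keywords, e.g.
#   HostDasOkEvent(vim, chainId=1, createdTime=now, key=42, userName='root')
# where `vim` is a connected service instance and `now` a datetime value.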
| mit | 3,296,307,332,034,000,000 | 33.029412 | 124 | 0.594291 | false |
jcberquist/SublimeText-Lucee | src/inline_documentation.py | 1 | 2952 | import sublime, sublime_plugin, webbrowser
from os.path import dirname, realpath
from . import utils
from collections import namedtuple
Documentation = namedtuple('Documentation', 'doc_html_variables on_navigate priority')
FILE_PATH = dirname(realpath(__file__)).replace("\\", "/")
DOC_TEMPLATE = ""
PAGINATION_TEMPLATE = ""
documentation_sources = []
def add_documentation_source(callback):
documentation_sources.append(callback)
def get_inline_documentation(view, position):
docs = [ ]
for callback in documentation_sources:
inline_doc = callback(view, position)
if inline_doc:
docs.append(inline_doc)
return docs
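# Illustrative example (added): a documentation source is any callable that
# takes (view, position) and returns a Documentation namedtuple or None. The
# names below are placeholders; the doc_vars keys must match the variables
# used by the inline_documentation.html template:
#   def my_source(view, position):
#       doc_vars = {"links": []}
#       return Documentation(doc_vars, None, priority=0)
#   add_documentation_source(my_source)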
def plugin_loaded():
global DOC_TEMPLATE, PAGINATION_TEMPLATE
DOC_TEMPLATE = sublime.load_resource("Packages/" + utils.get_plugin_name() + "/templates/inline_documentation.html")
PAGINATION_TEMPLATE = sublime.load_resource("Packages/" + utils.get_plugin_name() + "/templates/pagination.html")
def build_links(links):
html_links = ['<a class="link" href="' + link["href"] + '">' + link["text"] + '</a>' for link in links]
return "<br>".join(html_links)
def build_pagination(current_index, total_pages):
pagination_variables = {"current_page": str(current_index + 1), "total_pages": str(total_pages)}
previous_index = current_index - 1 if current_index > 0 else total_pages - 1
pagination_variables["prev"] = "page_" + str(previous_index)
next_index = current_index + 1 if current_index < total_pages - 1 else 0
pagination_variables["next"] = "page_" + str(next_index)
return sublime.expand_variables(PAGINATION_TEMPLATE, pagination_variables)
def build_doc_html(inline_doc):
return sublime.expand_variables(DOC_TEMPLATE, inline_doc)
def get_on_navigate(view, docs, current_index):
def on_navigate(href):
if href.startswith("page_"):
new_index = int(href.split("_").pop())
display_documentation(view, docs, new_index)
elif docs[current_index].on_navigate:
docs[current_index].on_navigate(href)
else:
webbrowser.open_new_tab(href)
return on_navigate
def generate_documentation(docs, current_index):
doc_html_variables = dict(docs[current_index].doc_html_variables)
doc_html_variables["pagination"] = build_pagination(current_index, len(docs)) if len(docs) > 1 else ""
doc_html_variables["links"] = build_links(doc_html_variables["links"]) if "links" in doc_html_variables else ""
return build_doc_html(doc_html_variables)
def display_documentation(view, docs, current_index=0):
doc_html = generate_documentation(docs, current_index)
on_navigate = get_on_navigate(view, docs, current_index)
view.show_popup(doc_html, max_width=640, max_height=320, on_navigate=on_navigate)
class LuceeInlineDocumentationCommand(sublime_plugin.TextCommand):
def run(self, edit):
position = self.view.sel()[0].begin()
docs = get_inline_documentation(self.view, position)
if len(docs) > 0:
display_documentation(self.view, sorted(docs, key=lambda doc: doc.priority, reverse=True)) | mit | 6,132,070,834,389,892,000 | 36.379747 | 117 | 0.732724 | false |
BibMartin/python-nvd3 | nvd3/__init__.py | 1 | 1025 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Python-nvd3 is a Python wrapper for NVD3 graph library.
NVD3 is an attempt to build re-usable charts and chart components
for d3.js without taking away the power that d3.js gives you.
Project location : https://github.com/areski/python-nvd3
"""
__version__ = '0.13.10'
__all__ = ['lineChart', 'pieChart', 'lineWithFocusChart',
'stackedAreaChart', 'multiBarHorizontalChart',
'linePlusBarChart', 'cumulativeLineChart',
'scatterChart', 'discreteBarChart', 'multiBarChart']
from .lineChart import lineChart
from .pieChart import pieChart
from .lineWithFocusChart import lineWithFocusChart
from .stackedAreaChart import stackedAreaChart
from .multiBarHorizontalChart import multiBarHorizontalChart
from .linePlusBarChart import linePlusBarChart
from .cumulativeLineChart import cumulativeLineChart
from .scatterChart import scatterChart
from .discreteBarChart import discreteBarChart
from .multiBarChart import multiBarChart
#from . import ipynb
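# Illustrative usage sketch (added, not part of the package __init__; the
# keyword arguments reflect the common python-nvd3 chart API and are
# assumptions here):
if __name__ == "__main__":
    chart = lineChart(name="lineChart", x_is_date=False)
    xdata = list(range(10))
    chart.add_serie(x=xdata, y=[v * v for v in xdata], name="squares")
    chart.buildhtml()          # assembles chart.htmlcontent
    print(chart.htmlcontent)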
| mit | -8,305,914,760,808,070,000 | 34.344828 | 65 | 0.774634 | false |
pivotal-jbarrett/geode-native | tools/gnmsg/server_message_decoder.py | 1 | 9484 | #!/usr/local/bin/python3
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import struct
from dateutil import parser
from server_messages import parse_server_message
from decoder_base import DecoderBase
from message_types import message_types
from numeric_conversion import to_hex_digit
class ServerMessageDecoder(DecoderBase):
def __init__(self, output_queue):
super(ServerMessageDecoder, self).__init__(output_queue)
self.STATE_NEUTRAL_ = 0
self.STATE_WAITING_FOR_MESSAGE_BODY_ = 1
self.receive_trace_parts_retriever_ = None
self.receive_trace_parser_ = None
self.connection_states_ = {}
self.last_header_ = {}
self.nc_version_ = None
self.get_receive_trace_parts_functions_ = {
"0.0.42": self.get_receive_trace_header_base,
"10.0.3": self.get_receive_trace_header_base,
"10.1.1": self.get_receive_trace_header_base,
"10.1.2": self.get_receive_trace_header_base,
"10.1.3": self.get_receive_trace_header_base,
"9.1.1": self.get_receive_trace_header_v911,
}
self.receive_trace_parsers_ = {
"0.0.42": self.parse_response_fields_base,
"10.0.3": self.parse_response_fields_base,
"10.1.1": self.parse_response_fields_base,
"10.1.2": self.parse_response_fields_base,
"10.1.3": self.parse_response_fields_base,
"9.1.1": self.parse_response_fields_v911,
}
def search_for_version(self, line):
if self.nc_version_ == None:
expression = re.compile(r"Product version:.*Native (\d+)\.(\d+)\.(\d+)-")
match = expression.search(line)
if match:
major = match.group(1)
minor = match.group(2)
patch = match.group(3)
self.nc_version_ = major + "." + minor + "." + patch
self.receive_trace_parts_retriever_ = self.get_receive_trace_parts_functions_[
self.nc_version_
]
self.receive_trace_parser_ = self.receive_trace_parsers_[
self.nc_version_
]
def get_receive_trace_header_with_pointer(self, line, parts):
result = False
expression = re.compile(
r"(\d\d:\d\d:\d\d\.\d+).*TcrConnection::readMessage:\s*\[([\d|a-f|A-F|x|X]+).*received header from endpoint.*bytes:\s*([\d|a-f|A-F]+)"
)
match = expression.search(line)
if match:
parts.append(match.group(1))
parts.append(match.group(2))
parts.append(match.group(3))
result = True
return result
def get_receive_trace_header_without_pointer(self, line, parts):
result = False
expression = re.compile(
r"(\d\d:\d\d:\d\d\.\d+).*TcrConnection::readMessage:\s*received header from endpoint.*bytes:\s*([\d|a-f|A-F]+)"
)
match = expression.search(line)
if match:
parts.append(match.group(1))
parts.append("0")
parts.append(match.group(2))
result = True
return result
def get_receive_trace_header_base(self, line, parts):
result = self.get_receive_trace_header_with_pointer(line, parts)
if not result:
result = self.get_receive_trace_header_without_pointer(line, parts)
return result
def get_receive_trace_header_v911(self, line, parts):
result = False
expression = re.compile(
r"(\d\d:\d\d:\d\d\.\d+).*TcrConnection::readMessage: received header from endpoint.*bytes:\s*([\d| ]+)"
)
match = expression.search(line)
if match:
parts.append(parser.parse(match.group(1)))
parts.append("0")
parts.append(match.group(2))
result = True
return result
def get_receive_trace_body_parts(self, line, parts):
result = False
expression = re.compile(
"received message body from endpoint.*bytes:\s*([\d|a-f|A-F]+)"
)
match = expression.search(line)
if match:
message = match.group(1)
parts.append(message)
result = True
return result
def get_receive_trace_parts(self, line, parts):
if self.receive_trace_parts_retriever_ is not None:
return self.receive_trace_parts_retriever_(line, parts)
def get_add_security_trace_parts(self, line, parts):
result = False
expression = re.compile(
r"(\d\d:\d\d:\d\d\.\d+).*TcrMessage::addSecurityPart\s*\[(0x[\d|a-f|A-F]*).*length\s*=\s*(\d+)\s*,\s*encrypted\s+ID\s*=\s*([\d|a-f|A-F]+)"
)
match = expression.search(line)
if match:
parts.append(parser.parse(match.group(1)))
parts.append(match.group(2))
parts.append(match.group(3))
parts.append(match.group(4))
result = True
return result
def decimal_string_to_hex_string(self, byte):
high_nibble = int(int(byte) / 16)
low_nibble = int(byte) % 16
return to_hex_digit[high_nibble] + to_hex_digit[low_nibble]
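    # Illustrative example (added): decimal_string_to_hex_string("255")
    # returns "ff", since 255 == 0xff; format_bytes_as_hex_v911 applies this
    # byte-by-byte to the space-separated decimal dump used by 9.1.1 clients.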
def format_bytes_as_hex_v911(self, message_bytes):
byte_list = message_bytes.split(" ")
hex_string = ""
for byte in byte_list:
if byte:
hex_string += self.decimal_string_to_hex_string(byte)
return hex_string
def parse_response_fields_base(self, message_bytes):
message_type = message_types[int(message_bytes[0:8], 16)]
message_length = int(message_bytes[8:16], 16)
message_number_of_parts = int(message_bytes[16:24], 16)
message_transaction_id = struct.unpack(
">i", bytes.fromhex(message_bytes[24:32])
)[0]
message_security_flag = (int(message_bytes[32:34], 16) & 0x02) >> 1
return (
message_type,
message_length,
message_number_of_parts,
message_transaction_id,
message_security_flag,
)
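    # Layout note (added), derived from the hex-string offsets above: the
    # first 17 bytes of a reply header decode as
    #   bytes 0-3   message type (big-endian int)
    #   bytes 4-7   message length
    #   bytes 8-11  number of parts
    #   bytes 12-15 transaction id (signed, big-endian)
    #   byte 16     flags; bit 0x02 is the security flag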
def parse_response_fields_v911(self, message_bytes):
hex_message_bytes = self.format_bytes_as_hex_v911(message_bytes)
message_type = message_types[int(hex_message_bytes[0:8], 16)]
message_length = int(hex_message_bytes[8:16], 16)
message_number_of_parts = int(hex_message_bytes[16:24], 16)
message_transaction_id = struct.unpack(
">i", bytes.fromhex(hex_message_bytes[24:32])
)[0]
message_security_flag = (int(hex_message_bytes[32:34], 16) & 0x02) >> 1
return (
message_type,
message_length,
message_number_of_parts,
message_transaction_id,
message_security_flag,
)
def parse_response_fields(self, message_bytes):
if self.receive_trace_parser_ is not None:
return self.receive_trace_parser_(message_bytes)
def process_line(self, line):
connection = None
message_bytes = None
message_body = None
self.search_for_version(line)
parts = []
if self.get_receive_trace_parts(line, parts):
(
self.last_header_["Timestamp"],
self.last_header_["Connection"],
message_bytes,
) = parts
elif self.get_receive_trace_body_parts(line, parts):
message_body = parts[0]
elif self.get_add_security_trace_parts(line, parts):
connection = parts[1]
else:
return
if connection not in self.connection_states_:
self.connection_states_[connection] = self.STATE_NEUTRAL_
if self.connection_states_[connection] == self.STATE_NEUTRAL_:
if message_bytes:
self.last_header_["Direction"] = "<---"
(
self.last_header_["Type"],
self.last_header_["Length"],
self.last_header_["Parts"],
self.last_header_["TransactionId"],
self.last_header_["SecurityFlag"],
) = self.parse_response_fields(message_bytes)
self.connection_states_[
connection
] = self.STATE_WAITING_FOR_MESSAGE_BODY_
elif (
self.connection_states_[connection] == self.STATE_WAITING_FOR_MESSAGE_BODY_
):
if message_body:
receive_trace = self.last_header_
self.last_header_ = {}
parse_server_message(receive_trace, message_body)
self.connection_states_[connection] = self.STATE_NEUTRAL_
self.output_queue_.put({"message": receive_trace})
| apache-2.0 | -3,065,811,444,816,776,000 | 37.552846 | 150 | 0.575812 | false |
Azure/azure-sdk-for-python | sdk/managedservices/azure-mgmt-managedservices/azure/mgmt/managedservices/aio/operations/_marketplace_registration_definitions_without_scope_operations.py | 1 | 8182 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class MarketplaceRegistrationDefinitionsWithoutScopeOperations:
"""MarketplaceRegistrationDefinitionsWithoutScopeOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.managedservices.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
filter: Optional[str] = None,
**kwargs
) -> AsyncIterable["_models.MarketplaceRegistrationDefinitionList"]:
"""Gets a list of the marketplace registration definitions for the marketplace identifier.
:param filter: The filter query parameter. Might be used to filter marketplace registration
definition by plan identifier, publisher, version etc.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either MarketplaceRegistrationDefinitionList or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.managedservices.models.MarketplaceRegistrationDefinitionList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.MarketplaceRegistrationDefinitionList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-02-01-preview"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if filter is not None:
query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
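        # extract_data splits one deserialized page into its continuation link and
        # its items; get_next issues the HTTP request for each page on demand as
        # AsyncItemPaged iterates.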
async def extract_data(pipeline_response):
deserialized = self._deserialize('MarketplaceRegistrationDefinitionList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/providers/Microsoft.ManagedServices/marketplaceRegistrationDefinitions'} # type: ignore
async def get(
self,
marketplace_identifier: str,
**kwargs
) -> "_models.MarketplaceRegistrationDefinition":
"""Get the marketplace registration definition for the marketplace identifier.
:param marketplace_identifier: Market place identifier. Expected Formats -
{publisher}.{product[-preview]}.{planName}.{version} or
        {publisher}.{product[-preview]}.{planName} or {publisher}.{product[-preview]} or {publisher}.
:type marketplace_identifier: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: MarketplaceRegistrationDefinition, or the result of cls(response)
:rtype: ~azure.mgmt.managedservices.models.MarketplaceRegistrationDefinition
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.MarketplaceRegistrationDefinition"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-02-01-preview"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'marketplaceIdentifier': self._serialize.url("marketplace_identifier", marketplace_identifier, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('MarketplaceRegistrationDefinition', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/providers/Microsoft.ManagedServices/marketplaceRegistrationDefinitions/{marketplaceIdentifier}'} # type: ignore
| mit | -8,387,111,801,339,155,000 | 47.994012 | 141 | 0.664141 | false |
westernx/vee | vee/environmentrepo.py | 1 | 6434 | from subprocess import CalledProcessError
import os
import re
from vee import log
from vee.cli import style_note, style_warning, style_error, style
from vee.environment import Environment
from vee.exceptions import CliMixin
from vee.git import GitRepo
from vee.packageset import PackageSet
from vee.requirements import Requirements, Header
from vee.utils import cached_property, makedirs
class EnvironmentRepo(GitRepo):
def __init__(self, dbrow, home):
super(EnvironmentRepo, self).__init__(
work_tree=dbrow['path'] or home._abs_path('repos', dbrow['name']),
remote_name=dbrow['remote'],
branch_name=dbrow['branch'],
)
self.id = dbrow['id']
self.name = dbrow['name']
self.home = home
self._req_path = os.path.join(self.work_tree, 'requirements.txt')
def fetch(self):
return super(EnvironmentRepo, self).fetch(self.remote_name, self.branch_name)
def checkout(self, force=False):
super(EnvironmentRepo, self).checkout(
revision='%s/%s' % (self.remote_name, self.branch_name),
branch=self.branch_name,
force=force
)
def get_environment(self):
return Environment(repo=self, home=self.home)
def load_requirements(self, revision=None):
reqs = Requirements(env_repo=self, home=self.home)
if revision is not None:
contents = self.show(revision, 'requirements.txt')
if contents:
reqs.parse_file(contents.splitlines())
else:
if os.path.exists(self._req_path):
reqs.parse_file(self._req_path)
return reqs
def dump_requirements(self, req_set):
tmp = self._req_path + '.tmp'
with open(tmp, 'wb') as fh:
for line in req_set.iter_dump():
fh.write(line)
os.rename(tmp, self._req_path)
def commit(self, message, semver_level=None):
self.git('add', self._req_path, silent=True)
status = list(self.status())
if not status:
raise RuntimeError('nothing to commit')
# Make sure there are no other changes.
for idx, tree, name in status:
if tree.strip():
raise RuntimeError('work-tree is dirty')
req_set = self.load_requirements()
version_header = req_set.headers.get('Version')
if not version_header:
version_header = req_set.add_header('Version', '0.0.0')
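        # Bump the requested semver field (0 = major, 1 = minor, 2 = patch) and
        # reset every lower-order field to zero.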
if semver_level is not None:
version = []
for i, x in enumerate(re.split(r'[.-]', version_header.value)):
try:
version.append(int(x))
except ValueError:
version.append(x)
while len(version) <= semver_level:
version.append(0)
version[semver_level] = version[semver_level] + 1
for i in xrange(semver_level + 1, len(version)):
version[i] = 0
version_header.value = '.'.join(str(x) for x in version)
from vee import __about__ as about
req_set.set_header('Vee-Revision', about.__version__ + '+' + about.__revision__)
self.dump_requirements(req_set)
self.git('add', self._req_path, silent=True)
self.git('commit', '-m', message, silent=True)
def update(self, force=False):
log.info(style_note('Updating repo', self.name))
self.clone_if_not_exists()
if self.remote_name not in self.remotes():
log.warning(style_warning('"%s" does not have remote "%s"' % (self.name, self.remote_name)))
return True
rev = self.fetch()
if not force and not self.check_ff_safety(rev):
log.error('Cannot fast-forward; skipping.')
return False
self.checkout(force=force)
return True
def upgrade(self, dirty=False, subset=None, reinstall=False, relink=False,
no_deps=False, force_branch_link=True
):
self.clone_if_not_exists()
try:
head = self.head
except CalledProcessError:
log.warning(style_warning('no commits in repository'))
head = None
try:
remote_head = self.rev_parse('%s/%s' % (self.remote_name, self.branch_name))
except ValueError:
            log.warning(style_warning('tracked %s/%s does not exist in this repo' % (self.remote_name, self.branch_name)))
remote_head = None
if remote_head and head != remote_head:
log.warning(style_warning('%s repo not checked out to %s/%s' % (
self.name, self.remote_name, self.branch_name)))
dirty = bool(list(self.status()))
if not dirty and self.is_dirty():
log.error('%s repo is dirty; force with --dirty' % self.name)
return False
env = self.get_environment()
req_set = self.load_requirements()
pkg_set = PackageSet(env=env, home=self.home)
# Register the whole set, so that dependencies are pulled from here instead
# of weakly resolved from installed packages.
# TODO: This blanket reinstalls things, even if no_deps is set.
pkg_set.resolve_set(req_set, check_existing=not reinstall)
# Install and/or link.
pkg_set.install(subset or None, link_env=env, reinstall=reinstall, relink=relink, no_deps=no_deps)
if pkg_set._errored and not force_branch_link:
log.warning(style_warning("Not creating branch or version links; force with --force-branch-link"))
return False
# Create a symlink by branch.
path_by_branch = self.home._abs_path('environments', self.name, self.branch_name)
if os.path.lexists(path_by_branch):
os.unlink(path_by_branch)
makedirs(os.path.dirname(path_by_branch))
os.symlink(env.path, path_by_branch)
# Create a symlink by version.
version = req_set.headers.get('Version')
if version:
path_by_version = self.home._abs_path('environments', self.name, 'versions', version.value + ('-dirty' if dirty else ''))
if os.path.lexists(path_by_version):
os.unlink(path_by_version)
makedirs(os.path.dirname(path_by_version))
os.symlink(env.path, path_by_version)
return True
| bsd-3-clause | 6,445,287,524,280,731,000 | 34.546961 | 133 | 0.591389 | false |
etkirsch/pyna-colada | pyna/core/Manager.py | 1 | 1512 | import hashlib, json, os
from pyna.base.Settings import Settings
from pyna.core.Packager import Packager
from pyna.core.NodeList import NodeList
from pyna.ui.PynaDisplay import PynaDisplay
class Manager(Settings):
'''
Manages all things related to other nodes and configuration settings
'''
def __init__(self, alias, location, port):
super().__init__(alias)
self.most_recent_whisperer = None
self.location = '{0}:{1}'.format(location, port)
self.node_list = NodeList()
self.load()
def load(self):
'''Load all configuration settings and nodes'''
config_filename = self.config_path('config.json')
config = json.load(open(config_filename, 'r'))
self.version = config['version']
self.node_list.load()
def whisper_toggle(self, target):
'''Toggles between whispering and all chatting as default command'''
if self.default_command == 'chat':
self.default_command = 'whisperlock'
            if target == '':
target = self.most_recent_whisperer
self.whisper_lock_target = target
return
self.default_command = 'chat'
def create_packager(self):
'''Create a packager from our config settings'''
args = {"alias": self.alias, "location": self.location, "uid": self.uid}
return Packager(self.version, args)
def get_node_hash(self):
'''Create a Node Hash for nodeListHash command'''
# create a list of publickeys
publickeys = self.node_list.hash()
# actually encode
hashed = hashlib.sha512()
hashed.update(publickeys.encode('utf-8'))
return hashed.hexdigest()
| gpl-2.0 | -5,107,208,855,984,028,000 | 28.647059 | 74 | 0.71164 | false |
kparal/anaconda | pyanaconda/localization.py | 1 | 23995 | # Localization classes and functions
#
# Copyright (C) 2012-2013 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): Martin Gracik <[email protected]>
# Vratislav Podzimek <[email protected]>
#
import gettext
import os
import re
import langtable
import locale as locale_mod
import glob
from collections import namedtuple
from pyanaconda import constants
from pyanaconda.iutil import upcase_first_letter, setenv, execWithRedirect
from pyanaconda.iutil import open # pylint: disable=redefined-builtin
import logging
log = logging.getLogger("anaconda")
LOCALE_CONF_FILE_PATH = "/etc/locale.conf"
SCRIPTS_SUPPORTED_BY_CONSOLE = {'Latn', 'Cyrl', 'Grek'}
#e.g. 'SR_RS.UTF-8@latin'
LANGCODE_RE = re.compile(r'(?P<language>[A-Za-z]+)'
r'(_(?P<territory>[A-Za-z]+))?'
r'(\.(?P<encoding>[-A-Za-z0-9]+))?'
r'(@(?P<script>[-A-Za-z0-9]+))?')
class LocalizationConfigError(Exception):
"""Exception class for localization configuration related problems"""
pass
class InvalidLocaleSpec(LocalizationConfigError):
"""Exception class for the errors related to invalid locale specs"""
pass
def parse_langcode(langcode):
"""
For a given langcode (e.g. 'SR_RS.UTF-8@latin') returns a dictionary
with the following keys and example values:
'language' : 'SR'
'territory' : 'RS'
'encoding' : 'UTF-8'
'script' : 'latin'
or None if the given string doesn't match the LANGCODE_RE.
"""
if not langcode:
return None
match = LANGCODE_RE.match(langcode)
if match:
return match.groupdict()
else:
return None
def is_supported_locale(locale):
"""
Function that tells if the given locale is supported by the Anaconda or
not. We consider locales supported by the langtable as supported by the
Anaconda.
:param locale: locale to test
:type locale: str
:return: whether the given locale is supported or not
:rtype: bool
:raise InvalidLocaleSpec: if an invalid locale is given (see LANGCODE_RE)
"""
en_name = get_english_name(locale)
return bool(en_name)
def locale_supported_in_console(locale):
"""
Function that tells if the given locale can be displayed by the Linux console.
The Linux console can display Latin, Cyrillic and Greek characters reliably,
    but others, such as Japanese, can't be displayed correctly.
:param str locale: locale to test
:return: whether the given locale is supported by the console or not
:rtype: bool
:raise InvalidLocaleSpec: if an invalid locale is given (see LANGCODE_RE)
"""
locale_scripts = get_locale_scripts(locale)
return set(locale_scripts).issubset(SCRIPTS_SUPPORTED_BY_CONSOLE)
def langcode_matches_locale(langcode, locale):
"""
Function that tells if the given langcode matches the given locale. I.e. if
all parts of appearing in the langcode (language, territory, script and
encoding) are the same as the matching parts of the locale.
:param langcode: a langcode (e.g. en, en_US, en_US@latin, etc.)
:type langcode: str
:param locale: a valid locale (e.g. en_US.UTF-8 or sr_RS.UTF-8@latin, etc.)
:type locale: str
:return: whether the given langcode matches the given locale or not
:rtype: bool
"""
langcode_parts = parse_langcode(langcode)
locale_parts = parse_langcode(locale)
if not langcode_parts or not locale_parts:
# to match, both need to be valid langcodes (need to have at least
# language specified)
return False
# Check parts one after another. If some part appears in the langcode and
# doesn't match the one from the locale (or is missing in the locale),
# return False, otherwise they match
for part in ("language", "territory", "script", "encoding"):
if langcode_parts[part] and langcode_parts[part] != locale_parts.get(part):
return False
return True
def find_best_locale_match(locale, langcodes):
"""
Find the best match for the locale in a list of langcodes. This is useful
when e.g. pt_BR is a locale and there are possibilities to choose an item
(e.g. rnote) for a list containing both pt and pt_BR or even also pt_PT.
:param locale: a valid locale (e.g. en_US.UTF-8 or sr_RS.UTF-8@latin, etc.)
:type locale: str
:param langcodes: a list or generator of langcodes (e.g. en, en_US, en_US@latin, etc.)
:type langcodes: list(str) or generator(str)
    :return: the best matching langcode from the list, or None if none matches
:rtype: str or None
"""
SCORE_MAP = {"language" : 1000,
"territory": 100,
"script" : 10,
"encoding" : 1}
def get_match_score(locale, langcode):
score = 0
locale_parts = parse_langcode(locale)
langcode_parts = parse_langcode(langcode)
if not locale_parts or not langcode_parts:
return score
for part, part_score in SCORE_MAP.items():
if locale_parts[part] and langcode_parts[part]:
if locale_parts[part] == langcode_parts[part]:
# match
score += part_score
else:
# not match
score -= part_score
elif langcode_parts[part] and not locale_parts[part]:
# langcode has something the locale doesn't have
score -= part_score
return score
scores = []
# get score for each langcode
for langcode in langcodes:
scores.append((langcode, get_match_score(locale, langcode)))
# find the best one
sorted_langcodes = sorted(scores, key=lambda item_score: item_score[1], reverse=True)
# matches matching only script or encoding or both are not useful
if sorted_langcodes and sorted_langcodes[0][1] > SCORE_MAP["territory"]:
return sorted_langcodes[0][0]
else:
return None
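# Illustrative example (not in the original source): with locale "pt_BR.UTF-8" and
# langcodes ["pt", "pt_BR", "pt_PT"], the best match is "pt_BR" -- language plus
# territory (1000 + 100) beats a bare language match (1000) or a territory
# mismatch (1000 - 100).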
def setup_locale(locale, lang=None, text_mode=False):
"""
Procedure setting the system to use the given locale and store it in to the
ksdata.lang object (if given). DOES NOT PERFORM ANY CHECKS OF THE GIVEN
LOCALE.
$LANG must be set by the caller in order to set the language used by gettext.
Doing this in a thread-safe way is up to the caller.
We also try to set a proper console font for the locale in text mode.
If the font for the locale can't be displayed in the Linux console,
we fall back to the English locale.
:param str locale: locale to setup
:param lang: ksdata.lang object or None
:param bool text_mode: if the locale is being setup for text mode
:return: None
:rtype: None
"""
if lang:
lang.lang = locale
# not all locales might be displayable in text mode
if text_mode:
# check if the script corresponding to the locale/language
# can be displayed by the Linux console
# * all scripts for the given locale/language need to be
# supported by the linux console
# * otherwise users might get a screen full of white rectangles
# (also known as "tofu") in text mode
# then we also need to check if we have information about what
# font to use for correctly displaying the given language/locale
script_supported = locale_supported_in_console(locale)
log.debug("scripts found for locale %s: %s", locale, get_locale_scripts(locale))
console_fonts = get_locale_console_fonts(locale)
log.debug("console fonts found for locale %s: %s", locale, console_fonts)
font_set = False
if script_supported and console_fonts:
# try to set console font
for font in console_fonts:
if set_console_font(font):
# console font set successfully, skip the rest
font_set = True
break
if not font_set:
log.warning("can't set console font for locale %s", locale)
# report what exactly went wrong
if not(script_supported):
log.warning("script not supported by console for locale %s", locale)
if not(console_fonts): # no fonts known for locale
log.warning("no console font found for locale %s", locale)
if script_supported and console_fonts:
log.warning("none of the suggested fonts can be set for locale %s", locale)
log.warning("falling back to the English locale")
locale = constants.DEFAULT_LANG
os.environ["LANG"] = locale # pylint: disable=environment-modify
# set the locale to the value we have selected
log.debug("setting locale to: %s", locale)
setenv("LANG", locale)
locale_mod.setlocale(locale_mod.LC_ALL, locale)
def get_english_name(locale):
"""
Function returning english name for the given locale.
:param locale: locale to return english name for
:type locale: str
:return: english name for the locale or empty string if unknown
    :rtype: str
:raise InvalidLocaleSpec: if an invalid locale is given (see LANGCODE_RE)
"""
parts = parse_langcode(locale)
if "language" not in parts:
raise InvalidLocaleSpec("'%s' is not a valid locale" % locale)
name = langtable.language_name(languageId=parts["language"],
territoryId=parts.get("territory", ""),
scriptId=parts.get("script", ""),
languageIdQuery="en")
return upcase_first_letter(name)
def get_native_name(locale):
"""
Function returning native name for the given locale.
:param locale: locale to return native name for
:type locale: str
    :return: native name for the locale or empty string if unknown
    :rtype: str
:raise InvalidLocaleSpec: if an invalid locale is given (see LANGCODE_RE)
"""
parts = parse_langcode(locale)
if "language" not in parts:
raise InvalidLocaleSpec("'%s' is not a valid locale" % locale)
name = langtable.language_name(languageId=parts["language"],
territoryId=parts.get("territory", ""),
scriptId=parts.get("script", ""),
languageIdQuery=parts["language"],
territoryIdQuery=parts.get("territory", ""),
scriptIdQuery=parts.get("script", ""))
return upcase_first_letter(name)
def get_available_translations(localedir=None):
"""
Method that generates (i.e. returns a generator) available translations for
the installer in the given localedir.
:type localedir: str
:return: generator yielding available translations (languages)
:rtype: generator yielding strings
"""
localedir = localedir or gettext._default_localedir
# usually there are no message files for en
messagefiles = sorted(glob.glob(localedir + "/*/LC_MESSAGES/anaconda.mo") +
["blob/en/blob/blob"])
trans_gen = (path.split(os.path.sep)[-3] for path in messagefiles)
langs = set()
for trans in trans_gen:
parts = parse_langcode(trans)
lang = parts.get("language", "")
if lang and lang not in langs:
langs.add(lang)
# check if there are any locales for the language
locales = get_language_locales(lang)
if not locales:
continue
yield lang
def get_language_locales(lang):
"""
Function returning all locales available for the given language.
:param lang: language to get available locales for
:type lang: str
:return: a list of available locales
:rtype: list of strings
:raise InvalidLocaleSpec: if an invalid locale is given (see LANGCODE_RE)
"""
parts = parse_langcode(lang)
if "language" not in parts:
raise InvalidLocaleSpec("'%s' is not a valid language" % lang)
return langtable.list_locales(languageId=parts["language"],
territoryId=parts.get("territory", ""),
scriptId=parts.get("script", ""))
def get_territory_locales(territory):
"""
Function returning list of locales for the given territory. The list is
sorted from the most probable locale to the least probable one (based on
    langtable's ranking).
:param territory: territory to return locales for
:type territory: str
:return: list of locales
:rtype: list of strings
"""
return langtable.list_locales(territoryId=territory)
def get_locale_keyboards(locale):
"""
Function returning preferred keyboard layouts for the given locale.
:param locale: locale string (see LANGCODE_RE)
:type locale: str
:return: list of preferred keyboard layouts
:rtype: list of strings
:raise InvalidLocaleSpec: if an invalid locale is given (see LANGCODE_RE)
"""
parts = parse_langcode(locale)
if "language" not in parts:
raise InvalidLocaleSpec("'%s' is not a valid locale" % locale)
return langtable.list_keyboards(languageId=parts["language"],
territoryId=parts.get("territory", ""),
scriptId=parts.get("script", ""))
def get_locale_timezones(locale):
"""
Function returning preferred timezones for the given locale.
:param locale: locale string (see LANGCODE_RE)
:type locale: str
:return: list of preferred timezones
:rtype: list of strings
:raise InvalidLocaleSpec: if an invalid locale is given (see LANGCODE_RE)
"""
parts = parse_langcode(locale)
if "language" not in parts:
raise InvalidLocaleSpec("'%s' is not a valid locale" % locale)
return langtable.list_timezones(languageId=parts["language"],
territoryId=parts.get("territory", ""),
scriptId=parts.get("script", ""))
def get_locale_territory(locale):
"""
Function returning locale's territory.
:param locale: locale string (see LANGCODE_RE)
:type locale: str
:return: territory or None
:rtype: str or None
:raise InvalidLocaleSpec: if an invalid locale is given (see LANGCODE_RE)
"""
parts = parse_langcode(locale)
if "language" not in parts:
raise InvalidLocaleSpec("'%s' is not a valid locale" % locale)
return parts.get("territory", None)
def get_locale_console_fonts(locale):
"""
Function returning preferred console fonts for the given locale.
:param str locale: locale string (see LANGCODE_RE)
:return: list of preferred console fonts
:rtype: list of strings
:raise InvalidLocaleSpec: if an invalid locale is given (see LANGCODE_RE)
"""
parts = parse_langcode(locale)
if "language" not in parts:
raise InvalidLocaleSpec("'%s' is not a valid locale" % locale)
return langtable.list_consolefonts(languageId=parts["language"],
territoryId=parts.get("territory", ""),
scriptId=parts.get("script", ""))
def get_locale_scripts(locale):
"""
Function returning preferred scripts (writing systems) for the given locale.
:param locale: locale string (see LANGCODE_RE)
:type locale: str
:return: list of preferred scripts
:rtype: list of strings
:raise InvalidLocaleSpec: if an invalid locale is given (see LANGCODE_RE)
"""
parts = parse_langcode(locale)
if "language" not in parts:
raise InvalidLocaleSpec("'%s' is not a valid locale" % locale)
return langtable.list_scripts(languageId=parts["language"],
territoryId=parts.get("territory", ""),
scriptId=parts.get("script", ""))
def get_xlated_timezone(tz_spec_part):
"""
Function returning translated name of a region, city or complete timezone
name according to the current value of the $LANG variable.
:param tz_spec_part: a region, city or complete timezone name
:type tz_spec_part: str
:return: translated name of the given region, city or timezone
:rtype: str
"""
locale = os.environ.get("LANG", constants.DEFAULT_LANG)
parts = parse_langcode(locale)
if "language" not in parts:
raise InvalidLocaleSpec("'%s' is not a valid locale" % locale)
xlated = langtable.timezone_name(tz_spec_part, languageIdQuery=parts["language"],
territoryIdQuery=parts.get("territory", ""),
scriptIdQuery=parts.get("script", ""))
return xlated
def write_language_configuration(lang, root):
"""
Write language configuration to the $root/etc/locale.conf file.
:param lang: ksdata.lang object
:param root: path to the root of the installed system
"""
try:
fpath = os.path.normpath(root + LOCALE_CONF_FILE_PATH)
with open(fpath, "w") as fobj:
fobj.write('LANG="%s"\n' % lang.lang)
except IOError as ioerr:
msg = "Cannot write language configuration file: %s" % ioerr.strerror
raise LocalizationConfigError(msg)
def load_firmware_language(lang, text_mode=False):
"""
Procedure that loads firmware language information (if any). It stores the
information in the given ksdata.lang object and sets the $LANG environment
variable.
This method must be run before any other threads are started.
:param lang: ksdata.lang object
:return: None
:rtype: None
"""
if lang.lang and lang.seen:
# set in kickstart, do not override
return
try:
n = "/sys/firmware/efi/efivars/PlatformLang-8be4df61-93ca-11d2-aa0d-00e098032b8c"
d = open(n, 'r', 0).read()
except IOError:
return
# the contents of the file are:
# 4-bytes of attribute data that we don't care about
# NUL terminated ASCII string like 'en-US'.
if len(d) < 10:
log.debug("PlatformLang was too short")
return
d = d[4:]
if d[2] != '-':
log.debug("PlatformLang was malformed")
return
# they use - and we use _, so fix it...
d = d[:2] + '_' + d[3:-1]
# UEFI 2.3.1 Errata C specifies 2 aliases in common use that
# aren't part of RFC 4646, but are allowed in PlatformLang.
# Because why make anything simple?
if d.startswith('zh_chs'):
d = 'zh_Hans'
elif d.startswith('zh_cht'):
d = 'zh_Hant'
d += '.UTF-8'
if not is_supported_locale(d):
log.debug("PlatformLang was '%s', which is unsupported.", d)
return
locales = get_language_locales(d)
if not locales:
log.debug("No locales found for the PlatformLang '%s'.", d)
return
log.debug("Using UEFI PlatformLang '%s' ('%s') as our language.", d, locales[0])
setup_locale(locales[0], lang, text_mode)
os.environ["LANG"] = locales[0] # pylint: disable=environment-modify
_DateFieldSpec = namedtuple("DateFieldSpec", ["format", "suffix"])
def resolve_date_format(year, month, day, fail_safe=True):
"""
Puts the year, month and day objects in the right order according to the
currently set locale and provides format specification for each of the
fields.
:param year: any object or value representing year
:type year: any
:param month: any object or value representing month
:type month: any
:param day: any object or value representing day
:type day: any
:param bool fail_safe: whether to fall back to default in case of invalid
format or raise exception instead
:returns: a pair where the first field contains a tuple with the year, month
and day objects/values put in the right order and where the second
field contains a tuple with three :class:`_DateFieldSpec` objects
specifying formats respectively to the first (year, month, day)
field, e.g. ((year, month, day), (y_fmt, m_fmt, d_fmt))
:rtype: tuple
:raise ValueError: in case currently set locale has unsupported date
format and fail_safe is set to False
"""
FAIL_SAFE_DEFAULT = "%Y-%m-%d"
def order_terms_formats(fmt_str):
# see date (1), 'O' (not '0') is a mystery, 'E' is Buddhist calendar, '(.*)'
# is an arbitrary suffix
field_spec_re = re.compile(r'([-_0OE^#]*)([yYmbBde])(.*)')
# see date (1)
fmt_str = fmt_str.replace("%F", "%Y-%m-%d")
# e.g. "%d.%m.%Y" -> ['d.', 'm.', 'Y']
fields = fmt_str.split("%")[1:]
ordered_terms = []
ordered_formats = []
for field in fields:
match = field_spec_re.match(field)
if not match:
# ignore fields we are not interested in (like %A for weekday name, etc.)
continue
prefix, item, suffix = match.groups()
if item in ("d", "e"):
# "e" is the same as "_d"
ordered_terms.append(day)
elif item in ("Y", "y"):
# 4-digit year, 2-digit year
ordered_terms.append(year)
elif item in ("m", "b", "B"):
# month number, short month name, long month name
ordered_terms.append(month)
# "%" + prefix + item gives a format for date/time formatting functions
ordered_formats.append(_DateFieldSpec("%" + prefix + item, suffix.strip()))
if len(ordered_terms) != 3 or len(ordered_formats) != 3:
raise ValueError("Not all fields successfully identified in the format '%s'" % fmt_str)
return (tuple(ordered_terms), tuple(ordered_formats))
fmt_str = locale_mod.nl_langinfo(locale_mod.D_FMT)
if not fmt_str or "%" not in fmt_str:
if fail_safe:
# use some sane default
fmt_str = FAIL_SAFE_DEFAULT
else:
raise ValueError("Invalid date format string for current locale: '%s'" % fmt_str)
try:
return order_terms_formats(fmt_str)
except ValueError:
if not fail_safe:
raise
else:
# if this call fails too, something is going terribly wrong and we
# should be informed about it
return order_terms_formats(FAIL_SAFE_DEFAULT)
def set_console_font(font):
"""
Try to set console font to the given value.
:param str font: console font name
:returns: True on success, False on failure
:rtype: Bool
"""
log.debug("setting console font to %s", font)
rc = execWithRedirect("setfont", [font])
if rc == 0:
log.debug("console font set successfully to %s", font)
return True
else:
log.error("setting console font to %s failed", font)
return False
| gpl-2.0 | 3,819,219,907,003,919,000 | 33.978134 | 99 | 0.628506 | false |
hsolbrig/SNOMEDToOWL | SNOMEDCTToOWL/RF2Files/Transitive.py | 1 | 2725 | from typing import Dict, Set
from SNOMEDCTToOWL.SNOMEDToOWLConstants import RelationshipFilePrefix
class Transitive:
relationship_prefix = RelationshipFilePrefix
def __init__(self):
self._children = {} # parent -> set(children) Dict[int, Set[int]]
self._parents = {} # child -> set(parents) Dict[int, Set[int]]
self.__desc_cache = {} # parent -> set(descendants)
self.__ancestor_cache = {} # child -> set(ancestors)
@classmethod
def filtr(cls, fname: str) -> bool:
"""
        Return true if this is a computed relationship file. Transitivity is always based on computed relationships.
:param fname: file name to test
:return: true if it should be processed
"""
return fname.startswith(cls.relationship_prefix)
def add(self, row: Dict) -> None:
"""
Add an RF2 relationship row to the Transitive file
:param row: row to add -- already tested for active
"""
child = int(row["sourceId"])
parent = int(row["destinationId"])
self._children.setdefault(parent, set()).add(child)
self._parents.setdefault(child, set()).add(parent)
def descendants_of(self, parent: int) -> Set[int]:
"""
Return all descendants of parent
:param parent: parent concept
:return: set of concepts
"""
return self._children.get(parent, set())\
.union(*[self.descendants_of(x) for x in self._children.get(parent, set())])
def is_descendant_of(self, desc: int, parent: int) -> bool:
"""
Determine whether desc is a descendant of parent
:param desc: descendant to test
:param parent: parent concept
:return: True or False
"""
if parent not in self.__desc_cache:
self.__desc_cache[parent] = self.descendants_of(parent)
return desc in self.__desc_cache[parent]
def is_descendant_or_self_of(self, desc: int, parent: int) -> bool:
"""
Determine whether desc is a descendant of the parent or is the parent itself
:param desc: descendant to test
:param parent: parent concept
:return: True or False
"""
return self.is_descendant_of(desc, parent) or desc == parent
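    # The two methods below mirror the descendant queries above, walking the
    # parent map (child -> parents) instead of the child map.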
def ancestors_of(self, child: int) -> Set[int]:
return self._parents.get(child, set())\
.union(*[self.ancestors_of(x) for x in self._parents.get(child, set())])
def is_ancestor_of(self, ancestor: int, child: int) -> bool:
if child not in self.__ancestor_cache:
self.__ancestor_cache[child] = self.ancestors_of(child)
return ancestor in self.__ancestor_cache[child]
| apache-2.0 | -6,966,169,800,768,780,000 | 38.492754 | 102 | 0.605872 | false |
ConservationInternational/ldmp-qgis-plugin | LDMP/processing_provider/carbon.py | 1 | 8231 | import numpy as np
from osgeo import gdal, osr
from qgis import processing
from qgis.core import (QgsGeometry,
QgsProcessing,
QgsProcessingAlgorithm,
QgsProcessingException,
QgsProcessingParameterFile,
QgsProcessingParameterFileDestination,
QgsProcessingParameterNumber,
QgsProcessingOutputString,
QgsProcessingOutputNumber)
from qgis.PyQt.QtCore import QCoreApplication
from LDMP import log
from LDMP.summary import calc_cell_area
class TCSummary(QgsProcessingAlgorithm):
"""
    Used for summarizing the output of a carbon change analysis.
"""
def tr(self, string):
return QCoreApplication.translate('processing\\carbon', string)
def createInstance(self):
# Must return a new copy of your algorithm.
return TCSummary()
def name(self):
return 'carbon_summary'
def displayName(self):
return self.tr('Carbon change summary')
def group(self):
return self.tr('Carbon change')
def groupId(self):
return 'trendsearth'
def shortHelpString(self):
return self.tr('Summarize output of a carbon change analysis')
def initAlgorithm(self, config=None):
# Inputs
self.addParameter(
QgsProcessingParameterFile(
'INPUT',
self.tr('Input carbon analysis file')
)
)
self.addParameter(
QgsProcessingParameterNumber(
'YEAR_START',
self.tr('Starting year')
)
)
self.addParameter(
QgsProcessingParameterNumber(
'YEAR_END',
self.tr('Ending year')
)
)
# Outputs
self.addOutput(
QgsProcessingOutputString(
'FOREST_LOSS',
self.tr('Forest loss per year in sq km.')
)
)
self.addOutput(
QgsProcessingOutputString(
'CARBON_LOSS',
self.tr('Carbon loss per year in tonnes of C')
)
)
self.addOutput(
QgsProcessingOutputNumber(
'CARBON_INITIAL',
self.tr('Initial tonnes of C')
)
)
self.addOutput(
QgsProcessingOutputNumber(
'AREA_FOREST',
self.tr('Area of forest in sq km')
)
)
self.addOutput(
QgsProcessingOutputNumber(
'AREA_NON_FOREST',
self.tr('Area of non-forest in sq km')
)
)
self.addOutput(
QgsProcessingOutputNumber(
'AREA_MISSING',
self.tr('Area of missing data in sq km')
)
)
self.addOutput(
QgsProcessingOutputNumber(
'AREA_WATER',
self.tr('Area of water in sq km')
)
)
self.addOutput(
QgsProcessingOutputNumber(
'AREA_SITE',
self.tr('Area of site in sq km')
)
)
def processAlgorithm(self, parameters, context, feedback):
src_file = self.parameterAsFile(parameters,'INPUT', context)
year_start = self.parameterAsInt(parameters,'YEAR_START', context)
year_end = self.parameterAsInt(parameters,'YEAR_END', context)
src_ds = gdal.Open(src_file)
band_f_loss = src_ds.GetRasterBand(1)
band_tc = src_ds.GetRasterBand(2)
block_sizes = band_f_loss.GetBlockSize()
xsize = band_f_loss.XSize
ysize = band_f_loss.YSize
n_out_bands = 1
x_block_size = block_sizes[0]
y_block_size = block_sizes[1]
src_gt = src_ds.GetGeoTransform()
# Width of cells in longitude
long_width = src_gt[1]
        # Set initial lat to the top left corner latitude
lat = src_gt[3]
# Width of cells in latitude
pixel_height = src_gt[5]
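        # In a north-up raster the row height (src_gt[5]) is negative, so adding it
        # while advancing through rows moves the latitude southwards.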
area_missing = 0
area_non_forest = 0
area_water = 0
area_site = 0
initial_forest_area = 0
initial_carbon_total = 0
forest_loss = np.zeros(year_end - year_start)
carbon_loss = np.zeros(year_end - year_start)
blocks = 0
for y in range(0, ysize, y_block_size):
if y + y_block_size < ysize:
rows = y_block_size
else:
rows = ysize - y
for x in range(0, xsize, x_block_size):
if feedback.isCanceled():
log("Processing of {} killed by user after processing {} out of {} blocks.".format(src_file, y, ysize))
break
feedback.setProgress(100 * (float(y) + (float(x)/xsize)*y_block_size) / ysize)
if x + x_block_size < xsize:
cols = x_block_size
else:
cols = xsize - x
f_loss_array = band_f_loss.ReadAsArray(x, y, cols, rows)
tc_array = band_tc.ReadAsArray(x, y, cols, rows)
                # Calculate the cell area for each horizontal line
cell_areas = np.array([calc_cell_area(lat + pixel_height*n, lat + pixel_height*(n + 1), long_width) for n in range(rows)])
cell_areas.shape = (cell_areas.size, 1)
# Make an array of the same size as the input arrays containing
                # the area of each cell (which is identical for all cells in a
# given row - cell areas only vary among rows)
cell_areas_array = np.repeat(cell_areas, cols, axis=1)
initial_forest_pixels = (f_loss_array == 0) | (f_loss_array > (year_start - 2000))
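                # Pixel codes store the forest-loss year minus 2000; code 0 means no
                # recorded loss, so a pixel counts as initially forested if it never
                # lost forest or lost it after the start year.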
# The site area includes everything that isn't masked
area_missing = area_missing + np.sum(((f_loss_array == -32768) | (tc_array == -32768)) * cell_areas_array)
area_water = area_water + np.sum((f_loss_array == -2) * cell_areas_array)
area_non_forest = area_non_forest + np.sum((f_loss_array == -1) * cell_areas_array)
area_site = area_site + np.sum((f_loss_array != -32767) * cell_areas_array)
initial_forest_area = initial_forest_area + np.sum(initial_forest_pixels * cell_areas_array)
initial_carbon_total = initial_carbon_total + np.sum(initial_forest_pixels * tc_array * (tc_array >= 0) * cell_areas_array)
for n in range(year_end - year_start):
# Note the codes are year - 2000
forest_loss[n] = forest_loss[n] + np.sum((f_loss_array == year_start - 2000 + n + 1) * cell_areas_array)
# Check units here - is tc_array in per m or per ha?
carbon_loss[n] = carbon_loss[n] + np.sum((f_loss_array == year_start - 2000 + n + 1) * tc_array * (tc_array >= 0) * cell_areas_array)
blocks += 1
lat += pixel_height * rows
feedback.setProgress(100)
if feedback.isCanceled():
return {}
else:
            # Convert all area totals from square meters into hectares
forest_loss = forest_loss * 1e-4
# Note that carbon is scaled by 10
carbon_loss = carbon_loss * 1e-4 / 10
area_missing = area_missing * 1e-4
area_water = area_water * 1e-4
area_non_forest = area_non_forest * 1e-4
area_site = area_site * 1e-4
initial_forest_area = initial_forest_area * 1e-4
# Note that carbon is scaled by 10
initial_carbon_total = initial_carbon_total * 1e-4 / 10
return {'FOREST_LOSS': np.array2string(forest_loss),
'CARBON_LOSS': np.array2string(carbon_loss),
'CARBON_INITIAL': initial_carbon_total,
'AREA_FOREST': initial_forest_area,
'AREA_NON_FOREST': area_non_forest,
'AREA_WATER': area_water,
'AREA_MISSING': area_missing,
'AREA_SITE': area_site}
| gpl-2.0 | 3,302,916,721,898,712,000 | 35.910314 | 153 | 0.532864 | false |
hasgeek/funnel | migrations/versions/887db555cca9_adding_uuid_to_commentset.py | 1 | 1769 | """Adding uuid to commentset.
Revision ID: 887db555cca9
Revises: 222b78a8508d
Create Date: 2020-05-08 19:16:15.324555
"""
from uuid import uuid4
from alembic import op
from sqlalchemy.sql import column, table
from sqlalchemy_utils import UUIDType
import sqlalchemy as sa
from progressbar import ProgressBar
import progressbar.widgets
# revision identifiers, used by Alembic.
revision = '887db555cca9'
down_revision = '222b78a8508d'
branch_labels = None
depends_on = None
commentset = table(
'commentset', column('id', sa.Integer()), column('uuid', UUIDType(binary=False))
)
def get_progressbar(label, maxval):
return ProgressBar(
maxval=maxval,
widgets=[
label,
': ',
progressbar.widgets.Percentage(),
' ',
progressbar.widgets.Bar(),
' ',
progressbar.widgets.ETA(),
' ',
],
)
def upgrade():
conn = op.get_bind()
op.add_column(
'commentset', sa.Column('uuid', UUIDType(binary=False), nullable=True)
)
count = conn.scalar(sa.select([sa.func.count('*')]).select_from(commentset))
progress = get_progressbar("Commentsets", count)
progress.start()
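    # Backfill: give every existing commentset row a fresh UUID, one UPDATE per
    # row, before the column is made NOT NULL and unique below.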
items = conn.execute(sa.select([commentset.c.id]))
for counter, item in enumerate(items):
conn.execute(
sa.update(commentset).where(commentset.c.id == item.id).values(uuid=uuid4())
)
progress.update(counter)
progress.finish()
op.alter_column('commentset', 'uuid', nullable=False)
op.create_unique_constraint('commentset_uuid_key', 'commentset', ['uuid'])
def downgrade():
op.drop_constraint('commentset_uuid_key', 'commentset', type_='unique')
op.drop_column('commentset', 'uuid')
| agpl-3.0 | -8,246,667,750,301,554,000 | 23.915493 | 88 | 0.642736 | false |
brigittebigi/proceed | proceed/src/wxgui/frames/import_wizard.py | 1 | 18211 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
# ---------------------------------------------------------------------------
# ___ __ ___ ___ ____ ____ __
# | \ | \ | | / | | | \ Automatic
# |__/ |__/ | | | |__ |__ | | Conference
# | |\_ | | | | | | | Proceedings
# | | \ |___| \___ |___ |___ |__/ Generator
# ==========================================================
#
# http://www.lpl-aix.fr/~bigi/
#
# ---------------------------------------------------------------------------
# developed at:
#
# Laboratoire Parole et Langage
#
# Copyright (C) 2013-2014 Brigitte Bigi
#
# Use of this software is governed by the GPL, v3
# This banner notice must not be removed
# ---------------------------------------------------------------------------
#
# Proceed is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Proceed is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Proceed. If not, see <http://www.gnu.org/licenses/>.
#
# ---------------------------------------------------------------------------
__docformat__ = "epytext"
# ---------------------------------------------------------------------------
import wx
import wx.lib.newevent
import wx.wizard
import logging
import os.path
import sys
sys.path.append( os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname( os.path.abspath(__file__))))), "src") )
from wxgui.cutils.imageutils import spBitmap
from wxgui.sp_consts import HEADER_FONTSIZE
from wxgui.sp_consts import FRAME_STYLE
from wxgui.sp_consts import FRAME_TITLE
from wxgui.sp_icons import IMPORT_EXPORT_ICON
from wxgui.sp_icons import GRID_ICON
from wxgui.sp_icons import TEX_ICON
from wxgui.sp_icons import WWW_ICON
from DataIO.Read.reader import Reader
from DataIO.Write.writer import Writer
from structs.prefs import Preferences
from structs.abstracts_themes import all_themes
from wxgui.frames.processprogress import ProcessProgressDialog
# ---------------------------------------------------------------------------
ImportFinishedEvent, EVT_IMPORT_WIZARD_FINISHED = wx.lib.newevent.NewEvent()
ImportFinishedCommandEvent, EVT_IMPORT_WIZARD_FINISHED_COMMAND = wx.lib.newevent.NewCommandEvent()
# ---------------------------------------------------------------------------
class ImportWizard( wx.wizard.Wizard ):
def __init__(self, parent):
wx.wizard.Wizard.__init__(self, parent, -1, title=FRAME_TITLE+" - Import", style=FRAME_STYLE)
self.output = ""
self.page0 = InputPage(self)
self.page0.SetName("input")
self.page1 = OutputPage(self)
self.page1.SetName("output")
self.page2 = LatexPage(self)
self.page2.SetName("latex")
wx.wizard.WizardPageSimple.Chain(self.page0, self.page1)
wx.wizard.WizardPageSimple.Chain(self.page1, self.page2)
self.Bind(wx.wizard.EVT_WIZARD_PAGE_CHANGED, self.onPageChanged)
self.Bind(wx.wizard.EVT_WIZARD_FINISHED, self.onFinished)
wx.CallAfter(self.SetSize,(520,440))
self.RunWizard(self.page0)
self.Destroy()
#----------------------------------------------------------------------
def onPageChanged(self, event):
""""""
page = event.GetPage()
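        # Fires when the wizard advances: validate what was entered on the page we
        # just left and, if valid, run that stage's work (parse the input file /
        # write the CSV-HTML output) before the new page is shown.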
if page.GetName() == "output":
if not os.path.exists(self.page0.urlFld.GetValue()):
wx.MessageBox("A valid input file name is required.", 'Info', wx.OK | wx.ICON_INFORMATION)
self.RunWizard(self.page0)
return
else:
p = ProcessProgressDialog(self)
p.Show()
arguments = {}
arguments['readername'] = self.page0.confname
arguments['filename'] = self.page0.urlFld.GetValue()
arguments['authorsfilename'] = self.page0.urlauthFld.GetValue()
arguments['progress'] = p
try:
self.reader = Reader( arguments )
p.close()
except Exception as e:
wx.MessageBox("Error while reading file:\n%s"%str(e), 'Info', wx.OK | wx.ICON_INFORMATION)
self.Destroy()
elif page.GetName() == "latex":
# if len(self.page1.urlFld.GetValue().strip()):
# wx.MessageBox("A directory is required.", 'Info', wx.OK | wx.ICON_INFORMATION)
# self.RunWizard(self.page1)
# return
self.output = self.page1.urlFld.GetValue().strip()
if not os.path.exists( self.output ):
try:
os.mkdir( self.output )
except Exception as e:
wx.MessageBox("Error while creating output directory:\n%s"%str(e), 'Info', wx.OK | wx.ICON_INFORMATION)
self.RunWizard(self.page1)
return
try:
self.writer = Writer( self.reader.docs )
self.writer.set_status( self.page1.status )
if self.page1.exportcsv:
self.writer.writeCSV( self.output )
if self.page1.exporthtml:
self.writer.writeHTML( self.output )
except Exception as e:
wx.MessageBox("Error while creating output files:\n%s"%str(e), 'Info', wx.OK | wx.ICON_INFORMATION)
self.RunWizard(self.page1)
return
#----------------------------------------------------------------------
def onFinished(self, event):
""""""
if self.page2.export is True:
# Create preferences
prefs = Preferences()
theme = all_themes.get_theme( self.page2.theme )
prefs.SetTheme( theme )
prefs.SetValue('COMPILER', 'str', self.page2.compiler.strip())
# Write as LaTeX in the same dir as proceed CSV files
p = ProcessProgressDialog(self)
p.Show()
self.writer.set_progress(p)
self.writer.writeLaTeX_as_Dir( self.output, prefs )
self.writer.set_progress(None)
p.close()
evt = ImportFinishedEvent(path=self.output)
evt.SetEventObject(self)
wx.PostEvent(self.GetParent(), evt)
#----------------------------------------------------------------------
# ----------------------------------------------------------------------------
class InputPage(wx.wizard.WizardPageSimple):
""" Parameters for the input data. """
def __init__(self, parent):
"""
Constructor.
"""
wx.wizard.WizardPageSimple.__init__(self, parent)
sizer = wx.BoxSizer(wx.VERTICAL)
self.dirname = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))))
title_layout = wx.BoxSizer(wx.HORIZONTAL)
bmp = wx.BitmapButton(self, bitmap=spBitmap(IMPORT_EXPORT_ICON, 32), style=wx.NO_BORDER)
font = wx.Font(HEADER_FONTSIZE, wx.MODERN, wx.NORMAL, wx.BOLD, False, u'Consolas')
title_label = wx.StaticText(self, label="File to import and related information:", style=wx.ALIGN_CENTER)
title_label.SetFont( font )
title_layout.Add(bmp, flag=wx.TOP|wx.RIGHT|wx.ALIGN_RIGHT, border=5)
title_layout.Add(title_label, flag=wx.EXPAND|wx.ALL|wx.ALIGN_CENTER_VERTICAL, border=5)
sizer.Add(title_layout, 0, flag=wx.ALL, border=0)
sizer.Add((-1, 10))
# --------- Conference web site
confnames = ['sciencesconf', 'easychair']
self.confname = 'sciencesconf'
readername = wx.RadioBox(self, label=" The file to import comes from: ", size=(410,-1), choices=confnames, majorDimension=1)
readername.SetSelection( 0 )
readername.Bind(wx.EVT_RADIOBOX, self.onConfName)
sizer.Add(readername, 0, flag=wx.ALL, border=0)
sizer.Add((-1, 10))
# --------- Input file name
hBox = wx.BoxSizer(wx.HORIZONTAL)
hBox.Add(wx.StaticText(self, label="File name:", size=(100,30)), flag=wx.TOP|wx.ALIGN_CENTER_VERTICAL, border=5)
self.urlFld = wx.TextCtrl(self, size=(300,30))
hBox.Add(self.urlFld, 1, flag=wx.LEFT, border=2)
checkBtn = wx.Button(self, -1, "Choose...", size=(80,30))
checkBtn.Bind(wx.EVT_BUTTON, lambda evt, temp="input": self.onOpen(evt, temp) )
hBox.Add(checkBtn, 0, flag=wx.LEFT, border=10)
sizer.Add(hBox, flag=wx.EXPAND | wx.LEFT | wx.RIGHT | wx.TOP)
sizer.Add((-1, 10))
# --------- Input file name for authors
hBox = wx.BoxSizer(wx.HORIZONTAL)
self.authtext = wx.StaticText(self, label="Authors file:", size=(100,30))
hBox.Add(self.authtext, flag=wx.TOP|wx.ALIGN_CENTER_VERTICAL, border=5)
self.urlauthFld = wx.TextCtrl(self, size=(300,30))
hBox.Add(self.urlauthFld, 1, flag=wx.LEFT, border=2)
self.checkauthBtn = wx.Button(self, -1, "Choose...", size=(80,30))
self.checkauthBtn.Bind(wx.EVT_BUTTON, lambda evt, temp="author": self.onOpen(evt, temp) )
hBox.Add(self.checkauthBtn, 0, flag=wx.LEFT, border=10)
sizer.Add(hBox, flag=wx.EXPAND | wx.LEFT | wx.RIGHT | wx.TOP)
self.enable()
self.Layout()
self.SetSizerAndFit(sizer)
def onOpen(self, event, temp):
filename = self.file_open()
if filename:
if temp == "input":
self.urlFld.SetValue(filename)
else:
self.urlauthFld.SetValue(filename)
def onConfName(self, event):
o = event.GetEventObject()
self.confname = o.GetStringSelection()
self.enable()
def enable(self):
if self.confname == 'easychair':
self.authtext.SetForegroundColour( wx.Colour(180,80,80))
self.checkauthBtn.Enable(True)
else:
self.authtext.SetForegroundColour( wx.Colour(128,128,128))
self.checkauthBtn.Enable(False)
def file_open(self):
with wx.FileDialog(self, "Choose a file to import", self.dirname,
"", "*.*", wx.OPEN) as dlg:
if dlg.ShowModal() == wx.ID_OK:
directory, filename = dlg.GetDirectory(), dlg.GetFilename()
return os.path.join(directory, filename)
return None
# ----------------------------------------------------------------------------
class OutputPage(wx.wizard.WizardPageSimple):
""" Parameters for the output data. """
def __init__(self, parent):
"""
Constructor.
"""
wx.wizard.WizardPageSimple.__init__(self, parent)
self.urlFld = ""
self.dirname = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))))
sizer = wx.BoxSizer(wx.VERTICAL)
title_layout = wx.BoxSizer(wx.HORIZONTAL)
bmp = wx.BitmapButton(self, bitmap=spBitmap(GRID_ICON, 32), style=wx.NO_BORDER)
font = wx.Font(HEADER_FONTSIZE, wx.MODERN, wx.NORMAL, wx.BOLD, False, u'Consolas')
title_label = wx.StaticText(self, label="Where to save:", style=wx.ALIGN_CENTER)
title_label.SetFont( font )
title_layout.Add(bmp, flag=wx.TOP|wx.RIGHT|wx.ALIGN_RIGHT, border=5)
title_layout.Add(title_label, flag=wx.EXPAND|wx.ALL|wx.ALIGN_CENTER_VERTICAL, border=5)
sizer.Add(title_layout, 0, flag=wx.ALL, border=0)
sizer.Add((-1, 10))
# --------- Output directory
hBox = wx.BoxSizer(wx.HORIZONTAL)
hBox.Add(wx.StaticText(self, label="Directory:", size=(100,30)), flag=wx.TOP|wx.ALIGN_CENTER_VERTICAL, border=5)
self.urlFld = wx.TextCtrl(self, size=(300,30))
hBox.Add(self.urlFld, 1, flag=wx.LEFT, border=2)
checkBtn = wx.Button(self, -1, "Choose...", size=(80,30))
checkBtn.Bind(wx.EVT_BUTTON, self.onDirectory )
hBox.Add(checkBtn, 0, flag=wx.LEFT, border=10)
sizer.Add(hBox, flag=wx.EXPAND | wx.LEFT | wx.RIGHT | wx.TOP)
sizer.Add((-1, 10))
self.SetSizer(sizer)
# ---------- Status
allstatus = ['init papers (status=0)', 'only accepted papers (status=1)']
self.status = 1
statusradio = wx.RadioBox(self, label=" Choose papers to save: ", size=(410,-1), choices=allstatus, majorDimension=1)
statusradio.SetSelection( 1 )
statusradio.Bind(wx.EVT_RADIOBOX, self.onStatus)
sizer.Add(statusradio, 0, flag=wx.ALL, border=0)
sizer.Add((-1, 20))
# ----------CSV
self.exportcsv = True
cbp = wx.CheckBox(self, label="Save as CSV files for Proceed", size=(300,-1))
cbp.SetValue(True)
cbp.Bind(wx.EVT_CHECKBOX, self.onExportAsCSV)
sizer.Add(cbp, 0, flag=wx.LEFT, border=0)
sizer.Add((-1, 10))
# ----------HTML
self.exporthtml = False
cbp = wx.CheckBox(self, label="Save the list of papers in HTML", size=(300,-1))
cbp.SetValue(False)
cbp.Bind(wx.EVT_CHECKBOX, self.onExportAsHTML)
sizer.Add(cbp, 0, flag=wx.LEFT, border=0)
self.SetSizerAndFit(sizer)
def onDirectory(self, event):
with wx.DirDialog(self, "Choose a directory to save in", self.dirname, style=wx.DD_CHANGE_DIR) as dlg:
if dlg.ShowModal() == wx.ID_OK:
self.urlFld.SetValue( dlg.GetPath() )
def onStatus(self, event):
o = event.GetEventObject()
self.status = o.GetSelection()
def onExportAsCSV(self, event):
o = event.GetEventObject()
self.exportcsv = bool( o.GetValue() )
def onExportAsHTML(self, event):
o = event.GetEventObject()
self.exporthtml = bool( o.GetValue() )
# ----------------------------------------------------------------------------
class LatexPage(wx.wizard.WizardPageSimple):
""" Process the data. """
def __init__(self, parent):
"""
Constructor.
"""
wx.wizard.WizardPageSimple.__init__(self, parent)
self.urlFld = ""
self.dirname = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))))
sizer = wx.BoxSizer(wx.VERTICAL)
title_layout = wx.BoxSizer(wx.HORIZONTAL)
bmp = wx.BitmapButton(self, bitmap=spBitmap(GRID_ICON, 32), style=wx.NO_BORDER)
font = wx.Font(HEADER_FONTSIZE, wx.MODERN, wx.NORMAL, wx.BOLD, False, u'Consolas')
title_label = wx.StaticText(self, label="Save abstracts as LaTeX...", style=wx.ALIGN_CENTER)
title_label.SetFont( font )
title_layout.Add(bmp, flag=wx.TOP|wx.RIGHT|wx.ALIGN_RIGHT, border=5)
title_layout.Add(title_label, flag=wx.EXPAND|wx.ALL|wx.ALIGN_CENTER_VERTICAL, border=5)
sizer.Add(title_layout, 0, flag=wx.ALL, border=0)
sizer.Add((-1, 10))
# ----------CHECK
self.export = False
cbp = wx.CheckBox(self, label="Create each abstract as a LaTeX file", size=(300,-1))
cbp.SetValue(False)
cbp.Bind(wx.EVT_CHECKBOX, self.onExport)
sizer.Add(cbp, 0, flag=wx.LEFT, border=0)
sizer.Add((-1, 10))
# ------------- Theme
self.theme = 'basic'
thlist = sorted(all_themes.get_themes().keys())
self.themeradio = wx.RadioBox(self, label=" Choose a style: ", size=(410,-1), choices=thlist, majorDimension=1)
self.themeradio.SetSelection( thlist.index( 'basic' ) )
self.themeradio.Bind(wx.EVT_RADIOBOX, self.onTheme)
sizer.Add(self.themeradio, 0, flag=wx.LEFT, border=40)
sizer.Add((-1, 10))
# ------------- Compiler
self.compilers = ['pdflatex', 'xetex']
self.compiler = 'pdflatex'
self.comradio = wx.RadioBox(self, label=" Choose the LaTeX compiler: ", size=(410,-1), choices=self.compilers, majorDimension=1)
self.comradio.SetSelection( 0 )
self.comradio.Bind(wx.EVT_RADIOBOX, self.onCompiler)
sizer.Add(self.comradio, 0, flag=wx.LEFT, border=40)
sizer.Add((-1, 10))
# ------------- PDF
self.pdf = True
self.cbp = wx.CheckBox(self, label="Compile the LaTeX files", size=(300,-1))
self.cbp.SetValue(True)
self.cbp.Bind(wx.EVT_CHECKBOX, self.onPDFChange)
sizer.Add(self.cbp, 0, flag=wx.LEFT, border=40)
self.enable(False)
self.SetSizerAndFit(sizer)
def onCompiler(self, event):
o = event.GetEventObject()
self.compiler = o.GetStringSelection()
def onTheme(self, event):
o = event.GetEventObject()
self.theme = o.GetStringSelection()
def onPDFChange(self, event):
o = event.GetEventObject()
self.pdf = bool( o.GetValue() )
def onExport(self, event):
o = event.GetEventObject()
self.export = bool( o.GetValue() )
self.enable(self.export)
def enable(self, value):
if value is False:
self.themeradio.SetForegroundColour(wx.Colour(128,128,128))
self.comradio.SetForegroundColour(wx.Colour(128,128,128))
else:
self.themeradio.SetForegroundColour(wx.Colour(80,80,200))
self.comradio.SetForegroundColour(wx.Colour(80,80,200))
for i in range(len(all_themes.get_themes().keys())):
self.themeradio.EnableItem(i,value)
for i in range(len(self.compilers)):
self.comradio.EnableItem(i,value)
self.cbp.Enable(value)
# ----------------------------------------------------------------------------
if __name__ == "__main__":
app = wx.App(False)
ImportWizard(None)
app.MainLoop()
#----------------------------------------------------------------------
| gpl-3.0 | -3,954,575,004,273,499,000 | 40.768349 | 143 | 0.557905 | false |